commit d53e47c7d5
Merge branch 'devel' of https://github.com/arangodb/arangodb into FMH

@@ -167,10 +167,18 @@ the sort parameter.
   `json` option. The `body` instead contains the response body as a string.


-v2.8.8 (????-??-??)
+v2.8.9 (XXXX-XX-XX)
 -------------------

-* fixed issue #1805: Query: internal error (location: arangod/Aql/AqlValue.cpp:182). Please report this error to arangodb.com (while executing)
+* fixed access to /_admin/statistics API in case statistics are disable via option
+  `--server.disable-statistics`

+
+v2.8.8 (2016-04-19)
+-------------------
+
+* fixed issue #1805: Query: internal error (location: arangod/Aql/AqlValue.cpp:182).
+  Please report this error to arangodb.com (while executing)
+
 * allow specifying collection name prefixes for `_from` and `_to` in arangoimp:

@@ -196,55 +196,39 @@ TRI_doc_mptr_t* EdgeIndexIterator::next() {
       return nullptr;
     }

-    if (_buffer == nullptr) {
+    if (_buffer.empty()) {
       // We start a new lookup
-      TRI_ASSERT(_position == 0);
       _posInBuffer = 0;
-      _last = nullptr;

       VPackSlice tmp = _keys.at(_position);
       if (tmp.isObject()) {
         tmp = tmp.get(TRI_SLICE_KEY_EQUAL);
       }
-      _buffer = _index->lookupByKey(_trx, &tmp, _batchSize);
+      _index->lookupByKey(_trx, &tmp, _buffer, _batchSize);
       // fallthrough intentional
-    } else if (_posInBuffer >= _buffer->size()) {
+    } else if (_posInBuffer >= _buffer.size()) {
       // We have to refill the buffer
-      delete _buffer;
-      _buffer = nullptr;
+      auto last = _buffer.back();
+      _buffer.clear();

       _posInBuffer = 0;
-      if (_last != nullptr) {
-        _buffer = _index->lookupByKeyContinue(_trx, _last, _batchSize);
-      } else {
-        VPackSlice tmp = _keys.at(_position);
-        if (tmp.isObject()) {
-          tmp = tmp.get(TRI_SLICE_KEY_EQUAL);
-        }
-        _buffer = _index->lookupByKey(_trx, &tmp, _batchSize);
-      }
+      _index->lookupByKeyContinue(_trx, last, _buffer, _batchSize);
     }

-    if (!_buffer->empty()) {
+    if (!_buffer.empty()) {
       // found something
-      _last = _buffer->back();
-      return _buffer->at(_posInBuffer++);
+      return _buffer.at(_posInBuffer++);
     }

     // found no result. now go to next lookup value in _keys
     ++_position;
-    // reset the _last value
-    _last = nullptr;
   }
 }

 void EdgeIndexIterator::reset() {
-  _last = nullptr;
   _position = 0;
   _posInBuffer = 0;
-  // Free the vector space, not the content
-  delete _buffer;
-  _buffer = nullptr;
+  _buffer.clear();
 }

 TRI_doc_mptr_t* AnyDirectionEdgeIndexIterator::next() {
@@ -51,11 +51,8 @@ class EdgeIndexIterator final : public IndexIterator {
         _searchValues(searchValues),
         _keys(_searchValues.slice()),
         _position(0),
-        _last(nullptr),
-        _buffer(nullptr),
         _posInBuffer(0),
-        _batchSize(50) { // This might be adjusted
-  }
+        _batchSize(1000) {}

   EdgeIndexIterator(arangodb::Transaction* trx,
                     TRI_EdgeIndexHash_t const* index,
@@ -65,16 +62,8 @@ class EdgeIndexIterator final : public IndexIterator {
         _searchValues(arangodb::velocypack::Builder::clone(searchValues)),
         _keys(_searchValues.slice()),
         _position(0),
-        _last(nullptr),
-        _buffer(nullptr),
         _posInBuffer(0),
-        _batchSize(50) { // This might be adjusted
-  }
+        _batchSize(1000) {}

-  ~EdgeIndexIterator() {
-    // Free the vector space, not the content
-    delete _buffer;
-  }
-
  private:
   arangodb::Transaction* _trx;
@@ -82,8 +71,7 @@ class EdgeIndexIterator final : public IndexIterator {
   arangodb::velocypack::Builder const _searchValues;
   arangodb::velocypack::Slice const _keys;
   size_t _position;
-  TRI_doc_mptr_t* _last;
-  std::vector<TRI_doc_mptr_t*>* _buffer;
+  std::vector<TRI_doc_mptr_t*> _buffer;
   size_t _posInBuffer;
   size_t _batchSize;
 };
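
The EdgeIndexIterator hunks above drop the heap-allocated `_buffer` pointer and the `_last` bookmark in favour of a `std::vector` held by value, which is cleared and refilled through the new vector-based lookup calls. The following is a minimal standalone sketch of that buffering pattern, not the real ArangoDB classes: `Index`, `Element` and the offset-based `lookup()` are simplified stand-ins (the real iterator resumes from the last element via `lookupByKeyContinue` rather than from an offset).

#include <cstddef>
#include <iostream>
#include <vector>

// Simplified stand-ins for the real index and element types.
using Element = int;

struct Index {
  std::vector<Element> data{1, 2, 3, 4, 5, 6, 7};
  // Append elements starting at `offset` into `out` until `out` holds `batch` elements.
  void lookup(size_t offset, std::vector<Element>& out, size_t batch) const {
    for (size_t i = offset; i < data.size() && out.size() < batch; ++i) {
      out.push_back(data[i]);
    }
  }
};

class Iterator {
 public:
  explicit Iterator(Index const* index) : _index(index) {}

  // Returns the next element, or nullptr when the index is exhausted.
  Element const* next() {
    if (_buffer.empty() || _posInBuffer >= _buffer.size()) {
      _buffer.clear();              // reuse the vector's capacity
      _posInBuffer = 0;
      _index->lookup(_fetched, _buffer, _batchSize);
      if (_buffer.empty()) {
        return nullptr;             // nothing left to fetch
      }
      _fetched += _buffer.size();
    }
    return &_buffer[_posInBuffer++];
  }

 private:
  Index const* _index;
  std::vector<Element> _buffer;     // owned by value; no delete needed
  size_t _posInBuffer = 0;
  size_t _fetched = 0;
  size_t _batchSize = 3;
};

int main() {
  Index idx;
  Iterator it(&idx);
  while (Element const* e = it.next()) {
    std::cout << *e << ' ';
  }
  std::cout << '\n';                // prints: 1 2 3 4 5 6 7
}

Holding the buffer by value is what lets the diff delete the destructor and the manual `delete` in `reset()`; `clear()` keeps the vector's capacity, so refills appear to be the motivation for the reusable buffer.
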
@@ -469,6 +469,10 @@ bool NeighborsOptions::matchesVertex(std::string const& collectionName,
 ////////////////////////////////////////////////////////////////////////////////

 bool NeighborsOptions::matchesVertex(std::string const& id) const {
+  if (!useVertexFilter) {
+    // Nothing to do
+    return true;
+  }
   std::vector<std::string> parts =
       arangodb::basics::StringUtils::split(id, "/");
   TRI_ASSERT(parts.size() == 2);
@@ -155,13 +155,17 @@ authRouter.post('disableVersionCheck', function(req, res) {
 authRouter.post('/query/explain', function(req, res) {
   const bindVars = req.body.bindVars;
   const query = req.body.query;
+  const id = req.body.id;
+  const batchSize = req.body.batchSize;
   let msg = null;

   try {
     if (bindVars) {
       msg = require('@arangodb/aql/explainer').explain({
         query: query,
-        bindVars: bindVars
+        bindVars: bindVars,
+        batchSize: batchSize,
+        id: id
       }, {colors: false}, false, bindVars);
     } else {
       msg = require('@arangodb/aql/explainer').explain(query, {colors: false}, false);
@@ -174,7 +178,9 @@ authRouter.post('/query/explain', function(req, res) {
 })
 .body(joi.object({
   query: joi.string().required(),
-  bindVars: joi.object().optional()
+  bindVars: joi.object().optional(),
+  batchSize: joi.number().optional(),
+  id: joi.string().optional()
 }).required(), 'Query and bindVars to explain.')
 .summary('Explains a query')
 .description(dd`
@@ -145,7 +145,7 @@ installer.put('/zip', function (req) {

 foxxRouter.delete('/', function (req, res) {
   const mount = decodeURIComponent(req.queryParams.mount);
-  const runTeardown = req.parameters.teardown;
+  const runTeardown = req.queryParams.teardown;
   const service = FoxxManager.uninstall(mount, {
     teardown: runTeardown,
     force: true
@@ -5,6 +5,7 @@ div.headerDropdown,
   border-radius: 2px;
   clear: both;
   display: none;
+  margin-bottom: 15px;
   padding: 10px;
   position: relative;
   width: auto;
@@ -346,15 +346,18 @@
     @extend %pull-left;
     margin-top: 5px;
     padding: 0 8px;
+    width: 100%;

     .percentage {
       @extend %pull-left;
       font-weight: 400;
+      width: 100%;
     }

     .absolut {
       @extend %pull-left;
-      text-align: right;
+      text-align: center;
+      width: 100%;
     }
   }

@@ -302,6 +302,7 @@
 .dashboard-bar-chart-container {
   $int-width: ($content-size / 3) - 18px - 2px;
   $int-height: (($dashboard-height - 30px) / 2) - $tendency-height-corrector;
+  $int-height2: ($int-height - 20px);
   height: $int-height + 2px;

   .dashboard-bar-chart {
@@ -317,6 +318,7 @@
     }

     .absolut {
+      line-height: ($int-height2 * 2 / 3) - 10px;
       width: ($int-width - (($int-width / 2) * 1.4)) - 6px;
     }
   }
@@ -324,10 +326,11 @@
   .dashboard-bar-chart-chart {

     $int-height2: ($int-height - 20px);
+    box-sizing: border-box;
     height: ($int-height2 * 2 / 3) - 5px;
     line-height: ($int-height2 * 2 / 3) - 5px;
     padding: 5px 8px 10px;
-    width: $int-width - 16px;
+    width: 100%;

     #residentSizeChart {
       height: ($int-height2 * 2 / 3);
@@ -45,28 +45,6 @@ global.clearInterval = global.clearInterval || function () {};
 global.setTimeout = global.setTimeout || function () {};
 global.clearTimeout = global.clearTimeout || function () {};

-////////////////////////////////////////////////////////////////////////////////
-/// @brief template string generator for building an AQL query
-////////////////////////////////////////////////////////////////////////////////
-
-global.aqlQuery = function () {
-  var strings = arguments[0];
-  var bindVars = {};
-  var query = strings[0];
-  var name, value, i;
-  for (i = 1; i < arguments.length; i++) {
-    value = arguments[i];
-    name = 'value' + (i - 1);
-    if (value && value.constructor && value.constructor.name === 'ArangoCollection') {
-      name = '@' + name;
-      value = value.name();
-    }
-    bindVars[name] = value;
-    query += '@' + name + strings[i];
-  }
-  return {query: query, bindVars: bindVars};
-};
-
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief start paging
 ////////////////////////////////////////////////////////////////////////////////
@@ -128,6 +106,12 @@ global.console = global.console || require("console");

 global.db = require("@arangodb").db;

+////////////////////////////////////////////////////////////////////////////////
+/// @brief template string generator for building an AQL query
+////////////////////////////////////////////////////////////////////////////////
+
+global.aqlQuery = require("@arangodb").aql;
+
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief global 'arango'
 ////////////////////////////////////////////////////////////////////////////////
@@ -40,6 +40,23 @@ Object.keys(internal.errors).forEach(function (key) {
   exports[key] = internal.errors[key].code;
 });

+exports.aql = function () {
+  let strings = arguments[0];
+  const bindVars = {};
+  let query = strings[0];
+  for (let i = 1; i < arguments.length; i++) {
+    let value = arguments[i];
+    let name = `value${i - 1}`;
+    if (value && value.isArangoCollection) {
+      name = `@${name}`;
+      value = value.name();
+    }
+    bindVars[name] = value;
+    query += `@${name}${strings[i]}`;
+  }
+  return {query, bindVars};
+};
+
 exports.errors = internal.errors;


@@ -1,5 +1,5 @@
 /*jshint globalstrict:false, strict:false */
-/*global assertEqual, assertTrue, assertFalse, assertUndefined, assertMatch, aqlQuery, fail */
+/*global assertEqual, assertTrue, assertFalse, assertUndefined, assertMatch, fail */

 ////////////////////////////////////////////////////////////////////////////////
 /// @brief test the statement class
@@ -32,6 +32,7 @@ var jsunity = require("jsunity");

 var arangodb = require("@arangodb");
 var db = arangodb.db;
+var aql = arangodb.aql;
 var ERRORS = arangodb.errors;


@@ -873,7 +874,7 @@ function StatementSuite () {

     testTemplateStringBuilder : function () {
       var foo = "foo-matic", bar = "BAR o MATIC", what = "' this string \\ \" is ' evil\n`";
-      var result = aqlQuery`FOR ${foo} IN ${bar} RETURN ${what}`;
+      var result = aql`FOR ${foo} IN ${bar} RETURN ${what}`;
       assertEqual("FOR @value0 IN @value1 RETURN @value2", result.query);
       assertEqual({ value0: foo, value1: bar, value2: what }, result.bindVars);
     },
@@ -884,7 +885,7 @@ function StatementSuite () {

     testTemplateStringBuilderComplexTypes : function () {
       var list = [ 1, 2, 3, 4 ], what = { foo: "bar", baz: "bark" };
-      var result = aqlQuery`FOR i IN ${list} RETURN ${what}`;
+      var result = aql`FOR i IN ${list} RETURN ${what}`;
       assertEqual("FOR i IN @value0 RETURN @value1", result.query);
       assertEqual({ value0: [ 1, 2, 3, 4 ], value1: { foo: "bar", baz: "bark" } }, result.bindVars);
     },
@@ -894,7 +895,7 @@ function StatementSuite () {
 ////////////////////////////////////////////////////////////////////////////////

     testTemplateStringBuilderObject : function () {
-      var result = aqlQuery`RETURN ${new Date('2015-01-01').toISOString()}`;
+      var result = aql`RETURN ${new Date('2015-01-01').toISOString()}`;
       assertEqual("RETURN @value0", result.query);
       assertEqual({ value0 : "2015-01-01T00:00:00.000Z" }, result.bindVars);
     },
@@ -905,7 +906,7 @@ function StatementSuite () {

     testTemplateString : function () {
       var one = 1, two = 2, three = 3, add = 9;
-      var st = db._createStatement(aqlQuery`FOR u IN [ ${one}, ${two}, ${three} ] RETURN u + ${add}`);
+      var st = db._createStatement(aql`FOR u IN [ ${one}, ${two}, ${three} ] RETURN u + ${add}`);
       var result = st.execute().toArray();

       assertEqual([ 10, 11, 12 ], result);
@@ -918,7 +919,7 @@ function StatementSuite () {
     testTemplateStringStrings : function () {
       var FOR = "FOR", RETURN = "RETURN", PLUS = "+";
       try {
-        db._createStatement(aqlQuery`${FOR} i IN 1..2 ${RETURN} i ${PLUS} 1`).execute();
+        db._createStatement(aql`${FOR} i IN 1..2 ${RETURN} i ${PLUS} 1`).execute();
         fail();
       }
       catch (err) {
@@ -932,7 +933,7 @@ function StatementSuite () {

     testTemplateStringString : function () {
       var a = "FROM TO RETURN INSERT";
-      var st = db._createStatement(aqlQuery`RETURN ${a}`);
+      var st = db._createStatement(aql`RETURN ${a}`);
       var result = st.execute().toArray();

       assertEqual([ a ], result);
@@ -945,7 +946,7 @@ function StatementSuite () {
     testTemplateStringUndefined : function () {
       try {
         /*global foo */
-        db._createStatement(aqlQuery`FOR u IN ${foo} RETURN 1`);
+        db._createStatement(aql`FOR u IN ${foo} RETURN 1`);
         fail();
       }
       catch (err) {
@@ -64,29 +64,8 @@ global.clearInterval = function () {};
 global.setTimeout = function () {};
 global.clearTimeout = function () {};

-////////////////////////////////////////////////////////////////////////////////
-/// @brief template string generator for building an AQL query
-////////////////////////////////////////////////////////////////////////////////
-
-global.aqlQuery = function () {
-  let strings = arguments[0];
-  const bindVars = {};
-  let query = strings[0];
-  for (let i = 1; i < arguments.length; i++) {
-    let value = arguments[i];
-    let name = `value${i - 1}`;
-    if (value && value.constructor && value.constructor.name === 'ArangoCollection') {
-      name = `@${name}`;
-      value = value.name();
-    }
-    bindVars[name] = value;
-    query += `@${name}${strings[i]}`;
-  }
-  return {query, bindVars};
-};
-
-// extend prototypes for internally defined classes
-require('@arangodb');
+// template string generator for building an AQL query
+global.aqlQuery = require('@arangodb').aql;

 // load the actions from the actions directory
 require('@arangodb/actions').startup();
@@ -1,4 +1,3 @@
-/*global aqlQuery */
 'use strict';

 ////////////////////////////////////////////////////////////////////////////////
@@ -28,6 +27,7 @@ const internal = require('internal');
 const arangodb = require('@arangodb');
 const NOT_FOUND = arangodb.errors.ERROR_ARANGO_DOCUMENT_NOT_FOUND.code;
 const db = arangodb.db;
+const aql = arangodb.aql;


 module.exports = function systemStorage(cfg) {
@@ -35,7 +35,7 @@ module.exports = function systemStorage(cfg) {
   const expiry = Number(internal.options()['server.session-timeout']) * 1000;
   return {
     prune() {
-      return db._query(aqlQuery`
+      return db._query(aql`
         FOR session IN _sessions
         FILTER session.lastAccess < ${Date.now() - expiry}
         REMOVE session IN _sessions
@@ -1,4 +1,3 @@
-/*global aqlQuery */
 'use strict';

 ////////////////////////////////////////////////////////////////////////////////
@@ -27,6 +26,7 @@ const assert = require('assert');
 const arangodb = require('@arangodb');
 const NOT_FOUND = arangodb.errors.ERROR_ARANGO_DOCUMENT_NOT_FOUND.code;
 const db = arangodb.db;
+const aql = arangodb.aql;


 module.exports = function collectionStorage(cfg) {
@@ -47,7 +47,7 @@ module.exports = function collectionStorage(cfg) {
   assert(collection.isArangoCollection, `No such collection: ${cfg.collection}`);
   return {
     prune() {
-      return db._query(aqlQuery`
+      return db._query(aql`
         FOR session IN ${collection}
         FILTER session.expires < DATE_NOW()
         REMOVE session IN ${collection}
@@ -358,7 +358,8 @@ function optimizerRuleTestSuite () {
       var queries = [
         [ "FOR doc IN [ { a: 1, b: 2 }, { a: 2, b: 4 } ] LET a = doc.a RETURN a * 2", [ 2, 4 ] ],
         [ "FOR doc IN [ { a: 1, b: 2 }, { a: 2, b: 4 } ] LET a = doc.a * 2 RETURN a * 2", [ 4, 8 ] ],
-        [ "FOR doc IN [ { a: 1, b: 2 }, { a: 2, b: 4 } ] LET a = doc RETURN a.a", [ 1, 2 ] ]
+        [ "FOR doc IN [ { a: 1, b: 2 }, { a: 2, b: 4 } ] LET a = doc RETURN a.a", [ 1, 2 ] ],
+        [ "FOR doc IN [ { a: 1, b: 2 }, { a: 2, b: 4 } ] LET a = SLICE(ATTRIBUTES(doc), 0, 1) RETURN KEEP(doc, a)", [ { a: 1 }, { a: 2 } ] ]
       ];
       queries.forEach(function(query) {
        var result = AQL_EXPLAIN(query[0]);
@@ -776,6 +776,25 @@ class AssocMulti {
   std::vector<Element*>* lookupByKey(UserData* userData, Key const* key,
                                      size_t limit = 0) const {
     auto result = std::make_unique<std::vector<Element*>>();
+    lookupByKey(userData, key, *result, limit);
+    return result.release();
+  }
+
+
+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief lookups an element given a key
+  /// Accepts a result vector as input. The result of this lookup will
+  /// be appended to the given vector.
+  /// This function returns as soon as limit many elements are inside
+  /// the given vector, no matter if the come from this lookup or
+  /// have been in the result before.
+  //////////////////////////////////////////////////////////////////////////////
+
+  void lookupByKey(UserData* userData, Key const* key,
+                   std::vector<Element*>& result, size_t limit = 0) const {
+    if (limit > 0 && result.size() >= limit) {
+      return;
+    }

     // compute the hash
     uint64_t hashByKey = _hashKey(userData, key);
@@ -803,13 +822,12 @@ class AssocMulti {
       // We found the beginning of the linked list:

       do {
-        result->push_back(b._table[i].ptr);
+        result.push_back(b._table[i].ptr);
         i = b._table[i].next;
-      } while (i != INVALID_INDEX && (limit == 0 || result->size() < limit));
+      } while (i != INVALID_INDEX && (limit == 0 || result.size() < limit));
     }

     // return whatever we found
-    return result.release();
   }

   //////////////////////////////////////////////////////////////////////////////
@@ -820,6 +838,28 @@ class AssocMulti {
                                                 Element const* element,
                                                 size_t limit = 0) const {
     auto result = std::make_unique<std::vector<Element*>>();
+    lookupWithElementByKey(userData, element, *result, limit);
+    return result.release();
+  }
+
+
+
+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief looks up all elements with the same key as a given element
+  /// Accepts a result vector as input. The result of this lookup will
+  /// be appended to the given vector.
+  /// This function returns as soon as limit many elements are inside
+  /// the given vector, no matter if the come from this lookup or
+  /// have been in the result before.
+  //////////////////////////////////////////////////////////////////////////////
+
+  void lookupWithElementByKey(UserData* userData, Element const* element,
+                              std::vector<Element*>& result,
+                              size_t limit = 0) const {
+    if (limit > 0 && result.size() >= limit) {
+      // The vector is full, nothing to do.
+      return;
+    }

     // compute the hash
     uint64_t hashByKey = _hashElement(userData, element, true);
@@ -847,13 +887,11 @@ class AssocMulti {
       // We found the beginning of the linked list:

       do {
-        result->push_back(b._table[i].ptr);
+        result.push_back(b._table[i].ptr);
         i = b._table[i].next;
-      } while (i != INVALID_INDEX && (limit == 0 || result->size() < limit));
+      } while (i != INVALID_INDEX && (limit == 0 || result.size() < limit));
     }

     // return whatever we found
-    return result.release();
   }

   //////////////////////////////////////////////////////////////////////////////
@@ -864,6 +902,28 @@ class AssocMulti {
   std::vector<Element*>* lookupWithElementByKeyContinue(
       UserData* userData, Element const* element, size_t limit = 0) const {
     auto result = std::make_unique<std::vector<Element*>>();
+    lookupWithElementByKeyContinue(userData, element, *result, limit);
+    return result.release();
+  }
+
+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief looks up all elements with the same key as a given element,
+  /// continuation.
+  /// Accepts a result vector as input. The result of this lookup will
+  /// be appended to the given vector.
+  /// This function returns as soon as limit many elements are inside
+  /// the given vector, no matter if the come from this lookup or
+  /// have been in the result before.
+  //////////////////////////////////////////////////////////////////////////////
+
+  void lookupWithElementByKeyContinue(UserData* userData,
+                                      Element const* element,
+                                      std::vector<Element*>& result,
+                                      size_t limit = 0) const {
+    if (limit > 0 && result.size() >= limit) {
+      // The vector is full, nothing to do.
+      return;
+    }

     uint64_t hashByKey = _hashElement(userData, element, true);
     Bucket const& b = _buckets[hashByKey & _bucketsMask];
@@ -891,21 +951,21 @@ class AssocMulti {

       if (b._table[i].ptr == nullptr) {
         // This cannot really happen, but we handle it gracefully anyway
-        return nullptr;
+        return;
       }
     }

     // continue search of the table
     while (true) {
       i = b._table[i].next;
-      if (i == INVALID_INDEX || (limit != 0 && result->size() >= limit)) {
+      if (i == INVALID_INDEX || (limit != 0 && result.size() >= limit)) {
         break;
       }
-      result->push_back(b._table[i].ptr);
+      result.push_back(b._table[i].ptr);
     }

     // return whatever we found
-    return result.release();
+    return;
   }

   //////////////////////////////////////////////////////////////////////////////
@@ -919,6 +979,23 @@ class AssocMulti {
     return lookupWithElementByKeyContinue(userData, element, limit);
   }

+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief looks up all elements with the same key as a given element,
+  /// continuation
+  /// Accepts a result vector as input. The result of this lookup will
+  /// be appended to the given vector.
+  /// This function returns as soon as limit many elements are inside
+  /// the given vector, no matter if the come from this lookup or
+  /// have been in the result before.
+  //////////////////////////////////////////////////////////////////////////////
+
+  void lookupByKeyContinue(UserData* userData, Element const* element,
+                           std::vector<Element*>& result,
+                           size_t limit = 0) const {
+    lookupWithElementByKeyContinue(userData, element, result, limit);
+  }
+
+
 //////////////////////////////////////////////////////////////////////////////
 /// @brief removes an element from the array, caller is responsible to free it
 //////////////////////////////////////////////////////////////////////////////
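
The doc comments added in the AssocMulti hunks above spell out the new calling convention: the caller owns the result vector, lookups append to it, and a lookup returns as soon as the vector holds `limit` elements, with the `...Continue` variants resuming after the last element already found. Below is a rough, self-contained sketch of that contract using a toy multi-map; the class, its key and value types, and the method bodies are illustrative stand-ins, not the actual `AssocMulti` template.

#include <cstddef>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Toy model of the convention documented above: lookups append to a
// caller-provided vector and stop once it holds `limit` elements; a
// continue-lookup resumes after the last element previously returned.
class MultiIndex {
 public:
  void insert(std::string const& key, int value) { _map[key].push_back(value); }

  void lookupByKey(std::string const& key, std::vector<int>& result,
                   size_t limit = 0) const {
    appendFrom(key, 0, result, limit);
  }

  void lookupByKeyContinue(std::string const& key, int lastValue,
                           std::vector<int>& result, size_t limit = 0) const {
    auto it = _map.find(key);
    if (it == _map.end()) return;
    auto const& values = it->second;
    for (size_t i = 0; i < values.size(); ++i) {
      if (values[i] == lastValue) {
        appendFrom(key, i + 1, result, limit);   // resume after lastValue
        return;
      }
    }
  }

 private:
  void appendFrom(std::string const& key, size_t start,
                  std::vector<int>& result, size_t limit) const {
    auto it = _map.find(key);
    if (it == _map.end()) return;
    auto const& values = it->second;
    for (size_t i = start; i < values.size(); ++i) {
      if (limit != 0 && result.size() >= limit) return;  // vector already "full"
      result.push_back(values[i]);
    }
  }

  std::unordered_map<std::string, std::vector<int>> _map;
};

int main() {
  MultiIndex index;
  for (int v = 1; v <= 5; ++v) index.insert("k", v);

  std::vector<int> batch;               // reused across batches, like _buffer
  index.lookupByKey("k", batch, 2);     // first batch: 1 2
  while (!batch.empty()) {
    for (int v : batch) std::cout << v << ' ';
    int last = batch.back();
    batch.clear();
    index.lookupByKeyContinue("k", last, batch, 2);  // next batch after `last`
  }
  std::cout << '\n';                    // prints: 1 2 3 4 5
}

This is essentially the loop that the reworked EdgeIndexIterator::next() in the earlier hunk drives against the edge index: fill a reusable batch vector, consume it, remember the last element, and continue from there.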