mirror of https://gitee.com/bigwinds/arangodb
aggregate fixes
This commit is contained in:
parent 9ac22971c6
commit 6937dbc489
@@ -119,6 +119,7 @@ The current list of keywords is:
- ANY
- ALL
- NONE
- AGGREGATE

Additional keywords may be added in future versions of ArangoDB.
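For reference, a minimal arangosh sketch of the COLLECT ... AGGREGATE syntax that the new keyword enables (the collection name "users" and its attributes are illustrative; the query shape follows the tests added later in this commit):

// arangosh: group documents and compute aggregates per group
var db = require("@arangodb").db;

var result = db._query(
  "FOR u IN users " +
  "  COLLECT city = u.city " +
  "  AGGREGATE minAge = MIN(u.age), maxAge = MAX(u.age), total = LENGTH(u) " +
  "  RETURN { city, minAge, maxAge, total }"
).toArray();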
@@ -624,6 +624,7 @@ SHELL_SERVER_AQL = @top_srcdir@/js/server/tests/aql-arithmetic.js \
@top_srcdir@/js/server/tests/aql-modify-noncluster-serializetest.js \
@top_srcdir@/js/server/tests/aql-multi-modify.js \
@top_srcdir@/js/server/tests/aql-operators.js \
@top_srcdir@/js/server/tests/aql-optimizer-collect-aggregate.js \
@top_srcdir@/js/server/tests/aql-optimizer-collect-count.js \
@top_srcdir@/js/server/tests/aql-optimizer-collect-into.js \
@top_srcdir@/js/server/tests/aql-optimizer-collect-methods.js \
@@ -43,6 +43,8 @@ Aggregator* Aggregator::fromTypeString(triagens::arango::AqlTransaction* trx, st
return new AggregatorAverage(trx);
}

// aggregator function name should have been validated before
TRI_ASSERT(false);
return nullptr;
}
@@ -588,7 +588,11 @@ AstNode* Ast::createNodeCollectAggregate(AstNode const* list, AstNode const* agg

node->addMember(options);
node->addMember(list);
node->addMember(aggregations);

// wrap aggregations again
auto agg = createNode(NODE_TYPE_AGGREGATIONS);
agg->addMember(aggregations);
node->addMember(agg);

return node;
}

@@ -1609,6 +1613,8 @@ void Ast::validateAndOptimize() {
// NOOPT will turn all function optimizations off
++(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
}
} else if (node->type == NODE_TYPE_AGGREGATIONS) {
++(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
} else if (node->hasFlag(FLAG_BIND_PARAMETER)) {
return false;
} else if (node->type == NODE_TYPE_REMOVE ||

@@ -1653,6 +1659,8 @@ void Ast::validateAndOptimize() {
// NOOPT will turn all function optimizations off
--(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
}
} else if (node->type == NODE_TYPE_AGGREGATIONS) {
--(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
}
};
@@ -146,8 +146,8 @@ std::unordered_map<int, std::string const> const AstNode::TypeNames{
{static_cast<int>(NODE_TYPE_FCALL_USER), "user function call"},
{static_cast<int>(NODE_TYPE_RANGE), "range"},
{static_cast<int>(NODE_TYPE_NOP), "no-op"},
{static_cast<int>(NODE_TYPE_COLLECT_COUNT), "collect count"},
{static_cast<int>(NODE_TYPE_COLLECT_EXPRESSION), "collect expression"},
{static_cast<int>(NODE_TYPE_COLLECT_COUNT), "collect with count"},
{static_cast<int>(NODE_TYPE_COLLECT_EXPRESSION), "collect with expression"},
{static_cast<int>(NODE_TYPE_CALCULATED_OBJECT_ELEMENT),
"calculated object element"},
{static_cast<int>(NODE_TYPE_EXAMPLE), "example"},

@@ -159,7 +159,8 @@ std::unordered_map<int, std::string const> const AstNode::TypeNames{
{static_cast<int>(NODE_TYPE_COLLECTION_LIST), "collection list"},
{static_cast<int>(NODE_TYPE_OPERATOR_NARY_AND), "n-ary and"},
{static_cast<int>(NODE_TYPE_OPERATOR_NARY_OR), "n-ary or"},
{static_cast<int>(NODE_TYPE_COLLECT_AGGREGATE), "collect aggregate"}};
{static_cast<int>(NODE_TYPE_COLLECT_AGGREGATE), "collect with aggregate"},
{static_cast<int>(NODE_TYPE_AGGREGATIONS), "aggregations array"}};

////////////////////////////////////////////////////////////////////////////////
/// @brief names for AST node value types

@@ -574,6 +575,7 @@ AstNode::AstNode(Ast* ast, triagens::basics::Json const& json)
case NODE_TYPE_COLLECT_COUNT:
case NODE_TYPE_COLLECT_EXPRESSION:
case NODE_TYPE_COLLECT_AGGREGATE:
case NODE_TYPE_AGGREGATIONS:
case NODE_TYPE_SORT:
case NODE_TYPE_SORT_ELEMENT:
case NODE_TYPE_LIMIT:

@@ -723,6 +725,7 @@ AstNode::AstNode(std::function<void(AstNode*)> registerNode,
case NODE_TYPE_COLLECT_COUNT:
case NODE_TYPE_COLLECT_EXPRESSION:
case NODE_TYPE_COLLECT_AGGREGATE:
case NODE_TYPE_AGGREGATIONS:
case NODE_TYPE_SORT:
case NODE_TYPE_SORT_ELEMENT:
case NODE_TYPE_LIMIT:

@@ -2439,6 +2442,7 @@ void AstNode::findVariableAccess(
case NODE_TYPE_COLLECT_COUNT:
case NODE_TYPE_COLLECT_EXPRESSION:
case NODE_TYPE_COLLECT_AGGREGATE:
case NODE_TYPE_AGGREGATIONS:
case NODE_TYPE_CALCULATED_OBJECT_ELEMENT:
case NODE_TYPE_UPSERT:
case NODE_TYPE_EXAMPLE:

@@ -2581,6 +2585,7 @@ AstNode const* AstNode::findReference(AstNode const* findme) const {
case NODE_TYPE_COLLECT_COUNT:
case NODE_TYPE_COLLECT_EXPRESSION:
case NODE_TYPE_COLLECT_AGGREGATE:
case NODE_TYPE_AGGREGATIONS:
case NODE_TYPE_CALCULATED_OBJECT_ELEMENT:
case NODE_TYPE_UPSERT:
case NODE_TYPE_EXAMPLE:
@@ -190,7 +190,8 @@ enum AstNodeType : uint32_t {
NODE_TYPE_DIRECTION = 61,
NODE_TYPE_OPERATOR_NARY_AND = 62,
NODE_TYPE_OPERATOR_NARY_OR = 63,
NODE_TYPE_COLLECT_AGGREGATE = 64
NODE_TYPE_COLLECT_AGGREGATE = 64,
NODE_TYPE_AGGREGATIONS = 65
};

static_assert(NODE_TYPE_VALUE < NODE_TYPE_ARRAY, "incorrect node types order");
@@ -73,6 +73,9 @@ void SortedCollectBlock::CollectGroup::initialize(size_t capacity) {

// reset aggregators
for (auto& it : aggregators) {
TRI_ASSERT(it != nullptr);
it->reset();
}
}

@@ -95,6 +96,7 @@ void SortedCollectBlock::CollectGroup::reset() {

// reset all aggregators
for (auto& it : aggregators) {
TRI_ASSERT(it != nullptr);
it->reset();
}
}

@@ -621,8 +623,10 @@ int HashedCollectBlock::getOrSkipSome(size_t atLeast, size_t atMost,
// cleanup function for group values
auto cleanup = [&allGroups] () -> void {
for (auto& it : allGroups) {
for (auto& it2 : *(it.second)) {
delete it2;
if (it.second != nullptr) {
for (auto& it2 : *(it.second)) {
delete it2;
}
}
delete it.second;
}

@@ -811,7 +815,6 @@ int HashedCollectBlock::getOrSkipSome(size_t atLeast, size_t atMost,
for (auto& it2 : it.first) {
const_cast<AqlValue*>(&it2)->destroy();
}
delete it.second;
}
throw;
}
@@ -1096,12 +1096,11 @@ ExecutionNode* ExecutionPlan::fromNodeCollectAggregate(ExecutionNode* previous,

auto options = createCollectOptions(node->getMember(0));

std::unordered_map<Variable const*, Variable const*> aliases;

// group variables
std::vector<std::pair<Variable const*, Variable const*>> groupVariables;
{
auto list = node->getMember(1);
TRI_ASSERT(list->type == NODE_TYPE_ARRAY);
size_t const numVars = list->numMembers();

groupVariables.reserve(numVars);

@@ -1129,8 +1128,6 @@ ExecutionNode* ExecutionPlan::fromNodeCollectAggregate(ExecutionNode* previous,
auto calc = createTemporaryCalculation(expression, previous);
previous = calc;
groupVariables.emplace_back(std::make_pair(v, getOutVariable(calc)));

aliases.emplace(v, groupVariables.back().second);
}
}
}

@@ -1138,18 +1135,10 @@ ExecutionNode* ExecutionPlan::fromNodeCollectAggregate(ExecutionNode* previous,
// aggregate variables
std::vector<std::pair<Variable const*, std::pair<Variable const*, std::string>>> aggregateVariables;
{
auto variableReplacer = [&aliases, this] (AstNode* node, void*) -> AstNode* {
if (node->type == NODE_TYPE_REFERENCE) {
auto it = aliases.find(static_cast<Variable const*>(node->getData()));

if (it != aliases.end()) {
return _ast->createNodeReference((*it).second);
}
}
return node;
};

auto list = node->getMember(2);
TRI_ASSERT(list->type == NODE_TYPE_AGGREGATIONS);
list = list->getMember(0);
TRI_ASSERT(list->type == NODE_TYPE_ARRAY);
size_t const numVars = list->numMembers();

aggregateVariables.reserve(numVars);

@@ -1185,8 +1174,6 @@ ExecutionNode* ExecutionPlan::fromNodeCollectAggregate(ExecutionNode* previous,
TRI_ASSERT(args->numMembers() == 1);

auto arg = args->getMember(0);
arg = Ast::traverseAndModify(arg, variableReplacer, nullptr);

if (arg->type == NODE_TYPE_REFERENCE) {
// operand is a variable
@@ -177,7 +177,7 @@ class Parser {
//////////////////////////////////////////////////////////////////////////////

QueryResult parse(bool);

//////////////////////////////////////////////////////////////////////////////
/// @brief register a parse error, position is specified as line / column
//////////////////////////////////////////////////////////////////////////////
@@ -414,6 +414,26 @@ collect_statement:

if (member != nullptr) {
TRI_ASSERT(member->type == NODE_TYPE_ASSIGN);
auto func = member->getMember(1);

bool isValid = true;
if (func->type != NODE_TYPE_FCALL) {
// aggregate expression must be a function call
isValid = false;
}
else {
auto f = static_cast<triagens::aql::Function*>(func->getData());
if (! Aggregator::isSupported(f->externalName)) {
// aggregate expression must be a call to MIN|MAX|LENGTH...
isValid = false;
}
}

if (! isValid) {
parser->registerError(TRI_ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION);
YYABORT;
}

auto v = static_cast<Variable*>(member->getMember(0)->getData());
scopes->addVariable(v);
}

@@ -437,6 +457,7 @@ collect_statement:
scopes->start(triagens::aql::AQL_SCOPE_COLLECT);

// register all group variables
std::unordered_set<Variable const*> groupVars;
size_t n = $1->numMembers();
for (size_t i = 0; i < n; ++i) {
auto member = $1->getMember(i);

@@ -445,11 +466,14 @@ collect_statement:
TRI_ASSERT(member->type == NODE_TYPE_ASSIGN);
auto v = static_cast<Variable*>(member->getMember(0)->getData());
scopes->addVariable(v);
groupVars.emplace(v);
}
}

// register aggregate variables too
n = $2->numMembers();
std::unordered_set<Variable const*> variablesUsed;

for (size_t i = 0; i < n; ++i) {
auto member = $2->getMember(i);

@@ -471,9 +495,20 @@ collect_statement:
}

if (! isValid) {
parser->registerParseError(TRI_ERROR_QUERY_PARSE,
"aggregate expression must be a function call that uses a supported aggregate expression",
yylloc.first_line, yylloc.first_column);
parser->registerError(TRI_ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION);
YYABORT;
}
else {
variablesUsed.clear();
Ast::getReferencedVariables(func, variablesUsed);

for (auto& it : groupVars) {
if (variablesUsed.find(it) != variablesUsed.end()) {
parser->registerParseError(TRI_ERROR_QUERY_VARIABLE_NAME_UNKNOWN,
"use of unknown variable '%s' in aggregate expression", it->name.c_str(), yylloc.first_line, yylloc.first_column);
break;
}
}
}

auto v = static_cast<Variable*>(member->getMember(0)->getData());
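The grammar changes above reject AGGREGATE assignments whose right-hand side is not a plain call to a supported aggregate function, and aggregate expressions may not refer to variables introduced by the same COLLECT. A small arangosh sketch of the resulting behaviour (the collection name "users" is illustrative; the error code follows the tests added in this commit):

var db = require("@arangodb").db;

// accepted: the right-hand side is a plain call to a supported aggregate function
db._query("FOR u IN users COLLECT AGGREGATE total = LENGTH(u) RETURN total");

// rejected with ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION (1574): arbitrary
// expressions wrapped around the aggregate function call are not allowed
try {
  db._query("FOR u IN users COLLECT AGGREGATE total = LENGTH(u) + 1 RETURN total");
} catch (err) {
  require("internal").print(err.errorNum); // 1574
}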
@@ -178,6 +178,7 @@
"ERROR_QUERY_FULLTEXT_INDEX_MISSING" : { "code" : 1571, "message" : "no suitable fulltext index found for fulltext query on '%s'" },
"ERROR_QUERY_INVALID_DATE_VALUE" : { "code" : 1572, "message" : "invalid date value" },
"ERROR_QUERY_MULTI_MODIFY" : { "code" : 1573, "message" : "multi-modify query" },
"ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION" : { "code" : 1574, "message" : "invalid aggregate expression" },
"ERROR_QUERY_COMPILE_TIME_OPTIONS" : { "code" : 1575, "message" : "query options must be readable at query compile time" },
"ERROR_QUERY_EXCEPTION_OPTIONS" : { "code" : 1576, "message" : "query options expected" },
"ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION" : { "code" : 1577, "message" : "collection '%s' used as expression operand" },
@@ -809,7 +809,10 @@ function processQuery (query, explain) {
}).join(", ");

if (node.hasOwnProperty("aggregates")) {
collect += " " + keyword("AGGREGATE") + " " +
if (node.groups.length > 0) {
collect += " ";
}
collect += keyword("AGGREGATE") + " " +
node.aggregates.map(function(node) {
return variableName(node.outVariable) + " = " + func(node.type) + "(" + variableName(node.inVariable) + ")";
}).join(", ");
@@ -88,7 +88,7 @@ var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var AqlHighlightRules = function() {

var keywords = (
"for|return|filter|sort|limit|let|collect|asc|desc|in|into|insert|update|remove|replace|upsert|options|with|and|or|not|distinct|graph|outbound|inbound|any|all"
"for|return|filter|sort|limit|let|collect|asc|desc|in|into|insert|update|remove|replace|upsert|options|with|and|or|not|distinct|graph|outbound|inbound|any|all|none|aggregate"
);

var builtinFunctions = (
@@ -178,6 +178,7 @@
"ERROR_QUERY_FULLTEXT_INDEX_MISSING" : { "code" : 1571, "message" : "no suitable fulltext index found for fulltext query on '%s'" },
"ERROR_QUERY_INVALID_DATE_VALUE" : { "code" : 1572, "message" : "invalid date value" },
"ERROR_QUERY_MULTI_MODIFY" : { "code" : 1573, "message" : "multi-modify query" },
"ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION" : { "code" : 1574, "message" : "invalid aggregate expression" },
"ERROR_QUERY_COMPILE_TIME_OPTIONS" : { "code" : 1575, "message" : "query options must be readable at query compile time" },
"ERROR_QUERY_EXCEPTION_OPTIONS" : { "code" : 1576, "message" : "query options expected" },
"ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION" : { "code" : 1577, "message" : "collection '%s' used as expression operand" },
@@ -808,7 +808,10 @@ function processQuery (query, explain) {
}).join(", ");

if (node.hasOwnProperty("aggregates")) {
collect += " " + keyword("AGGREGATE") + " " +
if (node.groups.length > 0) {
collect += " ";
}
collect += keyword("AGGREGATE") + " " +
node.aggregates.map(function(node) {
return variableName(node.outVariable) + " = " + func(node.type) + "(" + variableName(node.inVariable) + ")";
}).join(", ");
@@ -244,7 +244,7 @@ function assertQueryError (errorCode, query, bindVars) {
catch (e) {
assertTrue(e.errorNum !== undefined, "unexpected error format while calling [" + query + "]");
assertEqual(errorCode, e.errorNum, "unexpected error code (" + e.errorMessage +
"while executing: " + query + "expecting: " + errorCode + "): " );
" while executing: '" + query + "' expecting: " + errorCode + "): ");
}
}
@@ -131,7 +131,7 @@ function ahuacatlFailureSuite () {
////////////////////////////////////////////////////////////////////////////////

testSortedAggregateBlock1 : function () {
internal.debugSetFailAt("AggregatorGroup::addValues");
internal.debugSetFailAt("CollectGroup::addValues");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value INTO g RETURN [ key, g ]");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value2 INTO g RETURN [ key, g ]");
},

@@ -141,7 +141,7 @@ function ahuacatlFailureSuite () {
////////////////////////////////////////////////////////////////////////////////

testSortedAggregateBlock2 : function () {
internal.debugSetFailAt("SortedAggregateBlock::getOrSkipSome");
internal.debugSetFailAt("SortedCollectBlock::getOrSkipSome");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value INTO g RETURN [ key, g ]");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value2 INTO g RETURN [ key, g ]");
assertFailingQuery("FOR i IN 1..10000 COLLECT key = i INTO g RETURN [ key, g ]");

@@ -152,7 +152,7 @@ function ahuacatlFailureSuite () {
////////////////////////////////////////////////////////////////////////////////

testSortedAggregateBlock3 : function () {
internal.debugSetFailAt("SortedAggregateBlock::hasMore");
internal.debugSetFailAt("SortedCollectBlock::hasMore");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value INTO g RETURN [ key, g ]");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value2 INTO g RETURN [ key, g ]");
assertFailingQuery("FOR i IN 1..10000 COLLECT key = i INTO g RETURN [ key, g ]");

@@ -163,7 +163,7 @@ function ahuacatlFailureSuite () {
////////////////////////////////////////////////////////////////////////////////

testHashedAggregateBlock : function () {
internal.debugSetFailAt("HashedAggregateBlock::getOrSkipSome");
internal.debugSetFailAt("HashedCollectBlock::getOrSkipSome");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value RETURN key");
assertFailingQuery("FOR i IN " + c.name() + " COLLECT key = i.value2 RETURN key");
assertFailingQuery("FOR i IN 1..10000 COLLECT key = i RETURN key");
@ -0,0 +1,289 @@
|
|||
/*jshint globalstrict:false, strict:false, maxlen: 500 */
|
||||
/*global assertTrue, assertEqual, assertNotEqual, AQL_EXECUTE, AQL_EXPLAIN */
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief tests for COLLECT w/ COUNT
|
||||
///
|
||||
/// @file
|
||||
///
|
||||
/// DISCLAIMER
|
||||
///
|
||||
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
|
||||
///
|
||||
/// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
/// you may not use this file except in compliance with the License.
|
||||
/// You may obtain a copy of the License at
|
||||
///
|
||||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||||
///
|
||||
/// Unless required by applicable law or agreed to in writing, software
|
||||
/// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
/// See the License for the specific language governing permissions and
|
||||
/// limitations under the License.
|
||||
///
|
||||
/// Copyright holder is triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// @author Jan Steemann
|
||||
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var jsunity = require("jsunity");
|
||||
var internal = require("internal");
|
||||
var errors = internal.errors;
|
||||
var db = require("@arangodb").db;
|
||||
var helper = require("@arangodb/aql-helper");
|
||||
var assertQueryError = helper.assertQueryError;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test suite
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function optimizerAggregateTestSuite () {
|
||||
var c;
|
||||
|
||||
return {
|
||||
setUp : function () {
|
||||
db._drop("UnitTestsCollection");
|
||||
c = db._create("UnitTestsCollection");
|
||||
|
||||
for (var i = 0; i < 2000; ++i) {
|
||||
c.save({ group: "test" + (i % 10), value1: i, value2: i % 13 });
|
||||
}
|
||||
},
|
||||
|
||||
tearDown : function () {
|
||||
db._drop("UnitTestsCollection");
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test invalid queries
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInvalidSyntax : function () {
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " AGGREGATE RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " AGGREGATE length = LENGTH(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT COUNT AGGREGATE length = LENGTH(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE length = LENGTH(i) WITH COUNT RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE length = LENGTH(i) WITH COUNT INTO x RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT WITH COUNT g AGGREGATE length = LENGTH(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group AGGREGATE WITH COUNT RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group AGGREGATE length = LENGTH(i) WITH COUNT RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group AGGREGATE length = LENGTH(i) INTO group RETURN 1");
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test invalid queries
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInvalidAggregateFunctions : function () {
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = 1 RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = i.test RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = i.test + 1 RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = LENGTH(i) + 1 RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = 1 + LENGTH(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = IS_NUMBER(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = IS_STRING(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = IS_ARRAY(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE c = IS_OBJECT(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE length = LENGTH(i), c = IS_OBJECT(i) RETURN 1");
|
||||
assertQueryError(errors.ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION.code, "FOR i IN " + c.name() + " COLLECT group = i.group AGGREGATE c = IS_OBJECT(i) RETURN 1");
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateAll : function () {
|
||||
var query = "FOR i IN " + c.name() + " COLLECT group = i.group AGGREGATE length = LENGTH(i.value1), min = MIN(i.value1), max = MAX(i.value1), sum = SUM(i.value1), avg = AVERAGE(i.value1) RETURN { group, length, min, max, sum, avg }";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(10, results.json.length);
|
||||
for (var i = 0; i < 10; ++i) {
|
||||
assertEqual("test" + i, results.json[i].group);
|
||||
assertEqual(200, results.json[i].length);
|
||||
assertEqual(i, results.json[i].min);
|
||||
assertEqual(1990 + i, results.json[i].max);
|
||||
assertEqual(199000 + i * 200, results.json[i].sum);
|
||||
assertEqual(995 + i, results.json[i].avg);
|
||||
}
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateExpression : function () {
|
||||
var query = "FOR i IN " + c.name() + " COLLECT group = i.group AGGREGATE length = LENGTH(1), min = MIN(i.value1 + 1), max = MAX(i.value1 * 2) RETURN { group, length, min, max }";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(10, results.json.length);
|
||||
for (var i = 0; i < 10; ++i) {
|
||||
assertEqual("test" + i, results.json[i].group);
|
||||
assertEqual(200, results.json[i].length);
|
||||
assertEqual(i + 1, results.json[i].min);
|
||||
assertEqual((1990 + i) * 2, results.json[i].max);
|
||||
}
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateAllReferToCollectvariable : function () {
|
||||
assertQueryError(errors.ERROR_QUERY_VARIABLE_NAME_UNKNOWN.code, "FOR i IN " + c.name() + " COLLECT group = i.group AGGREGATE length = LENGTH(group) RETURN { group, length }");
|
||||
assertQueryError(errors.ERROR_QUERY_VARIABLE_NAME_UNKNOWN.code, "FOR j IN " + c.name() + " COLLECT doc = j AGGREGATE length = LENGTH(doc) RETURN doc");
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateFiltered : function () {
|
||||
var query = "FOR i IN " + c.name() + " FILTER i.group == 'test4' COLLECT group = i.group AGGREGATE length = LENGTH(i.value1), min = MIN(i.value1), max = MAX(i.value1), sum = SUM(i.value1), avg = AVERAGE(i.value1) RETURN { group, length, min, max, sum, avg }";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(1, results.json.length);
|
||||
assertEqual("test4", results.json[0].group);
|
||||
assertEqual(200, results.json[0].length);
|
||||
assertEqual(4, results.json[0].min);
|
||||
assertEqual(1994, results.json[0].max);
|
||||
assertEqual(199800, results.json[0].sum);
|
||||
assertEqual(999, results.json[0].avg);
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateFilteredMulti : function () {
|
||||
var query = "FOR i IN " + c.name() + " FILTER i.group >= 'test2' && i.group <= 'test4' COLLECT group = i.group AGGREGATE length = LENGTH(i.value1), min = MIN(i.value1), max = MAX(i.value1), sum = SUM(i.value1), avg = AVERAGE(i.value1) RETURN { group, length, min, max, sum, avg }";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(3, results.json.length);
|
||||
for (var i = 2; i <= 4; ++i) {
|
||||
assertEqual("test" + i, results.json[i - 2].group);
|
||||
assertEqual(200, results.json[i - 2].length);
|
||||
assertEqual(i, results.json[i - 2].min);
|
||||
assertEqual(1990 + i, results.json[i - 2].max);
|
||||
assertEqual(199000 + i * 200, results.json[i - 2].sum);
|
||||
assertEqual(995 + i, results.json[i - 2].avg);
|
||||
}
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateFilteredEmpty : function () {
|
||||
var query = "FOR i IN " + c.name() + " FILTER i.group >= 'test99' COLLECT group = i.group AGGREGATE length = LENGTH(i.value1), min = MIN(i.value1), max = MAX(i.value1), sum = SUM(i.value1), avg = AVERAGE(i.value1) RETURN { group, length, min, max, sum, avg }";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(0, results.json.length);
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateFilteredBig : function () {
|
||||
var i;
|
||||
for (i = 0; i < 10000; ++i) {
|
||||
c.save({ age: 10 + (i % 80), type: 1 });
|
||||
}
|
||||
for (i = 0; i < 10000; ++i) {
|
||||
c.save({ age: 10 + (i % 80), type: 2 });
|
||||
}
|
||||
|
||||
var query = "FOR i IN " + c.name() + " FILTER i.age >= 20 && i.age < 50 && i.type == 1 COLLECT AGGREGATE length = LENGTH(i) RETURN length";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(1, results.json.length);
|
||||
assertEqual(125 * 30, results.json[0]);
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must not have a SortNode
|
||||
assertEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateNested : function () {
|
||||
var query = "FOR i IN 1..2 FOR j IN " + c.name() + " COLLECT AGGREGATE length = LENGTH(j) RETURN length";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(1, results.json.length);
|
||||
assertEqual(4000, results.json[0]);
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must not have a SortNode
|
||||
assertEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateSimple : function () {
|
||||
var query = "FOR i IN " + c.name() + " COLLECT class = i.group AGGREGATE length = LENGTH(i) RETURN [ class, length ]";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
assertEqual(10, results.json.length);
|
||||
for (var i = 0; i < results.json.length; ++i) {
|
||||
var group = results.json[i];
|
||||
assertTrue(Array.isArray(group));
|
||||
assertEqual("test" + i, group[0]);
|
||||
assertEqual(200, group[1]);
|
||||
}
|
||||
|
||||
var plan = AQL_EXPLAIN(query).plan;
|
||||
// must have a SortNode
|
||||
assertNotEqual(-1, plan.nodes.map(function(node) { return node.type; }).indexOf("SortNode"));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test aggregate shaped
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAggregateShaped : function () {
|
||||
var query = "FOR j IN " + c.name() + " COLLECT doc = j AGGREGATE length = LENGTH(j) RETURN doc";
|
||||
|
||||
var results = AQL_EXECUTE(query);
|
||||
// expectation is that we get 1000 different docs and do not crash (issue #1265)
|
||||
assertEqual(2000, results.json.length);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief executes the test suite
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
jsunity.run(optimizerAggregateTestSuite);
|
||||
|
||||
return jsunity.done();
|
||||
|
|
@@ -74,6 +74,9 @@ function optimizerCountTestSuite () {
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group WITH COUNT i RETURN 1");
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group WITH COUNT i INTO group RETURN 1");
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group WITH COUNT COUNT INTO group RETURN 1");

assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT AGGREGATE doc = MIN(i.group) WITH COUNT INTO group RETURN 1");
assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR i IN " + c.name() + " COLLECT class = i.group AGGREGATE doc = MIN(i.group) WITH COUNT INTO group RETURN 1");
},

////////////////////////////////////////////////////////////////////////////////
@@ -219,6 +219,7 @@ ERROR_QUERY_GEO_INDEX_MISSING,1570,"no suitable geo index found for geo restrict
ERROR_QUERY_FULLTEXT_INDEX_MISSING,1571,"no suitable fulltext index found for fulltext query on '%s'","Will be raised when a fulltext query is performed on a collection without a suitable fulltext index."
ERROR_QUERY_INVALID_DATE_VALUE,1572,"invalid date value","Will be raised when a value cannot be converted to a date."
ERROR_QUERY_MULTI_MODIFY,1573,"multi-modify query", "Will be raised when an AQL query contains more than one data-modifying operation."
ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION,1574,"invalid aggregate expression", "Will be raised when an AQL query contains an invalid aggregate expression."
ERROR_QUERY_COMPILE_TIME_OPTIONS,1575,"query options must be readable at query compile time", "Will be raised when an AQL data-modification query contains options that cannot be figured out at query compile time."
ERROR_QUERY_EXCEPTION_OPTIONS,1576,"query options expected", "Will be raised when an AQL data-modification query contains an invalid options specification."
ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION,1577,"collection '%s' used as expression operand", "Will be raised when a collection is used as an operand in an AQL expression."
@ -1,29 +1,11 @@
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// DISCLAIMER
|
||||
///
|
||||
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
|
||||
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
/// you may not use this file except in compliance with the License.
|
||||
/// You may obtain a copy of the License at
|
||||
///
|
||||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||||
///
|
||||
/// Unless required by applicable law or agreed to in writing, software
|
||||
/// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
/// See the License for the specific language governing permissions and
|
||||
/// limitations under the License.
|
||||
///
|
||||
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
|
||||
///
|
||||
/// @brief auto-generated file generated from errors.dat
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#include "Basics/Common.h"
|
||||
#include "./lib/Basics/voc-errors.h"
|
||||
|
||||
void TRI_InitializeErrorMessages() {
|
||||
void TRI_InitializeErrorMessages () {
|
||||
REG_ERROR(ERROR_NO_ERROR, "no error");
|
||||
REG_ERROR(ERROR_FAILED, "failed");
|
||||
REG_ERROR(ERROR_SYS_ERROR, "system error");
|
||||
|
@ -48,12 +30,9 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_REQUEST_CANCELED, "canceled request");
|
||||
REG_ERROR(ERROR_DEBUG, "intentional debug error");
|
||||
REG_ERROR(ERROR_AID_NOT_FOUND, "internal error with attribute ID in shaper");
|
||||
REG_ERROR(ERROR_LEGEND_INCOMPLETE,
|
||||
"internal error if a legend could not be created");
|
||||
REG_ERROR(ERROR_LEGEND_INCOMPLETE, "internal error if a legend could not be created");
|
||||
REG_ERROR(ERROR_IP_ADDRESS_INVALID, "IP address is invalid");
|
||||
REG_ERROR(ERROR_LEGEND_NOT_IN_WAL_FILE,
|
||||
"internal error if a legend for a marker does not yet exist in the "
|
||||
"same WAL file");
|
||||
REG_ERROR(ERROR_LEGEND_NOT_IN_WAL_FILE, "internal error if a legend for a marker does not yet exist in the same WAL file");
|
||||
REG_ERROR(ERROR_FILE_EXISTS, "file exists");
|
||||
REG_ERROR(ERROR_LOCKED, "locked");
|
||||
REG_ERROR(ERROR_DEADLOCK, "deadlock detected");
|
||||
|
@ -76,17 +55,14 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_ARANGO_DATAFILE_EMPTY, "datafile empty");
|
||||
REG_ERROR(ERROR_ARANGO_RECOVERY, "logfile recovery error");
|
||||
REG_ERROR(ERROR_ARANGO_CORRUPTED_DATAFILE, "corrupted datafile");
|
||||
REG_ERROR(ERROR_ARANGO_ILLEGAL_PARAMETER_FILE,
|
||||
"illegal or unreadable parameter file");
|
||||
REG_ERROR(ERROR_ARANGO_ILLEGAL_PARAMETER_FILE, "illegal or unreadable parameter file");
|
||||
REG_ERROR(ERROR_ARANGO_CORRUPTED_COLLECTION, "corrupted collection");
|
||||
REG_ERROR(ERROR_ARANGO_MMAP_FAILED, "mmap failed");
|
||||
REG_ERROR(ERROR_ARANGO_FILESYSTEM_FULL, "filesystem full");
|
||||
REG_ERROR(ERROR_ARANGO_NO_JOURNAL, "no journal");
|
||||
REG_ERROR(ERROR_ARANGO_DATAFILE_ALREADY_EXISTS,
|
||||
"cannot create/rename datafile because it already exists");
|
||||
REG_ERROR(ERROR_ARANGO_DATAFILE_ALREADY_EXISTS, "cannot create/rename datafile because it already exists");
|
||||
REG_ERROR(ERROR_ARANGO_DATADIR_LOCKED, "database directory is locked");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_DIRECTORY_ALREADY_EXISTS,
|
||||
"cannot create/rename collection because directory already exists");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_DIRECTORY_ALREADY_EXISTS, "cannot create/rename collection because directory already exists");
|
||||
REG_ERROR(ERROR_ARANGO_MSYNC_FAILED, "msync failed");
|
||||
REG_ERROR(ERROR_ARANGO_DATADIR_UNLOCKABLE, "cannot lock database directory");
|
||||
REG_ERROR(ERROR_ARANGO_SYNC_TIMEOUT, "sync timeout");
|
||||
|
@ -94,45 +70,35 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_ARANGO_DATADIR_INVALID, "invalid database directory");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_NOT_FOUND, "document not found");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_NOT_FOUND, "collection not found");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_PARAMETER_MISSING,
|
||||
"parameter 'collection' not found");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_PARAMETER_MISSING, "parameter 'collection' not found");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_HANDLE_BAD, "illegal document handle");
|
||||
REG_ERROR(ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL,
|
||||
"maximal size of journal too small");
|
||||
REG_ERROR(ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL, "maximal size of journal too small");
|
||||
REG_ERROR(ERROR_ARANGO_DUPLICATE_NAME, "duplicate name");
|
||||
REG_ERROR(ERROR_ARANGO_ILLEGAL_NAME, "illegal name");
|
||||
REG_ERROR(ERROR_ARANGO_NO_INDEX, "no suitable index known");
|
||||
REG_ERROR(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED,
|
||||
"unique constraint violated");
|
||||
REG_ERROR(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED, "unique constraint violated");
|
||||
REG_ERROR(ERROR_ARANGO_INDEX_NOT_FOUND, "index not found");
|
||||
REG_ERROR(ERROR_ARANGO_CROSS_COLLECTION_REQUEST,
|
||||
"cross collection request not allowed");
|
||||
REG_ERROR(ERROR_ARANGO_CROSS_COLLECTION_REQUEST, "cross collection request not allowed");
|
||||
REG_ERROR(ERROR_ARANGO_INDEX_HANDLE_BAD, "illegal index handle");
|
||||
REG_ERROR(ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED,
|
||||
"cap constraint already defined");
|
||||
REG_ERROR(ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED, "cap constraint already defined");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_TOO_LARGE, "document too large");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_NOT_UNLOADED,
|
||||
"collection must be unloaded");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_NOT_UNLOADED, "collection must be unloaded");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_TYPE_INVALID, "collection type invalid");
|
||||
REG_ERROR(ERROR_ARANGO_VALIDATION_FAILED, "validator failed");
|
||||
REG_ERROR(ERROR_ARANGO_ATTRIBUTE_PARSER_FAILED,
|
||||
"parsing attribute name definition failed");
|
||||
REG_ERROR(ERROR_ARANGO_ATTRIBUTE_PARSER_FAILED, "parsing attribute name definition failed");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_KEY_BAD, "illegal document key");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_KEY_UNEXPECTED, "unexpected document key");
|
||||
REG_ERROR(ERROR_ARANGO_DATADIR_NOT_WRITABLE,
|
||||
"server database directory not writable");
|
||||
REG_ERROR(ERROR_ARANGO_DATADIR_NOT_WRITABLE, "server database directory not writable");
|
||||
REG_ERROR(ERROR_ARANGO_OUT_OF_KEYS, "out of keys");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_KEY_MISSING, "missing document key");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_TYPE_INVALID, "invalid document type");
|
||||
REG_ERROR(ERROR_ARANGO_DATABASE_NOT_FOUND, "database not found");
|
||||
REG_ERROR(ERROR_ARANGO_DATABASE_NAME_INVALID, "database name invalid");
|
||||
REG_ERROR(ERROR_ARANGO_USE_SYSTEM_DATABASE,
|
||||
"operation only allowed in system database");
|
||||
REG_ERROR(ERROR_ARANGO_USE_SYSTEM_DATABASE, "operation only allowed in system database");
|
||||
REG_ERROR(ERROR_ARANGO_ENDPOINT_NOT_FOUND, "endpoint not found");
|
||||
REG_ERROR(ERROR_ARANGO_INVALID_KEY_GENERATOR, "invalid key generator");
|
||||
REG_ERROR(ERROR_ARANGO_INVALID_EDGE_ATTRIBUTE, "edge attribute missing");
|
||||
REG_ERROR(ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING,
|
||||
"index insertion warning - attribute missing in document");
|
||||
REG_ERROR(ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING, "index insertion warning - attribute missing in document");
|
||||
REG_ERROR(ERROR_ARANGO_INDEX_CREATION_FAILED, "index creation failed");
|
||||
REG_ERROR(ERROR_ARANGO_WRITE_THROTTLE_TIMEOUT, "write-throttling timeout");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_TYPE_MISMATCH, "collection type mismatch");
|
||||
|
@ -148,10 +114,8 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_REPLICATION_UNEXPECTED_MARKER, "unexpected marker");
|
||||
REG_ERROR(ERROR_REPLICATION_INVALID_APPLIER_STATE, "invalid applier state");
|
||||
REG_ERROR(ERROR_REPLICATION_UNEXPECTED_TRANSACTION, "invalid transaction");
|
||||
REG_ERROR(ERROR_REPLICATION_INVALID_APPLIER_CONFIGURATION,
|
||||
"invalid replication applier configuration");
|
||||
REG_ERROR(ERROR_REPLICATION_RUNNING,
|
||||
"cannot perform operation while applier is running");
|
||||
REG_ERROR(ERROR_REPLICATION_INVALID_APPLIER_CONFIGURATION, "invalid replication applier configuration");
|
||||
REG_ERROR(ERROR_REPLICATION_RUNNING, "cannot perform operation while applier is running");
|
||||
REG_ERROR(ERROR_REPLICATION_APPLIER_STOPPED, "replication stopped");
|
||||
REG_ERROR(ERROR_REPLICATION_NO_START_TICK, "no start tick");
|
||||
REG_ERROR(ERROR_REPLICATION_START_TICK_NOT_PRESENT, "start tick not present");
|
||||
|
@ -159,104 +123,67 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_CLUSTER_NO_COORDINATOR_HEADER, "missing coordinator header");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_LOCK_PLAN, "could not lock plan in agency");
|
||||
REG_ERROR(ERROR_CLUSTER_COLLECTION_ID_EXISTS, "collection ID already exists");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN,
|
||||
"could not create collection in plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_READ_CURRENT_VERSION,
|
||||
"could not read version in current in agency");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION,
|
||||
"could not create collection");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN, "could not create collection in plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_READ_CURRENT_VERSION, "could not read version in current in agency");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION, "could not create collection");
|
||||
REG_ERROR(ERROR_CLUSTER_TIMEOUT, "timeout in cluster operation");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_PLAN,
|
||||
"could not remove collection from plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_CURRENT,
|
||||
"could not remove collection from current");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_DATABASE_IN_PLAN,
|
||||
"could not create database in plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_DATABASE,
|
||||
"could not create database");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_PLAN,
|
||||
"could not remove database from plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_CURRENT,
|
||||
"could not remove database from current");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_PLAN, "could not remove collection from plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_CURRENT, "could not remove collection from current");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_DATABASE_IN_PLAN, "could not create database in plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_CREATE_DATABASE, "could not create database");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_PLAN, "could not remove database from plan");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_CURRENT, "could not remove database from current");
|
||||
REG_ERROR(ERROR_CLUSTER_SHARD_GONE, "no responsible shard found");
|
||||
REG_ERROR(ERROR_CLUSTER_CONNECTION_LOST,
|
||||
"cluster internal HTTP connection broken");
|
||||
REG_ERROR(ERROR_CLUSTER_MUST_NOT_SPECIFY_KEY,
|
||||
"must not specify _key for this collection");
|
||||
REG_ERROR(ERROR_CLUSTER_GOT_CONTRADICTING_ANSWERS,
|
||||
"got contradicting answers from different shards");
|
||||
REG_ERROR(ERROR_CLUSTER_NOT_ALL_SHARDING_ATTRIBUTES_GIVEN,
|
||||
"not all sharding attributes given");
|
||||
REG_ERROR(ERROR_CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES,
|
||||
"must not change the value of a shard key attribute");
|
||||
REG_ERROR(ERROR_CLUSTER_CONNECTION_LOST, "cluster internal HTTP connection broken");
|
||||
REG_ERROR(ERROR_CLUSTER_MUST_NOT_SPECIFY_KEY, "must not specify _key for this collection");
|
||||
REG_ERROR(ERROR_CLUSTER_GOT_CONTRADICTING_ANSWERS, "got contradicting answers from different shards");
|
||||
REG_ERROR(ERROR_CLUSTER_NOT_ALL_SHARDING_ATTRIBUTES_GIVEN, "not all sharding attributes given");
|
||||
REG_ERROR(ERROR_CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES, "must not change the value of a shard key attribute");
|
||||
REG_ERROR(ERROR_CLUSTER_UNSUPPORTED, "unsupported operation or parameter");
|
||||
REG_ERROR(ERROR_CLUSTER_ONLY_ON_COORDINATOR,
|
||||
"this operation is only valid on a coordinator in a cluster");
|
||||
REG_ERROR(ERROR_CLUSTER_ONLY_ON_COORDINATOR, "this operation is only valid on a coordinator in a cluster");
|
||||
REG_ERROR(ERROR_CLUSTER_READING_PLAN_AGENCY, "error reading Plan in agency");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_TRUNCATE_COLLECTION,
|
||||
"could not truncate collection");
|
||||
REG_ERROR(ERROR_CLUSTER_AQL_COMMUNICATION,
|
||||
"error in cluster internal communication for AQL");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_NOT_FOUND_OR_SHARDING_ATTRIBUTES_CHANGED,
|
||||
"document not found or sharding attributes changed");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_DETERMINE_ID,
|
||||
"could not determine my ID from my local info");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_TRUNCATE_COLLECTION, "could not truncate collection");
|
||||
REG_ERROR(ERROR_CLUSTER_AQL_COMMUNICATION, "error in cluster internal communication for AQL");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_NOT_FOUND_OR_SHARDING_ATTRIBUTES_CHANGED, "document not found or sharding attributes changed");
|
||||
REG_ERROR(ERROR_CLUSTER_COULD_NOT_DETERMINE_ID, "could not determine my ID from my local info");
|
||||
REG_ERROR(ERROR_QUERY_KILLED, "query killed");
|
||||
REG_ERROR(ERROR_QUERY_PARSE, "%s");
|
||||
REG_ERROR(ERROR_QUERY_EMPTY, "query is empty");
|
||||
REG_ERROR(ERROR_QUERY_SCRIPT, "runtime error '%s'");
|
||||
REG_ERROR(ERROR_QUERY_NUMBER_OUT_OF_RANGE, "number out of range");
|
||||
REG_ERROR(ERROR_QUERY_VARIABLE_NAME_INVALID,
|
||||
"variable name '%s' has an invalid format");
|
||||
REG_ERROR(ERROR_QUERY_VARIABLE_REDECLARED,
|
||||
"variable '%s' is assigned multiple times");
|
||||
REG_ERROR(ERROR_QUERY_VARIABLE_NAME_INVALID, "variable name '%s' has an invalid format");
|
||||
REG_ERROR(ERROR_QUERY_VARIABLE_REDECLARED, "variable '%s' is assigned multiple times");
|
||||
REG_ERROR(ERROR_QUERY_VARIABLE_NAME_UNKNOWN, "unknown variable '%s'");
|
||||
REG_ERROR(ERROR_QUERY_COLLECTION_LOCK_FAILED,
|
||||
"unable to read-lock collection %s");
|
||||
REG_ERROR(ERROR_QUERY_COLLECTION_LOCK_FAILED, "unable to read-lock collection %s");
|
||||
REG_ERROR(ERROR_QUERY_TOO_MANY_COLLECTIONS, "too many collections");
|
||||
REG_ERROR(ERROR_QUERY_DOCUMENT_ATTRIBUTE_REDECLARED,
|
||||
"document attribute '%s' is assigned multiple times");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_NAME_UNKNOWN,
|
||||
"usage of unknown function '%s()'");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH,
|
||||
"invalid number of arguments for function '%s()', expected number "
|
||||
"of arguments: minimum: %d, maximum: %d");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH,
|
||||
"invalid argument type in call to function '%s()'");
|
||||
REG_ERROR(ERROR_QUERY_DOCUMENT_ATTRIBUTE_REDECLARED, "document attribute '%s' is assigned multiple times");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_NAME_UNKNOWN, "usage of unknown function '%s()'");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH, "invalid number of arguments for function '%s()', expected number of arguments: minimum: %d, maximum: %d");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "invalid argument type in call to function '%s()'");
|
||||
REG_ERROR(ERROR_QUERY_INVALID_REGEX, "invalid regex value");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETERS_INVALID,
|
||||
"invalid structure of bind parameters");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_MISSING,
|
||||
"no value specified for declared bind parameter '%s'");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_UNDECLARED,
|
||||
"bind parameter '%s' was not declared in the query");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_TYPE,
|
||||
"bind parameter '%s' has an invalid value or type");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETERS_INVALID, "invalid structure of bind parameters");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_MISSING, "no value specified for declared bind parameter '%s'");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_UNDECLARED, "bind parameter '%s' was not declared in the query");
|
||||
REG_ERROR(ERROR_QUERY_BIND_PARAMETER_TYPE, "bind parameter '%s' has an invalid value or type");
|
||||
REG_ERROR(ERROR_QUERY_INVALID_LOGICAL_VALUE, "invalid logical value");
|
||||
REG_ERROR(ERROR_QUERY_INVALID_ARITHMETIC_VALUE, "invalid arithmetic value");
|
||||
REG_ERROR(ERROR_QUERY_DIVISION_BY_ZERO, "division by zero");
|
||||
REG_ERROR(ERROR_QUERY_ARRAY_EXPECTED, "array expected");
|
||||
REG_ERROR(ERROR_QUERY_FAIL_CALLED, "FAIL(%s) called");
|
||||
REG_ERROR(ERROR_QUERY_GEO_INDEX_MISSING,
|
||||
"no suitable geo index found for geo restriction on '%s'");
|
||||
REG_ERROR(ERROR_QUERY_FULLTEXT_INDEX_MISSING,
|
||||
"no suitable fulltext index found for fulltext query on '%s'");
|
||||
REG_ERROR(ERROR_QUERY_GEO_INDEX_MISSING, "no suitable geo index found for geo restriction on '%s'");
|
||||
REG_ERROR(ERROR_QUERY_FULLTEXT_INDEX_MISSING, "no suitable fulltext index found for fulltext query on '%s'");
|
||||
REG_ERROR(ERROR_QUERY_INVALID_DATE_VALUE, "invalid date value");
|
||||
REG_ERROR(ERROR_QUERY_MULTI_MODIFY, "multi-modify query");
|
||||
REG_ERROR(ERROR_QUERY_COMPILE_TIME_OPTIONS,
|
||||
"query options must be readable at query compile time");
|
||||
REG_ERROR(ERROR_QUERY_INVALID_AGGREGATE_EXPRESSION, "invalid aggregate expression");
|
||||
REG_ERROR(ERROR_QUERY_COMPILE_TIME_OPTIONS, "query options must be readable at query compile time");
|
||||
REG_ERROR(ERROR_QUERY_EXCEPTION_OPTIONS, "query options expected");
|
||||
REG_ERROR(ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION,
|
||||
"collection '%s' used as expression operand");
|
||||
REG_ERROR(ERROR_QUERY_DISALLOWED_DYNAMIC_CALL,
|
||||
"disallowed dynamic call to '%s'");
|
||||
REG_ERROR(ERROR_QUERY_ACCESS_AFTER_MODIFICATION,
|
||||
"access after data-modification");
|
||||
REG_ERROR(ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION, "collection '%s' used as expression operand");
|
||||
REG_ERROR(ERROR_QUERY_DISALLOWED_DYNAMIC_CALL, "disallowed dynamic call to '%s'");
|
||||
REG_ERROR(ERROR_QUERY_ACCESS_AFTER_MODIFICATION, "access after data-modification");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_INVALID_NAME, "invalid user function name");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_INVALID_CODE, "invalid user function code");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_NOT_FOUND, "user function '%s()' not found");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_RUNTIME_ERROR,
|
||||
"user function runtime error: %s");
|
||||
REG_ERROR(ERROR_QUERY_FUNCTION_RUNTIME_ERROR, "user function runtime error: %s");
|
||||
REG_ERROR(ERROR_QUERY_BAD_JSON_PLAN, "bad execution plan JSON");
|
||||
REG_ERROR(ERROR_QUERY_NOT_FOUND, "query ID not found");
|
||||
REG_ERROR(ERROR_QUERY_IN_USE, "query with this ID is in use");
|
||||
|
@ -264,10 +191,8 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_CURSOR_BUSY, "cursor is busy");
|
||||
REG_ERROR(ERROR_TRANSACTION_INTERNAL, "internal transaction error");
|
||||
REG_ERROR(ERROR_TRANSACTION_NESTED, "nested transactions detected");
|
||||
REG_ERROR(ERROR_TRANSACTION_UNREGISTERED_COLLECTION,
|
||||
"unregistered collection used in transaction");
|
||||
REG_ERROR(ERROR_TRANSACTION_DISALLOWED_OPERATION,
|
||||
"disallowed operation inside transaction");
|
||||
REG_ERROR(ERROR_TRANSACTION_UNREGISTERED_COLLECTION, "unregistered collection used in transaction");
|
||||
REG_ERROR(ERROR_TRANSACTION_DISALLOWED_OPERATION, "disallowed operation inside transaction");
|
||||
REG_ERROR(ERROR_TRANSACTION_ABORTED, "transaction aborted");
|
||||
REG_ERROR(ERROR_USER_INVALID_NAME, "invalid user name");
|
||||
REG_ERROR(ERROR_USER_INVALID_PASSWORD, "invalid password");
|
||||
|
@ -296,44 +221,28 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_GRAPH_INVALID_EDGE, "invalid edge");
|
||||
REG_ERROR(ERROR_GRAPH_COULD_NOT_CREATE_EDGE, "could not create edge");
|
||||
REG_ERROR(ERROR_GRAPH_COULD_NOT_CHANGE_EDGE, "could not change edge");
|
||||
REG_ERROR(
|
||||
ERROR_GRAPH_TOO_MANY_ITERATIONS,
|
||||
"too many iterations - try increasing the value of 'maxIterations'");
|
||||
REG_ERROR(ERROR_GRAPH_TOO_MANY_ITERATIONS, "too many iterations - try increasing the value of 'maxIterations'");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_FILTER_RESULT, "invalid filter result");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_MULTI_USE,
|
||||
"multi use of edge collection in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USE_IN_MULTI_GRAPHS,
|
||||
"edge collection already used in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_MULTI_USE, "multi use of edge collection in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USE_IN_MULTI_GRAPHS, "edge collection already used in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_CREATE_MISSING_NAME, "missing graph name");
|
||||
REG_ERROR(ERROR_GRAPH_CREATE_MALFORMED_EDGE_DEFINITION,
|
||||
"malformed edge definition");
|
||||
REG_ERROR(ERROR_GRAPH_CREATE_MALFORMED_EDGE_DEFINITION, "malformed edge definition");
|
||||
REG_ERROR(ERROR_GRAPH_NOT_FOUND, "graph not found");
|
||||
REG_ERROR(ERROR_GRAPH_DUPLICATE, "graph already exists");
|
||||
REG_ERROR(ERROR_GRAPH_VERTEX_COL_DOES_NOT_EXIST,
|
||||
"vertex collection does not exist or is not part of the graph");
|
||||
REG_ERROR(ERROR_GRAPH_WRONG_COLLECTION_TYPE_VERTEX,
|
||||
"not a vertex collection");
|
||||
REG_ERROR(ERROR_GRAPH_VERTEX_COL_DOES_NOT_EXIST, "vertex collection does not exist or is not part of the graph");
|
||||
REG_ERROR(ERROR_GRAPH_WRONG_COLLECTION_TYPE_VERTEX, "not a vertex collection");
|
||||
REG_ERROR(ERROR_GRAPH_NOT_IN_ORPHAN_COLLECTION, "not in orphan collection");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USED_IN_EDGE_DEF,
|
||||
"collection already used in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_EDGE_COLLECTION_NOT_USED,
|
||||
"edge collection not used in graph");
|
||||
REG_ERROR(ERROR_GRAPH_NOT_AN_ARANGO_COLLECTION,
|
||||
" is not an ArangoCollection");
|
||||
REG_ERROR(ERROR_GRAPH_NO_GRAPH_COLLECTION,
|
||||
"collection _graphs does not exist");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT_STRING,
|
||||
"Invalid example type. Has to be String, Array or Object");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT,
|
||||
"Invalid example type. Has to be Array or Object");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_NUMBER_OF_ARGUMENTS,
|
||||
"Invalid number of arguments. Expected: ");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USED_IN_EDGE_DEF, "collection already used in edge def");
|
||||
REG_ERROR(ERROR_GRAPH_EDGE_COLLECTION_NOT_USED, "edge collection not used in graph");
|
||||
REG_ERROR(ERROR_GRAPH_NOT_AN_ARANGO_COLLECTION, " is not an ArangoCollection");
|
||||
REG_ERROR(ERROR_GRAPH_NO_GRAPH_COLLECTION, "collection _graphs does not exist");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT_STRING, "Invalid example type. Has to be String, Array or Object");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT, "Invalid example type. Has to be Array or Object");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_NUMBER_OF_ARGUMENTS, "Invalid number of arguments. Expected: ");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_PARAMETER, "Invalid parameter type.");
|
||||
REG_ERROR(ERROR_GRAPH_INVALID_ID, "Invalid id");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USED_IN_ORPHANS,
|
||||
"collection used in orphans");
|
||||
REG_ERROR(ERROR_GRAPH_EDGE_COL_DOES_NOT_EXIST,
|
||||
"edge collection does not exist or is not part of the graph");
|
||||
REG_ERROR(ERROR_GRAPH_COLLECTION_USED_IN_ORPHANS, "collection used in orphans");
|
||||
REG_ERROR(ERROR_GRAPH_EDGE_COL_DOES_NOT_EXIST, "edge collection does not exist or is not part of the graph");
|
||||
REG_ERROR(ERROR_SESSION_UNKNOWN, "unknown session");
|
||||
REG_ERROR(ERROR_SESSION_EXPIRED, "session expired");
|
||||
REG_ERROR(SIMPLE_CLIENT_UNKNOWN_ERROR, "unknown client error");
|
||||
|
@ -342,10 +251,8 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(SIMPLE_CLIENT_COULD_NOT_READ, "could not read from server");
|
||||
REG_ERROR(ERROR_MALFORMED_MANIFEST_FILE, "malformed manifest file");
|
||||
REG_ERROR(ERROR_INVALID_APPLICATION_MANIFEST, "manifest file is invalid");
|
||||
REG_ERROR(ERROR_MANIFEST_FILE_ATTRIBUTE_MISSING,
|
||||
"missing manifest attribute");
|
||||
REG_ERROR(ERROR_CANNOT_EXTRACT_APPLICATION_ROOT,
|
||||
"unable to extract app root path");
|
||||
REG_ERROR(ERROR_MANIFEST_FILE_ATTRIBUTE_MISSING, "missing manifest attribute");
|
||||
REG_ERROR(ERROR_CANNOT_EXTRACT_APPLICATION_ROOT, "unable to extract app root path");
|
||||
REG_ERROR(ERROR_INVALID_FOXX_OPTIONS, "invalid foxx options");
|
||||
REG_ERROR(ERROR_FAILED_TO_EXECUTE_SCRIPT, "failed to execute script");
|
||||
REG_ERROR(ERROR_SYNTAX_ERROR_IN_SCRIPT, "syntax error in script");
|
||||
|
@ -359,19 +266,16 @@ void TRI_InitializeErrorMessages() {
|
|||
REG_ERROR(ERROR_MODULE_FAILURE, "failed to invoke module");
|
||||
REG_ERROR(ERROR_MODULE_UNKNOWN_FILE_TYPE, "unknown file type");
|
||||
REG_ERROR(ERROR_MODULE_PATH_MUST_BE_ABSOLUTE, "path must be absolute");
|
||||
REG_ERROR(ERROR_MODULE_CAN_NOT_ESCAPE,
|
||||
"cannot use '..' to escape top-level-directory");
|
||||
REG_ERROR(ERROR_MODULE_CAN_NOT_ESCAPE, "cannot use '..' to escape top-level-directory");
|
||||
REG_ERROR(ERROR_MODULE_DRIVE_LETTER, "drive local path is not supported");
|
||||
REG_ERROR(ERROR_MODULE_BAD_MODULE_ORIGIN, "corrupted module origin");
|
||||
REG_ERROR(ERROR_MODULE_BAD_PACKAGE_ORIGIN, "corrupted package origin");
|
||||
REG_ERROR(ERROR_MODULE_DOCUMENT_IS_EMPTY, "no content");
|
||||
REG_ERROR(ERROR_MODULE_MAIN_NOT_READABLE, "cannot read main file");
|
||||
REG_ERROR(ERROR_MODULE_MAIN_NOT_JS, "main file is not of type 'js'");
|
||||
REG_ERROR(RESULT_ELEMENT_EXISTS,
|
||||
"element not inserted into structure, because it already exists");
|
||||
REG_ERROR(RESULT_ELEMENT_EXISTS, "element not inserted into structure, because it already exists");
|
||||
REG_ERROR(RESULT_ELEMENT_NOT_FOUND, "element not found in structure");
|
||||
REG_ERROR(ERROR_APP_ALREADY_EXISTS,
|
||||
"newest version of app already installed");
|
||||
REG_ERROR(ERROR_APP_ALREADY_EXISTS, "newest version of app already installed");
|
||||
REG_ERROR(ERROR_QUEUE_ALREADY_EXISTS, "named queue already exists");
|
||||
REG_ERROR(ERROR_DISPATCHER_IS_STOPPING, "dispatcher stopped");
|
||||
REG_ERROR(ERROR_QUEUE_UNKNOWN, "named queue does not exist");
|
||||
|
|
File diff suppressed because it is too large