mirror of https://gitee.com/bigwinds/arangodb

commit ba41701bf8
Merge branch 'devel' of ssh://github.com/triAGENS/ArangoDB into devel
@@ -494,13 +494,13 @@ Json AqlValue::toJson (triagens::arango::AqlTransaction* trx,
 if (TRI_IS_EDGE_MARKER(_marker)) {
 // _from
-std::string from(trx->resolver()->getCollectionName(TRI_EXTRACT_MARKER_FROM_CID(_marker)));
+std::string from(trx->resolver()->getCollectionNameCluster(TRI_EXTRACT_MARKER_FROM_CID(_marker)));
 from.push_back('/');
 from.append(TRI_EXTRACT_MARKER_FROM_KEY(_marker));
 json(TRI_VOC_ATTRIBUTE_FROM, Json(from));

 // _to
-std::string to(trx->resolver()->getCollectionName(TRI_EXTRACT_MARKER_TO_CID(_marker)));
+std::string to(trx->resolver()->getCollectionNameCluster(TRI_EXTRACT_MARKER_TO_CID(_marker)));
 to.push_back('/');
 to.append(TRI_EXTRACT_MARKER_TO_KEY(_marker));
 json(TRI_VOC_ATTRIBUTE_TO, Json(to));

@@ -605,13 +605,13 @@ Json AqlValue::extractArrayMember (triagens::arango::AqlTransaction* trx,
 return Json(TRI_UNKNOWN_MEM_ZONE, JsonHelper::uint64String(TRI_UNKNOWN_MEM_ZONE, rid));
 }
 else if (strcmp(name, TRI_VOC_ATTRIBUTE_FROM) == 0) {
-std::string from(trx->resolver()->getCollectionName(TRI_EXTRACT_MARKER_FROM_CID(_marker)));
+std::string from(trx->resolver()->getCollectionNameCluster(TRI_EXTRACT_MARKER_FROM_CID(_marker)));
 from.push_back('/');
 from.append(TRI_EXTRACT_MARKER_FROM_KEY(_marker));
 return Json(TRI_UNKNOWN_MEM_ZONE, from);
 }
 else if (strcmp(name, TRI_VOC_ATTRIBUTE_TO) == 0) {
-std::string to(trx->resolver()->getCollectionName(TRI_EXTRACT_MARKER_TO_CID(_marker)));
+std::string to(trx->resolver()->getCollectionNameCluster(TRI_EXTRACT_MARKER_TO_CID(_marker)));
 to.push_back('/');
 to.append(TRI_EXTRACT_MARKER_TO_KEY(_marker));
 return Json(TRI_UNKNOWN_MEM_ZONE, to);
@@ -31,6 +31,7 @@
 #include "Aql/ExecutionEngine.h"
 #include "Basics/StringUtils.h"
 #include "Cluster/ClusterInfo.h"
+#include "Cluster/ClusterMethods.h"
 #include "Utils/Exception.h"
 #include "VocBase/document-collection.h"
 #include "VocBase/transaction.h"

@@ -80,8 +81,12 @@ Collection::~Collection () {
 size_t Collection::count () const {
 if (numDocuments == UNINITIALIZED) {
 if (ExecutionEngine::isCoordinator()) {
-/// TODO: determine the proper number of documents in the coordinator case
-numDocuments = 1000;
+uint64_t result;
+int res = triagens::arango::countOnCoordinator(vocbase->_name, name, result);
+if (res != TRI_ERROR_NO_ERROR) {
+THROW_ARANGO_EXCEPTION_MESSAGE(res, "could not determine number of documents in collection");
+}
+numDocuments = static_cast<int64_t>(result);
 }
 else {
 auto document = documentCollection();

@@ -197,6 +202,29 @@ void Collection::fillIndexes () const {
 }
 }
 }
+else if (ExecutionEngine::isDBServer()) {
+TRI_ASSERT(collection != nullptr);
+auto document = documentCollection();
+
+// lookup collection in agency by plan id
+auto clusterInfo = triagens::arango::ClusterInfo::instance();
+auto collectionInfo = clusterInfo->getCollection(std::string(vocbase->_name), triagens::basics::StringUtils::itoa(document->_info._planId));
+if (collectionInfo.get() == nullptr || (*collectionInfo).empty()) {
+THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "collection not found");
+}
+
+TRI_json_t const* json = (*collectionInfo).getIndexes();
+size_t const n = document->_allIndexes._length;
+indexes.reserve(n);
+
+// register indexes
+for (size_t i = 0; i < n; ++i) {
+TRI_json_t const* v = TRI_LookupListJson(json, i);
+if (v != nullptr) {
+indexes.emplace_back(new Index(v));
+}
+}
+}
 else {
 // local collection
 TRI_ASSERT(collection != nullptr);
@@ -235,19 +235,28 @@ int ExecutionBlock::initialize () {
 ////////////////////////////////////////////////////////////////////////////////

 int ExecutionBlock::shutdown () {
-for (auto it = _dependencies.begin(); it != _dependencies.end(); ++it) {
-int res = (*it)->shutdown();
-
-if (res != TRI_ERROR_NO_ERROR) {
-return res;
-}
-}
+int ret = TRI_ERROR_NO_ERROR;
+int res;

 for (auto it = _buffer.begin(); it != _buffer.end(); ++it) {
 delete *it;
 }
 _buffer.clear();
-return TRI_ERROR_NO_ERROR;
+
+for (auto it = _dependencies.begin(); it != _dependencies.end(); ++it) {
+try {
+res = (*it)->shutdown();
+}
+catch (...) {
+ret = TRI_ERROR_INTERNAL;
+}
+
+if (res != TRI_ERROR_NO_ERROR) {
+ret = res;
+}
+}
+
+return ret;
 }

 ////////////////////////////////////////////////////////////////////////////////

@@ -803,6 +812,7 @@ IndexRangeBlock::IndexRangeBlock (ExecutionEngine* engine,
 _allBoundsConstant(true) {

 std::vector<std::vector<RangeInfo>> const& orRanges = en->_ranges;
+TRI_ASSERT(en->_index != nullptr);

 TRI_ASSERT(orRanges.size() == 1); // OR expressions not yet implemented

@@ -888,7 +898,6 @@ int IndexRangeBlock::initialize () {
 }

 bool IndexRangeBlock::readIndex () {
-
 // This is either called from initialize if all bounds are constant,
 // in this case it is never called again. If there is at least one
 // variable bound, then readIndex is called once for every item coming

@@ -908,6 +917,8 @@ bool IndexRangeBlock::readIndex () {
 auto en = static_cast<IndexRangeNode const*>(getPlanNode());
 IndexOrCondition const* condition = &en->_ranges;

+TRI_ASSERT(en->_index != nullptr);
+
 std::unique_ptr<IndexOrCondition> newCondition;

 // Find out about the actual values for the bounds in the variable bound case:
@@ -4227,20 +4238,33 @@ size_t DistributeBlock::sendToClient (AqlValue val) {
 /// @brief local helper to throw an exception if a HTTP request went wrong
 ////////////////////////////////////////////////////////////////////////////////

-static void throwExceptionAfterBadSyncRequest (ClusterCommResult* res,
+static bool throwExceptionAfterBadSyncRequest (ClusterCommResult* res,
 bool isShutdown) {
 if (res->status == CL_COMM_TIMEOUT) {
+std::string errorMessage;
+errorMessage += std::string("Timeout in communication with shard '") +
+std::string(res->shardID) +
+std::string("' on cluster node '") +
+std::string(res->serverID) +
+std::string("' failed.");

 // No reply, we give up:
 THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_CLUSTER_TIMEOUT,
-"timeout in cluster AQL operation");
+errorMessage);
 }

 if (res->status == CL_COMM_ERROR) {
+std::string errorMessage;
 // This could be a broken connection or an Http error:
 if (res->result == nullptr || ! res->result->isComplete()) {
 // there is no result
+errorMessage += std::string("Empty result in communication with shard '") +
+std::string(res->shardID) +
+std::string("' on cluster node '") +
+std::string(res->serverID) +
+std::string("' failed.");
 THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_CLUSTER_CONNECTION_LOST,
-"lost connection within cluster");
+errorMessage);
 }

 StringBuffer const& responseBodyBuf(res->result->getBody());

@@ -4248,20 +4272,39 @@ static void throwExceptionAfterBadSyncRequest (ClusterCommResult* res,

 // extract error number and message from response
 int errorNum = TRI_ERROR_NO_ERROR;
-std::string errorMessage;
 TRI_json_t* json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, responseBodyBuf.c_str());

-if (TRI_IsArrayJson(json)) {
-TRI_json_t const* v;
+if (JsonHelper::getBooleanValue(json, "error", true)) {
+errorNum = TRI_ERROR_INTERNAL;
+
+errorMessage += std::string("Error message received from shard '") +
+std::string(res->shardID) +
+std::string("' on cluster node '") +
+std::string(res->serverID) +
+std::string("': ");
+}
+
+if (TRI_IsArrayJson(json)) {
+TRI_json_t const* v = TRI_LookupArrayJson(json, "errorNum");

-v = TRI_LookupArrayJson(json, "errorNum");
 if (TRI_IsNumberJson(v)) {
-errorNum = static_cast<int>(v->_value._number);
+if (static_cast<int>(v->_value._number) != TRI_ERROR_NO_ERROR) {
+/* if we've got an error num, error has to be true. */
+TRI_ASSERT(errorNum == TRI_ERROR_INTERNAL);
+errorNum = static_cast<int>(v->_value._number);
+}
 }

 v = TRI_LookupArrayJson(json, "errorMessage");
 if (TRI_IsStringJson(v)) {
-errorMessage = std::string(v->_value._string.data, v->_value._string.length - 1);
+errorMessage += std::string(v->_value._string.data, v->_value._string.length - 1);
 }
+else {
+errorMessage += std::string("(No valid error in response)");
+}
+}
+else {
+errorMessage += std::string("(No valid response)");
 }

 if (json != nullptr) {

@@ -4271,10 +4314,10 @@ static void throwExceptionAfterBadSyncRequest (ClusterCommResult* res,
 if (isShutdown &&
 errorNum == TRI_ERROR_QUERY_NOT_FOUND) {
 // this error may happen on shutdown and is thus tolerated
-return;
+// pass the info to the caller who can opt to ignore this error
+return true;
 }

 // In this case a proper HTTP error was reported by the DBserver,
 if (errorNum > 0 && ! errorMessage.empty()) {
 THROW_ARANGO_EXCEPTION_MESSAGE(errorNum, errorMessage);

@@ -4283,6 +4326,8 @@ static void throwExceptionAfterBadSyncRequest (ClusterCommResult* res,
 // default error
 THROW_ARANGO_EXCEPTION(TRI_ERROR_CLUSTER_AQL_COMMUNICATION);
 }
+
+return false;
 }

 ////////////////////////////////////////////////////////////////////////////////

@@ -4406,7 +4451,10 @@ int RemoteBlock::shutdown () {
 res.reset(sendRequest(rest::HttpRequest::HTTP_REQUEST_PUT,
 "/_api/aql/shutdown/",
 string()));
-throwExceptionAfterBadSyncRequest(res.get(), true);
+if (throwExceptionAfterBadSyncRequest(res.get(), true)) {
+// artificially ignore error in case query was not found during shutdown
+return TRI_ERROR_NO_ERROR;
+}

 // If we get here, then res->result is the response which will be
 // a serialized AqlItemBlock:

@@ -4414,6 +4462,7 @@ int RemoteBlock::shutdown () {
 Json responseBodyJson(TRI_UNKNOWN_MEM_ZONE,
 TRI_JsonString(TRI_UNKNOWN_MEM_ZONE,
 responseBodyBuf.begin()));
+
 return JsonHelper::getNumericValue<int>
 (responseBodyJson.json(), "code", TRI_ERROR_INTERNAL);
 }
@@ -188,6 +188,14 @@ bool ExecutionEngine::isCoordinator () {
 return triagens::arango::ServerState::instance()->isCoordinator();
 }

+////////////////////////////////////////////////////////////////////////////////
+// @brief whether or not we are a db server
+////////////////////////////////////////////////////////////////////////////////
+
+bool ExecutionEngine::isDBServer () {
+return triagens::arango::ServerState::instance()->isDBserver();
+}
+
 // -----------------------------------------------------------------------------
 // --SECTION-- walker class for ExecutionNode to instanciate
 // -----------------------------------------------------------------------------

@@ -331,6 +339,11 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {
 Query* otherQuery = query->clone(PART_DEPENDENT);
 otherQuery->engine(engine);

+int res = otherQuery->trx()->begin();
+if (res != TRI_ERROR_NO_ERROR) {
+THROW_ARANGO_EXCEPTION_MESSAGE(res, "could not begin transaction");
+}
+
 auto* newPlan = new ExecutionPlan(otherQuery->ast());
 otherQuery->setPlan(newPlan);

@@ -514,6 +527,7 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {
 // pick up the remote query ids
 std::unordered_map<std::string, std::string> queryIds;

+std::string error;
 int count = 0;
 int nrok = 0;
 for (count = (int) shardIds.size(); count > 0; count--) {

@@ -538,6 +552,13 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {
 std::cout << "DB SERVER ANSWERED WITH ERROR: " << res->answer->body() << "\n";
 }
 }
+else {
+error += std::string("Communication with shard '") +
+std::string(res->shardID) +
+std::string("' on cluster node '") +
+std::string(res->serverID) +
+std::string("' failed.");
+}
 delete res;
 }

@@ -545,7 +566,7 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {

 if (nrok != (int) shardIds.size()) {
 // TODO: provide sensible error message with more details
-THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "did not receive response from all shards");
+THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, error);
 }

 return queryIds;

@@ -743,10 +764,10 @@ ExecutionEngine* ExecutionEngine::instanciateFromPlan (QueryRegistry* queryRegis
 }

 TRI_ASSERT(root != nullptr);
+engine->_root = root;
 root->initialize();
 root->initializeCursor(nullptr, 0);

-engine->_root = root;
-
 return engine;
 }
@@ -78,6 +78,12 @@ namespace triagens {

 static bool isCoordinator ();

+////////////////////////////////////////////////////////////////////////////////
+// @brief whether or not we are a DB server
+////////////////////////////////////////////////////////////////////////////////
+
+static bool isDBServer ();
+
 ////////////////////////////////////////////////////////////////////////////////
 // @brief create an execution engine from a plan
 ////////////////////////////////////////////////////////////////////////////////

@@ -126,7 +132,10 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 int shutdown () {
-return _root->shutdown();
+if (_root != nullptr) {
+return _root->shutdown();
+}
+else return 0;
 }

 ////////////////////////////////////////////////////////////////////////////////
@@ -1304,6 +1304,7 @@ IndexRangeNode::IndexRangeNode (ExecutionPlan* plan,
 _collection(plan->getAst()->query()->collections()->get(JsonHelper::checkAndGetStringValue(json.json(),
 "collection"))),
 _outVariable(varFromJson(plan->getAst(), json, "outVariable")),
+_index(nullptr),
 _ranges(),
 _reverse(false) {

@@ -1324,6 +1325,10 @@ IndexRangeNode::IndexRangeNode (ExecutionPlan* plan,

 _index = _collection->getIndex(iid);
 _reverse = JsonHelper::checkAndGetBooleanValue(json.json(), "reverse");
+
+if (_index == nullptr) {
+THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "index not found");
+}
 }

 ExecutionNode::IndexMatch IndexRangeNode::MatchesIndex (IndexMatchVec const& pattern) const {
@@ -299,6 +299,8 @@ Query* Query::clone (QueryPart part) {
 }
 }

+TRI_ASSERT(clone->_trx == nullptr);
+
 clone->_trx = _trx->clone(); // A daughter transaction which does not
 // actually lock the collections
 return clone.release();

@@ -550,7 +552,7 @@ QueryResult Query::execute (QueryRegistry* registry) {
 AqlValue val = value->getValue(i, 0);

 if (! val.isEmpty()) {
-json.add(val.toJson(trx(), doc));
+json.add(val.toJson(_trx, doc));
 }
 }
 delete value;

@@ -974,16 +976,16 @@ std::string Query::getStateString () const {

 void Query::cleanupPlanAndEngine () {
 if (_engine != nullptr) {
+_engine->shutdown();
 delete _engine;
 _engine = nullptr;
 }

 if (_trx != nullptr) {
-// TODO: this doesn't unblock the collection on the coordinator. Y?
 _trx->abort();
+delete _trx;
+_trx = nullptr;
 }
-delete _trx;
-_trx = nullptr;

 if (_parser != nullptr) {
 delete _parser;
@@ -344,8 +344,8 @@ namespace triagens {
 /// @brief return the transaction, if prepared
 ////////////////////////////////////////////////////////////////////////////////

-triagens::arango::AqlTransaction* trx () {
-return &*_trx;
+inline triagens::arango::AqlTransaction* trx () {
+return _trx;
 }

 ////////////////////////////////////////////////////////////////////////////////
@@ -40,7 +40,6 @@
 #include "GeneralServer/GeneralServer.h"

 #include "VocBase/server.h"
-//#include "V8Server/v8-vocbaseprivate.h"

 #include "Aql/ExecutionEngine.h"
 #include "Aql/ExecutionBlock.h"

@@ -111,16 +110,18 @@ std::string const& RestAqlHandler::queue () const {
 void RestAqlHandler::createQueryFromJson () {
 Json queryJson(TRI_UNKNOWN_MEM_ZONE, parseJsonBody());
 if (queryJson.isEmpty()) {
+LOG_ERROR("Invalid JSON Plan in Query");
 return;
 }

-std::cout << "createQueryFromJson" << queryJson.toString() << std::endl;
+std::cout << "createQueryFromJson: " << queryJson.toString() << std::endl;

 Json plan;
 Json options;

 plan = queryJson.get("plan").copy(); // cannot throw
 if (plan.isEmpty()) {
+LOG_ERROR("Invalid JSON: \"plan\"-Attribute missing.");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "body must be an object with attribute \"plan\"");
 return;

@@ -132,6 +133,8 @@ void RestAqlHandler::createQueryFromJson () {
 auto query = new Query(_applicationV8, false, _vocbase, plan, options.steal(), (part == "main" ? PART_MAIN : PART_DEPENDENT));
 QueryResult res = query->prepare(_queryRegistry);
 if (res.code != TRI_ERROR_NO_ERROR) {
+LOG_ERROR("Failed to instanciate the Query: %s", res.details.c_str());
+
 generateError(HttpResponse::BAD, TRI_ERROR_QUERY_BAD_JSON_PLAN,
 res.details);
 delete query;

@@ -153,6 +156,8 @@ void RestAqlHandler::createQueryFromJson () {
 _queryRegistry->insert(_vocbase, _qId, query, ttl);
 }
 catch (...) {
+LOG_ERROR("could not keep query in registry");
+
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "could not keep query in registry");
 delete query;

@@ -180,11 +185,13 @@ void RestAqlHandler::createQueryFromJson () {
 void RestAqlHandler::parseQuery () {
 Json queryJson(TRI_UNKNOWN_MEM_ZONE, parseJsonBody());
 if (queryJson.isEmpty()) {
+LOG_ERROR("Invalid JSON Plan in Query");
 return;
 }

 std::string const queryString = JsonHelper::getStringValue(queryJson.json(), "query", "");
 if (queryString.empty()) {
+LOG_ERROR("body must be an object with attribute \"query\"");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "body must be an object with attribute \"query\"");
 return;

@@ -194,6 +201,7 @@ void RestAqlHandler::parseQuery () {
 nullptr, nullptr, PART_MAIN);
 QueryResult res = query->parse();
 if (res.code != TRI_ERROR_NO_ERROR) {
+LOG_ERROR("Failed to instanciate the Query: %s", res.details.c_str());
 generateError(HttpResponse::BAD, res.code, res.details);
 delete query;
 return;

@@ -236,6 +244,7 @@ void RestAqlHandler::explainQuery () {

 std::string queryString = JsonHelper::getStringValue(queryJson.json(), "query", "");
 if (queryString.empty()) {
+LOG_ERROR("body must be an object with attribute \"query\"");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "body must be an object with attribute \"query\"");
 return;

@@ -250,6 +259,7 @@ void RestAqlHandler::explainQuery () {
 parameters.steal(), options.steal(), PART_MAIN);
 QueryResult res = query->explain();
 if (res.code != TRI_ERROR_NO_ERROR) {
+LOG_ERROR("Failed to instanciate the Query: %s", res.details.c_str());
 generateError(HttpResponse::BAD, res.code, res.details);
 delete query;
 return;

@@ -289,6 +299,7 @@ void RestAqlHandler::createQueryFromString () {

 std::string const queryString = JsonHelper::getStringValue(queryJson.json(), "query", "");
 if (queryString.empty()) {
+LOG_ERROR("body must be an object with attribute \"query\"");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "body must be an object with attribute \"query\"");
 return;

@@ -296,6 +307,7 @@ void RestAqlHandler::createQueryFromString () {

 std::string const part = JsonHelper::getStringValue(queryJson.json(), "part", "");
 if (part.empty()) {
+LOG_ERROR("body must be an object with attribute \"part\"");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "body must be an object with attribute \"part\"");
 return;

@@ -310,6 +322,7 @@ void RestAqlHandler::createQueryFromString () {
 parameters.steal(), options.steal(), (part == "main" ? PART_MAIN : PART_DEPENDENT));
 QueryResult res = query->prepare(_queryRegistry);
 if (res.code != TRI_ERROR_NO_ERROR) {
+LOG_ERROR("Failed to instanciate the Query: %s", res.details.c_str());
 generateError(HttpResponse::BAD, TRI_ERROR_QUERY_BAD_JSON_PLAN,
 res.details);
 delete query;

@@ -329,6 +342,7 @@ void RestAqlHandler::createQueryFromString () {
 _queryRegistry->insert(_vocbase, _qId, query, ttl);
 }
 catch (...) {
+LOG_ERROR("could not keep query in registry");
 generateError(HttpResponse::BAD, TRI_ERROR_INTERNAL,
 "could not keep query in registry");
 delete query;
@@ -432,7 +446,7 @@ void RestAqlHandler::useQuery (std::string const& operation,
 }
 catch (triagens::arango::Exception const& ex) {
 _queryRegistry->close(_vocbase, _qId);
+LOG_ERROR("Failed during use of Query: %s", ex.message().c_str());
 generateError(HttpResponse::SERVER_ERROR,
 ex.code(),
 ex.message());

@@ -440,12 +454,15 @@ void RestAqlHandler::useQuery (std::string const& operation,
 catch (std::exception const& ex) {
 _queryRegistry->close(_vocbase, _qId);
+
+LOG_ERROR("Failed during use of Query: %s", ex.what());

 generateError(HttpResponse::SERVER_ERROR,
 TRI_ERROR_HTTP_SERVER_ERROR,
 ex.what());
 }
 catch (...) {
 _queryRegistry->close(_vocbase, _qId);
+LOG_ERROR("Failed during use of Query: Unknown exeption occured");
+
 generateError(HttpResponse::SERVER_ERROR,
 TRI_ERROR_HTTP_SERVER_ERROR,

@@ -531,13 +548,14 @@ void RestAqlHandler::getInfoQuery (std::string const& operation,
 }
 else {
 _queryRegistry->close(_vocbase, _qId);
+LOG_ERROR("Referenced qery not found");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 return;
 }
 }
 catch (triagens::arango::Exception const& ex) {
 _queryRegistry->close(_vocbase, _qId);
+LOG_ERROR("Failed during use of Query: %s", ex.message().c_str());
 generateError(HttpResponse::SERVER_ERROR,
 ex.code(),
 ex.message());

@@ -545,6 +563,8 @@ void RestAqlHandler::getInfoQuery (std::string const& operation,
 catch (std::exception const& ex) {
 _queryRegistry->close(_vocbase, _qId);
+
+LOG_ERROR("Failed during use of Query: %s", ex.what());

 generateError(HttpResponse::SERVER_ERROR,
 TRI_ERROR_HTTP_SERVER_ERROR,
 ex.what());

@@ -552,6 +572,8 @@ void RestAqlHandler::getInfoQuery (std::string const& operation,
 catch (...) {
 _queryRegistry->close(_vocbase, _qId);
+
+LOG_ERROR("Failed during use of Query: Unknown exeption occured");

 generateError(HttpResponse::SERVER_ERROR,
 TRI_ERROR_HTTP_SERVER_ERROR,
 "an unknown exception occurred");
@@ -582,6 +604,7 @@ triagens::rest::HttpHandler::status_t RestAqlHandler::execute () {
 switch (type) {
 case HttpRequest::HTTP_REQUEST_POST: {
 if (suffix.size() != 1) {
+LOG_ERROR("Empty POST!");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 }
 else if (suffix[0] == "instanciate") {

@@ -597,12 +620,14 @@ triagens::rest::HttpHandler::status_t RestAqlHandler::execute () {
 createQueryFromString();
 }
 else {
+LOG_ERROR("Unknown API");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 }
 break;
 }
 case HttpRequest::HTTP_REQUEST_PUT: {
 if (suffix.size() != 2) {
+LOG_ERROR("unknown PUT API");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 }
 else {

@@ -612,6 +637,7 @@ triagens::rest::HttpHandler::status_t RestAqlHandler::execute () {
 }
 case HttpRequest::HTTP_REQUEST_GET: {
 if (suffix.size() != 2) {
+LOG_ERROR("Unknown GET API");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 }
 else {

@@ -624,6 +650,7 @@ triagens::rest::HttpHandler::status_t RestAqlHandler::execute () {
 case HttpRequest::HTTP_REQUEST_PATCH:
 case HttpRequest::HTTP_REQUEST_OPTIONS:
 case HttpRequest::HTTP_REQUEST_ILLEGAL: {
+LOG_ERROR("Unknown HTTP-method for /_api/aql");
 generateError(HttpResponse::METHOD_NOT_ALLOWED,
 TRI_ERROR_NOT_IMPLEMENTED,
 "illegal method for /_api/aql");
@@ -651,12 +678,14 @@ bool RestAqlHandler::findQuery (std::string const& idString,
 }
 catch (...) {
 _qId = 0;
+LOG_ERROR("Query not found.");
 generateError(HttpResponse::FORBIDDEN, TRI_ERROR_QUERY_IN_USE);
 return true;
 }

 if (query == nullptr) {
 _qId = 0;
+LOG_ERROR("Query not found.");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_QUERY_NOT_FOUND);
 return true;
 }

@@ -712,6 +741,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 //std::cout << "ANSWERBODY: " << JsonHelper::toString(answerBody.json()) << "\n\n";
 }
 catch (...) {
+LOG_ERROR("cannot transform AqlItemBlock to Json");
 generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_HTTP_SERVER_ERROR,
 "cannot transform AqlItemBlock to Json");
 return;

@@ -737,6 +767,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 }
 }
 catch (...) {
+LOG_ERROR("skipSome lead to an exception");
 generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_HTTP_SERVER_ERROR,
 "skipSome lead to an exception");
 return;

@@ -764,6 +795,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 ("error", Json(false));
 }
 catch (...) {
+LOG_ERROR("skip lead to an exception");
 generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_HTTP_SERVER_ERROR,
 "skip lead to an exception");
 return;

@@ -784,6 +816,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 }
 }
 catch (...) {
+LOG_ERROR("initializeCursor lead to an exception");
 generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_HTTP_SERVER_ERROR,
 "initializeCursor lead to an exception");
 return;

@@ -798,6 +831,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 _queryRegistry->destroy(_vocbase, _qId);
 }
 catch (...) {
+LOG_ERROR("shutdown lead to an exception");
 generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_HTTP_SERVER_ERROR,
 "shutdown lead to an exception");
 return;

@@ -806,6 +840,7 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
 ("code", Json(static_cast<double>(res)));
 }
 else {
+LOG_ERROR("Unknown operation!");
 generateError(HttpResponse::NOT_FOUND, TRI_ERROR_HTTP_NOT_FOUND);
 return;
 }

@@ -825,11 +860,13 @@ TRI_json_t* RestAqlHandler::parseJsonBody () {

 if (json == nullptr) {
 if (errmsg == nullptr) {
+LOG_ERROR("cannot parse json object");
 generateError(HttpResponse::BAD,
 TRI_ERROR_HTTP_CORRUPTED_JSON,
 "cannot parse json object");
 }
 else {
+LOG_ERROR("cannot parse json object: %s", errmsg);
 generateError(HttpResponse::BAD,
 TRI_ERROR_HTTP_CORRUPTED_JSON,
 errmsg);

@@ -844,6 +881,7 @@ TRI_json_t* RestAqlHandler::parseJsonBody () {

 if (! TRI_IsArrayJson(json)) {
 TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);
+LOG_ERROR("body of request must be a JSON array");
 generateError(HttpResponse::BAD, TRI_ERROR_HTTP_BAD_PARAMETER,
 "body of request must be a JSON array");
 return nullptr;
@@ -338,7 +338,7 @@ ClusterCommResult* ClusterComm::syncRequest (

 res->result = client->request(reqtype, path, body.c_str(), body.size(),
 headersCopy);
-if (! res->result->isComplete()) {
+if (res->result == nullptr || ! res->result->isComplete()) {
 cm->brokenConnection(connection);
 if (client->getErrorMessage() == "Request timeout reached") {
 res->status = CL_COMM_TIMEOUT;

@@ -723,7 +723,7 @@ void ClusterComm::asyncAnswer (string& coordinatorHeader,
 httpclient::SimpleHttpResult* result =
 client->request(rest::HttpRequest::HTTP_REQUEST_PUT,
 "/_api/shard-comm", body, len, headers);
-if (! result->isComplete()) {
+if (result == nullptr || ! result->isComplete()) {
 cm->brokenConnection(connection);
 }
 else {

@@ -1011,17 +1011,17 @@ void ClusterCommThread::run () {

 // We add this result to the operation struct without acquiring
 // a lock, since we know that only we do such a thing:
-if (0 != op->body) {
+if (nullptr != op->body) {
 op->result = client->request(op->reqtype, op->path,
 op->body->c_str(), op->body->size(),
 *(op->headerFields));
 }
 else {
 op->result = client->request(op->reqtype, op->path,
-NULL, 0, *(op->headerFields));
+nullptr, 0, *(op->headerFields));
 }

-if (! op->result->isComplete()) {
+if (op->result == nullptr || ! op->result->isComplete()) {
 cm->brokenConnection(connection);
 if (client->getErrorMessage() == "Request timeout reached") {
 op->status = CL_COMM_TIMEOUT;

@@ -1042,7 +1042,7 @@ void ClusterCommThread::run () {
 }
 }

-if (!cc->moveFromSendToReceived(op->operationID)) {
+if (! cc->moveFromSendToReceived(op->operationID)) {
 // It was dropped in the meantime, so forget about it:
 delete op;
 }
@@ -90,7 +90,7 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 bool empty () const {
-return (0 == _json); //|| (id() == 0);
+return (nullptr == _json); //|| (id() == 0);
 }

 ////////////////////////////////////////////////////////////////////////////////

@@ -189,11 +189,11 @@ namespace triagens {
 TRI_json_t* keyOptions () const {
 TRI_json_t const* keyOptions = triagens::basics::JsonHelper::getArrayElement(_json, "keyOptions");

-if (keyOptions != 0) {
+if (keyOptions != nullptr) {
 return TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, keyOptions);
 }

-return 0;
+return nullptr;
 }

 ////////////////////////////////////////////////////////////////////////////////

@@ -203,7 +203,7 @@ namespace triagens {
 bool allowUserKeys () const {
 TRI_json_t const* keyOptions = triagens::basics::JsonHelper::getArrayElement(_json, "keyOptions");

-if (keyOptions != 0) {
+if (keyOptions != nullptr) {
 return triagens::basics::JsonHelper::getBooleanValue(keyOptions, "allowUserKeys", true);
 }

@@ -422,7 +422,7 @@ namespace triagens {
 TRI_json_t* _json = it->second;
 b = triagens::basics::JsonHelper::getBooleanValue(_json,
 name, false);
-m.insert(make_pair(it->first,b));
+m.insert(make_pair(it->first, b));
 }
 return m;
 }

@@ -539,15 +539,12 @@ namespace triagens {
 = triagens::basics::JsonHelper::getArrayElement
 (_json, "keyOptions");

-if (keyOptions != 0) {
+if (keyOptions != nullptr) {
 return TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, keyOptions);
 }
-}
-else {
-return 0;
-}
+}
+
+return nullptr;
 }
@@ -117,7 +117,6 @@ bool ServerJob::cancel (bool running) {
 ////////////////////////////////////////////////////////////////////////////////

 bool ServerJob::execute () {
-
 // default to system database
 TRI_vocbase_t* vocbase = TRI_UseDatabaseServer(_server, TRI_VOC_SYSTEM_DATABASE);

@@ -126,6 +125,7 @@ bool ServerJob::execute () {
 return false;
 }

+// only one plan change at a time
 MUTEX_LOCKER(ExecutorLock);

 ApplicationV8::V8Context* context = _applicationV8->enterContext("STANDARD", vocbase, false, true);

@@ -135,14 +135,17 @@ bool ServerJob::execute () {
 return false;
 }

-{
+try {
 v8::HandleScope scope;
 // execute script inside the context
 char const* file = "handle-plan-change";
 char const* content = "require('org/arangodb/cluster').handlePlanChange();";

-TRI_ExecuteJavaScriptString(v8::Context::GetCurrent(), v8::String::New(content), v8::String::New(file), false);
+TRI_ExecuteJavaScriptString(v8::Context::GetCurrent(), v8::String::New(content, (int) strlen(content)), v8::String::New(file), false);
 }
+catch (...) {
+}

 // get the pointer to the last used vocbase
 TRI_v8_global_t* v8g = static_cast<TRI_v8_global_t*>(context->_isolate->GetData());
@@ -263,6 +263,7 @@ namespace triagens {
 }
 #ifdef TRI_ENABLE_MAINTAINER_MODE
 TRI_ASSERT(_numberTrxActive == _numberTrxInScope);
+TRI_ASSERT(_numberTrxActive > 0);
 _numberTrxActive--; // Every transaction gets here at most once
 #endif
 return TRI_ERROR_NO_ERROR;

@@ -272,6 +273,7 @@ namespace triagens {

 #ifdef TRI_ENABLE_MAINTAINER_MODE
 TRI_ASSERT(_numberTrxActive == _numberTrxInScope);
+TRI_ASSERT(_numberTrxActive > 0);
 _numberTrxActive--; // Every transaction gets here at most once
 #endif

@@ -295,6 +297,7 @@ namespace triagens {

 #ifdef TRI_ENABLE_MAINTAINER_MODE
 TRI_ASSERT(_numberTrxActive == _numberTrxInScope);
+TRI_ASSERT(_numberTrxActive > 0);
 _numberTrxActive--; // Every transaction gets here at most once
 #endif
 return TRI_ERROR_NO_ERROR;

@@ -304,6 +307,7 @@ namespace triagens {

 #ifdef TRI_ENABLE_MAINTAINER_MODE
 TRI_ASSERT(_numberTrxActive == _numberTrxInScope);
+TRI_ASSERT(_numberTrxActive > 0);
 _numberTrxActive--; // Every transaction gets here at most once
 #endif
@@ -31,6 +31,7 @@
 #define ARANGODB_VOC_BASE_VOC__TYPES_H 1

 #include "Basics/Common.h"
+#include "Cluster/ServerState.h"

 // -----------------------------------------------------------------------------
 // --SECTION-- public defines

@@ -225,6 +226,8 @@ namespace triagens {

 static void increaseNumbers (int numberInScope, int numberActive) {
 #ifdef TRI_ENABLE_MAINTAINER_MODE
+TRI_ASSERT(_numberTrxInScope + numberInScope >= 0);
+TRI_ASSERT(_numberTrxActive + numberActive >= 0);
 _numberTrxInScope += numberInScope;
 _numberTrxActive += numberActive;
 #endif
@@ -1,7 +1,7 @@
 /*jshint browser: true */
 /*jshint unused: false */
 /*global describe, beforeEach, afterEach, it, spyOn, expect, jQuery, _, jqconsole, $*/
-/*global arangoHelper, ace*/
+/*global arangoHelper, ace, window, document, localStorage, Joi*/


 (function() {
@@ -9,9 +9,11 @@

   describe("The query view", function() {

-    var view, div, div2, jQueryDummy;
+    var view, div, div2, jQueryDummy, collectionDummy,
+      localStorageFake;

     beforeEach(function() {
+      spyOn($, "ajax");
       window.App = {
         notificationList: {
           add: function() {
@@ -19,6 +21,38 @@
           }
         }
       };
+      localStorageFake = {
+        value: undefined
+      };
+      spyOn(localStorage, "getItem").andCallFake(function() {
+        return localStorageFake.value;
+      });
+      var DummyModel = function(vals) {
+        this.get = function (attr) {
+          return vals[attr];
+        };
+      };
+
+      collectionDummy = {
+        list: [],
+        fetch: function() {
+          throw "Should be a spy";
+        },
+        add: function(item) {
+          this.list.push(new DummyModel(item));
+        },
+        each: function(func) {
+          return this.list.forEach(func);
+        },
+        saveCollectionQueries: function() {
+          throw "Should be a spy";
+        },
+        findWhere: function(ex) {
+
+        }
+      };
+      spyOn(collectionDummy, "fetch");
+      spyOn(collectionDummy, "saveCollectionQueries");
+
       spyOn(window.App.notificationList, "add");

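The beforeEach in the hunk above replaces real localStorage reads with an in-memory fake driven by a Jasmine 1.x spy. A minimal stand-alone sketch of that pattern, intended to run inside a Jasmine spec (the stored query content is invented for illustration):

    var store = { value: undefined };

    // Every localStorage.getItem() call is answered from the in-memory fake
    // instead of the browser's real storage.
    spyOn(localStorage, "getItem").andCallFake(function () {
      return store.value;
    });

    store.value = JSON.stringify([{ name: "q1", value: "RETURN 1" }]);

    expect(localStorage.getItem("customQueries")).toEqual(store.value);
    expect(localStorage.getItem).toHaveBeenCalledWith("customQueries");

Because the spy closes over the store object, a test only has to assign store.value to control what the view under test reads back.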
@@ -27,6 +61,7 @@
       document.body.appendChild(div);

       view = new window.queryView({
+        collection: collectionDummy
       });

       window.modalView = new window.ModalView();
@@ -60,24 +95,23 @@
         'click #clearQueryButton': 'clearInput',
         'click #addAQL': 'addAQL',
         'change #querySelect': 'importSelected',
-        'change #querySize': 'changeSize',
         'keypress #aqlEditor': 'aqlShortcuts',
         'click #arangoQueryTable .table-cell0': 'editCustomQuery',
         'click #arangoQueryTable .table-cell1': 'editCustomQuery',
         'click #arangoQueryTable .table-cell2 a': 'deleteAQL',
         'click #confirmQueryImport': 'importCustomQueries',
-        'click #confirmQueryExport': 'exportCustomQueries'
+        'click #confirmQueryExport': 'exportCustomQueries',
+        'click #downloadQueryResult': 'downloadQueryResult',
+        'click #importQueriesToggle': 'showImportMenu'
       };
       expect(events).toEqual(view.events);
     });

     it("should execute all functions when view initializes", function () {
       spyOn(view, "getAQL");
-      spyOn(localStorage, "setItem");
       view.initialize();
       expect(view.tableDescription.rows).toEqual(view.customQueries);
       expect(view.getAQL).toHaveBeenCalled();
-      expect(localStorage.setItem).toHaveBeenCalled();
     });

     it("should create a custom query modal", function() {
@@ -86,7 +120,13 @@
       spyOn(window.modalView, "show");
       view.createCustomQueryModal();
       expect(window.modalView.createTextEntry).toHaveBeenCalledWith(
-        'new-query-name', 'Name', '', undefined, undefined, false, /[<>&'"]/
+        'new-query-name', 'Name', '', undefined, undefined, false,
+        [
+          {
+            rule: Joi.string().required(),
+            msg: "No query name given."
+          }
+        ]
       );
       expect(window.modalView.createSuccessButton).toHaveBeenCalled();
       expect(window.modalView.show).toHaveBeenCalled();
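The expectation above shows createTextEntry now taking a list of {rule, msg} validators instead of a bare regular expression. A rough sketch of how such a list can be applied to an input value; it assumes a Joi build whose schemas expose validate(value) returning an object with an error property (the exact API of the copy bundled with the web UI is not visible in this diff), and collectValidationErrors is a hypothetical helper, not part of the ArangoDB code:

    function collectValidationErrors(value, validators) {
      // validators: [{ rule: <Joi schema>, msg: <string> }, ...] as in the hunk above
      var errors = [];
      validators.forEach(function (entry) {
        if (entry.rule.validate(value).error) {   // assumption: schema.validate(value) -> { error, value }
          errors.push(entry.msg);
        }
      });
      return errors;
    }

    collectValidationErrors("", [{ rule: Joi.string().required(), msg: "No query name given." }]);
    // -> ["No query name given."], since an empty string fails Joi.string().required()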
@@ -117,9 +157,7 @@
         name: "123123123",
         value: "for var yx do something"
       }];
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
-      view.initialize();
-      spyOn(localStorage, "getItem");
+      localStorageFake.value = JSON.stringify(customQueries);
       view.getAQL();
       expect(localStorage.getItem).toHaveBeenCalledWith("customQueries");
       expect(view.customQueries).toEqual(customQueries);
@@ -225,8 +263,10 @@
         name: "myname",
         value: "for var yx do something"
       }];
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);

       view.initialize();
+      expect(localStorage.getItem).toHaveBeenCalledWith("customQueries");
+
       jQueryDummy = {
         removeClass: function () {
@@ -263,7 +303,7 @@
       }], e = {
         target: "dontcare"
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);

       spyOn(view, "switchTab");
       spyOn(view, "deselect");
@@ -283,7 +323,8 @@
       }], e = {
         target: "dontcare"
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);

       view.initialize();
+
       spyOn(view, "renderSelectboxes");
@@ -306,7 +347,7 @@
         target: "dontcare",
         stopPropagation: function() {throw "Should be a spy";}
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       div2 = document.createElement("div");
@@ -338,7 +379,7 @@
           throw "Should be a spy";
         }
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       div2 = document.createElement("div");
@@ -370,7 +411,7 @@
           throw "Should be a spy";
         }
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       div2 = document.createElement("div");
@@ -402,7 +443,7 @@
           throw "Should be a spy";
         }
       };
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       div2 = document.createElement("div");
@@ -430,7 +471,7 @@
           value: "for var yx do something"
         }],
         returnValue;
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       returnValue = view.getCustomQueryValueByName("hallotest");
@@ -442,7 +483,7 @@
       div2.id = "test123";
       document.body.appendChild(div2);

-      localStorage.setItem("querySize", 5000);
+      localStorageFake.value = 5000;

       view.initialize();
       spyOn(localStorage, "getItem");
@@ -489,7 +530,7 @@
         }
       };
       $('#findme').val('findme');
-      localStorage.setItem("customQueries", JSON.stringify(customQueries));
+      localStorageFake.value = JSON.stringify(customQueries);
       view.initialize();

       view.importSelected(e);
@@ -5860,6 +5860,7 @@ function GENERAL_GRAPH_NEIGHBORS (graphName,

   if (options.hasOwnProperty("neighborExamples") && typeof options.neighborExamples === "string") {
     options.neighborExamples = {_id : options.neighborExamples};
+  }
   var neighbors = [],
     params = TRAVERSAL_PARAMS(),
     factory = TRAVERSAL.generalGraphDatasourceFactory(graphName);
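For context on the one-line hunk above: GRAPH_NEIGHBORS accepts an example filter for the neighbour vertices, and the string form is normalised into an example document matching on _id before the traversal parameters are built. A small sketch of that normalisation, using an invented document id:

    var options = { neighborExamples: "persons/alice" };   // "persons/alice" is a made-up _id

    if (options.hasOwnProperty("neighborExamples") && typeof options.neighborExamples === "string") {
      options.neighborExamples = { _id: options.neighborExamples };
    }

    // options.neighborExamples is now { _id: "persons/alice" }, so only neighbours
    // with exactly that _id are matched.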
@@ -166,12 +166,12 @@ function printUsage () {
 function filterTestcaseByOptions (testname, options, whichFilter)
 {
   if ((testname.indexOf("-cluster") !== -1) && (options.cluster === false)) {
-    whichFilter.filter = 'cluster';
+    whichFilter.filter = 'noncluster';
     return false;
   }

   if (testname.indexOf("-noncluster") !== -1 && (options.cluster === true)) {
-    whichFilter.filter = 'noncluster';
+    whichFilter.filter = 'cluster';
     return false;
   }

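The swapped strings above appear to make whichFilter.filter name the filter that excluded the test rather than the flavour of the test itself. A minimal sketch of the corrected behaviour, with invented test names and options:

    var whichFilter = {};

    // A "-cluster" test is skipped when the cluster option is off; the recorded reason
    // is that the run is filtered down to non-cluster tests:
    filterTestcaseByOptions("shell-foo-cluster", { cluster: false }, whichFilter);
    // -> false, whichFilter.filter === 'noncluster'

    // Conversely, a "-noncluster" test is skipped in a cluster run:
    filterTestcaseByOptions("shell-bar-noncluster", { cluster: true }, whichFilter);
    // -> false, whichFilter.filter === 'cluster'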
@@ -2881,6 +2881,112 @@ testGRAPH_DIAMETER_AND_RADIUS: function () {
   };
 }

+function ahuacatlQueryMultiCollectionMadnessTestSuite() {
+  var gN = "UnitTestsAhuacatlGraph";
+  var v1 = "UnitTestsAhuacatlVertex1";
+  var v2 = "UnitTestsAhuacatlVertex2";
+  var v3 = "UnitTestsAhuacatlVertex3";
+  var e1 = "UnitTestsAhuacatlEdge1";
+  var e2 = "UnitTestsAhuacatlEdge2";
+
+  var s1;
+  var c1;
+  var t1;
+  var s2;
+  var c2;
+  var t2;
+
+  var AQL_NEIGHBORS = "FOR e IN GRAPH_NEIGHBORS(@name, @example, @options) SORT e.vertex._id, e.path.edges[0].what RETURN e";
+
+  return {
+
+////////////////////////////////////////////////////////////////////////////////
+/// @brief set up
+////////////////////////////////////////////////////////////////////////////////
+
+    setUp: function () {
+      db._drop(v1);
+      db._drop(v2);
+      db._drop(v3);
+      db._drop(e1);
+      db._drop(e2);
+
+      var vertex1 = db._create(v1);
+      var vertex2 = db._create(v2);
+      var vertex3 = db._create(v3);
+
+      var edge1 = db._createEdgeCollection(e1);
+      var edge2 = db._createEdgeCollection(e2);
+
+      s1 = vertex1.save({ _key: "start"})._id;
+      c1 = vertex2.save({ _key: "center"})._id;
+      t1 = vertex3.save({ _key: "target"})._id;
+      s2 = vertex1.save({ _key: "start2"})._id;
+      c2 = vertex2.save({ _key: "center2"})._id;
+      t2 = vertex3.save({ _key: "target2"})._id;
+
+      function makeEdge(from, to, collection) {
+        collection.save(from, to, {});
+      }
+
+      makeEdge(s1, c1, edge1);
+      makeEdge(t1, c1, edge2);
+      makeEdge(s2, c2, edge1);
+      makeEdge(t2, c2, edge2);
+      makeEdge(t1, c2, edge2);
+      try {
+        graph._drop(gN);
+      } catch (ignore) {
+      }
+      graph._create(
+        gN,
+        graph._edgeDefinitions(
+          graph._relation(e1, v1, v2),
+          graph._relation(e2, v3, v2)
+        )
+      );
+    },
+
+    tearDown: function () {
+      graph._drop(gN, true);
+    },
+
+    testRestrictedPathHops1: function() {
+      var bindVars = {
+        name: gN,
+        example: s1,
+        options: {
+          direction : 'any',
+          minDepth: 2,
+          maxDepth: 2,
+          vertexCollectionRestriction: v3,
+          edgeCollectionRestriction: [e1, e2]
+        }
+      };
+      var actual = getRawQueryResults(AQL_NEIGHBORS, bindVars);
+      assertEqual(actual.length, 1);
+      assertEqual(actual[0].vertex._id, t1);
+    },
+
+    testRestrictedPathHops2: function() {
+      var bindVars = {
+        name: gN,
+        example: s2,
+        options: {
+          direction : 'any',
+          minDepth: 2,
+          maxDepth: 2,
+          vertexCollectionRestriction: v3,
+          edgeCollectionRestriction: [e1, e2]
+        }
+      };
+      var actual = getRawQueryResults(AQL_NEIGHBORS, bindVars);
+      assertEqual(actual.length, 2);
+      assertEqual(actual[0].vertex._id, t1);
+      assertEqual(actual[1].vertex._id, t2);
+    }
+  };
+}


 ////////////////////////////////////////////////////////////////////////////////
@@ -2891,6 +2997,7 @@ jsunity.run(ahuacatlQueryGeneralCyclesSuite);
 jsunity.run(ahuacatlQueryGeneralTraversalTestSuite);
 jsunity.run(ahuacatlQueryGeneralPathsTestSuite);
 jsunity.run(ahuacatlQueryGeneralEdgesTestSuite);
+jsunity.run(ahuacatlQueryMultiCollectionMadnessTestSuite);


 return jsunity.done();
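The new suite exercises GRAPH_NEIGHBORS through the test helper getRawQueryResults. Outside the test harness, roughly the same query could be issued from arangosh as sketched below; the graph name and vertex ids follow the fixtures created in setUp, and db is the usual org/arangodb export (this is an illustration, not part of the commit):

    var db = require("org/arangodb").db;

    var result = db._query(
      "FOR e IN GRAPH_NEIGHBORS(@name, @example, @options) SORT e.vertex._id RETURN e",
      {
        name: "UnitTestsAhuacatlGraph",
        example: "UnitTestsAhuacatlVertex1/start",
        options: {
          direction: "any",
          minDepth: 2,
          maxDepth: 2,
          vertexCollectionRestriction: "UnitTestsAhuacatlVertex3",
          edgeCollectionRestriction: ["UnitTestsAhuacatlEdge1", "UnitTestsAhuacatlEdge2"]
        }
      }
    ).toArray();

    // Mirroring testRestrictedPathHops1, result should contain one neighbour whose
    // vertex._id is "UnitTestsAhuacatlVertex3/target".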
File diff suppressed because it is too large