mirror of https://gitee.com/bigwinds/arangodb
Merge branch 'devel' of github.com:triAGENS/ArangoDB into devel
commit a03ce44254

@@ -144,6 +144,21 @@ instead.
Note: The *connections* function parameter value will contain the edges connected to the
vertex only if *order* was set to *preorder-expander*. Otherwise, the value of this parameter
will be *undefined*.

The following custom visitor functions are predefined and can be used by specifying the function
name in the *visitor* attribute:

- *"_AQL::PROJECTINGVISITOR"*: this visitor will produce an object with the attributes
  specified in *data.attributes* for each visited vertex. This can be used to create a
  projection of each visited vertex's document.

- *"_AQL::IDVISITOR"*: this visitor will return the _id attribute of each visited vertex.

- *"_AQL::KEYVISITOR"*: this visitor will return the _key attribute of each visited vertex.

- *"_AQL::COUNTINGVISITOR"*: this visitor will return a single number indicating the number
  of vertices visited.

- *visitorReturnsResults*: only useful in combination with a custom AQL visitor function. If
  set to *true*, the data returned by the visitor will be appended to the result. If set to

@@ -151,6 +166,11 @@ instead.
  function can modify its *result* parameter value in-place. At the end of the traversal,
  *result* is expected to be an array.

- *data*: only useful in combination with a custom AQL visitor function. This attribute can
  be used to pass arbitrary data into the custom visitor function. The value contained in the
  *data* attribute will be made available to the *visitor* function in the *config.data*
  attribute.

By default, the result of the TRAVERSAL function is an array of traversed points. Each point
is an object consisting of the following attributes:
- *vertex*: The vertex at the traversal point
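Editor's note (not part of the commit): the documentation above describes the predefined visitors and the *data* option. As an informal illustration, a traversal using the predefined projecting visitor could be run from arangosh roughly as sketched below; the collection names, start vertex, and attribute list are made up for the example.

// hedged sketch: invoke AQL TRAVERSAL with a predefined visitor (names are illustrative)
var db = require("org/arangodb").db;
var result = db._query(
  "FOR point IN TRAVERSAL(vertices, edges, 'vertices/start', 'outbound', " +
  "{ visitor: '_AQL::PROJECTINGVISITOR', data: { attributes: [ '_key', 'name' ] } }) " +
  "RETURN point"
).toArray();
// per the docs above, each element should be an object holding only the projected attributes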

@@ -82,6 +82,44 @@ describe ArangoDB do

      ArangoDB.drop_collection(cn)
    end

    it "returns an error if an object sub-attribute in the JSON body is corrupted" do
      cn = "UnitTestsCollectionBasics"
      id = ArangoDB.create_collection(cn)

      cmd = "/_api/document?collection=#{id}"
      body = "{ \"foo\" : { \"bar\" : \"baz\", \"blue\" : moo } }"
      doc = ArangoDB.log_post("#{prefix}-bad-json", cmd, :body => body)

      doc.code.should eq(400)
      doc.parsed_response['error'].should eq(true)
      doc.parsed_response['errorNum'].should eq(600)
      doc.parsed_response['code'].should eq(400)
      doc.headers['content-type'].should eq("application/json; charset=utf-8")

      ArangoDB.size_collection(cn).should eq(0)

      ArangoDB.drop_collection(cn)
    end

    it "returns an error if an array attribute in the JSON body is corrupted" do
      cn = "UnitTestsCollectionBasics"
      id = ArangoDB.create_collection(cn)

      cmd = "/_api/document?collection=#{id}"
      body = "{ \"foo\" : [ 1, 2, \"bar\", moo ] }"
      doc = ArangoDB.log_post("#{prefix}-bad-json", cmd, :body => body)

      doc.code.should eq(400)
      doc.parsed_response['error'].should eq(true)
      doc.parsed_response['errorNum'].should eq(600)
      doc.parsed_response['code'].should eq(400)
      doc.headers['content-type'].should eq("application/json; charset=utf-8")

      ArangoDB.size_collection(cn).should eq(0)

      ArangoDB.drop_collection(cn)
    end
  end

################################################################################
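Editor's note (illustration only, not part of the patch): the behaviour these specs exercise can also be observed from arangosh by posting a raw, malformed JSON body; the collection name below is made up and the `arango.POST` helper is assumed to be available in the shell.

// hedged sketch: a corrupted JSON body should be rejected with HTTP 400 / errorNum 600
var response = arango.POST("/_api/document?collection=UnitTestsCollectionBasics",
                           "{ \"foo\" : { \"bar\" : \"baz\", \"blue\" : moo } }");
// expected per the spec above: response.error === true, response.errorNum === 600, response.code === 400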

@@ -217,98 +217,126 @@ void Collection::fillIndexes () const {
  }

  if (ExecutionEngine::isCoordinator()) {
    // coordinator case, remote collection
    auto clusterInfo = triagens::arango::ClusterInfo::instance();
    auto collectionInfo = clusterInfo->getCollection(std::string(vocbase->_name), name);
    if (collectionInfo.get() == nullptr || (*collectionInfo).empty()) {
      THROW_ARANGO_EXCEPTION_FORMAT(TRI_ERROR_INTERNAL,
                                    "collection not found '%s' -> '%s'",
                                    vocbase->_name, name.c_str());
    }

    TRI_json_t const* json = (*collectionInfo).getIndexes();

    if (TRI_IsArrayJson(json)) {
      size_t const n = TRI_LengthArrayJson(json);
      indexes.reserve(n);

      for (size_t i = 0; i < n; ++i) {
        TRI_json_t const* v = TRI_LookupArrayJson(json, i);
        if (v != nullptr) {
          indexes.emplace_back(new Index(v));
        }
      }
    }
    fillIndexesCoordinator();
    return;
  }
  else if (ExecutionEngine::isDBServer()) {
    TRI_ASSERT(collection != nullptr);
    auto document = documentCollection();

    // lookup collection in agency by plan id
    auto clusterInfo = triagens::arango::ClusterInfo::instance();
    auto collectionInfo = clusterInfo->getCollection(std::string(vocbase->_name), triagens::basics::StringUtils::itoa(document->_info._planId));
    if (collectionInfo.get() == nullptr || (*collectionInfo).empty()) {
      THROW_ARANGO_EXCEPTION_FORMAT(TRI_ERROR_INTERNAL,
                                    "collection not found '%s' -> '%s'",
                                    vocbase->_name, name.c_str());
    }
    // must have a collection
    TRI_ASSERT(collection != nullptr);

    TRI_json_t const* json = (*collectionInfo).getIndexes();
    if (! TRI_IsArrayJson(json)) {
      THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "unexpected index list format");
    }
  if (ExecutionEngine::isDBServer() && documentCollection()->_info._planId > 0) {
    fillIndexesDBServer();
    return;
  }

  fillIndexesLocal();
}

////////////////////////////////////////////////////////////////////////////////
/// @brief fills the index list, cluster coordinator case
////////////////////////////////////////////////////////////////////////////////

void Collection::fillIndexesCoordinator () const {
  // coordinator case, remote collection
  auto clusterInfo = triagens::arango::ClusterInfo::instance();
  auto collectionInfo = clusterInfo->getCollection(std::string(vocbase->_name), name);
  if (collectionInfo.get() == nullptr || (*collectionInfo).empty()) {
    THROW_ARANGO_EXCEPTION_FORMAT(TRI_ERROR_INTERNAL,
                                  "collection not found '%s' in database '%s'",
                                  name.c_str(), vocbase->_name);
  }

  TRI_json_t const* json = (*collectionInfo).getIndexes();

  if (TRI_IsArrayJson(json)) {
    size_t const n = TRI_LengthArrayJson(json);
    indexes.reserve(n);

    // register indexes

    for (size_t i = 0; i < n; ++i) {
      TRI_json_t const* v = TRI_LookupArrayJson(json, i);
      if (TRI_IsObjectJson(v)) {
        // lookup index id
        TRI_json_t const* id = TRI_LookupObjectJson(v, "id");
        if (! TRI_IsStringJson(id)) {
          continue;
        }

        // use numeric index id
        uint64_t iid = triagens::basics::StringUtils::uint64(id->_value._string.data, id->_value._string.length - 1);
        TRI_index_t* data = nullptr;

        // now check if we can find the local index and map it
        for (size_t j = 0; j < document->_allIndexes._length; ++j) {
          auto localIndex = static_cast<TRI_index_t*>(document->_allIndexes._buffer[j]);
          if (localIndex != nullptr && localIndex->_iid == iid) {
            // found
            data = localIndex;
            break;
          }
          else if (localIndex->_type == TRI_IDX_TYPE_PRIMARY_INDEX ||
                   localIndex->_type == TRI_IDX_TYPE_EDGE_INDEX) {
          }
        }

        auto idx = new Index(v);
        // assign the found local index
        idx->data = data;

        indexes.push_back(idx);
      if (v != nullptr) {
        indexes.emplace_back(new Index(v));
      }
    }
  }
  else {
    // local collection
    TRI_ASSERT(collection != nullptr);
    auto document = documentCollection();
    size_t const n = document->_allIndexes._length;
    indexes.reserve(n);
  }

  for (size_t i = 0; i < n; ++i) {
    indexes.emplace_back(new Index(static_cast<TRI_index_t*>(document->_allIndexes._buffer[i])));
////////////////////////////////////////////////////////////////////////////////
/// @brief fills the index list, cluster DB server case
////////////////////////////////////////////////////////////////////////////////

void Collection::fillIndexesDBServer () const {
  auto document = documentCollection();

  // lookup collection in agency by plan id
  auto clusterInfo = triagens::arango::ClusterInfo::instance();
  auto collectionInfo = clusterInfo->getCollection(std::string(vocbase->_name), triagens::basics::StringUtils::itoa(document->_info._planId));
  if (collectionInfo.get() == nullptr || (*collectionInfo).empty()) {
    THROW_ARANGO_EXCEPTION_FORMAT(TRI_ERROR_INTERNAL,
                                  "collection not found '%s' in database '%s'",
                                  name.c_str(), vocbase->_name);
  }

  TRI_json_t const* json = (*collectionInfo).getIndexes();
  if (! TRI_IsArrayJson(json)) {
    THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "unexpected indexes definition format");
  }

  size_t const n = TRI_LengthArrayJson(json);
  indexes.reserve(n);

  // register indexes
  for (size_t i = 0; i < n; ++i) {
    TRI_json_t const* v = TRI_LookupArrayJson(json, i);
    if (TRI_IsObjectJson(v)) {
      // lookup index id
      TRI_json_t const* id = TRI_LookupObjectJson(v, "id");
      if (! TRI_IsStringJson(id)) {
        continue;
      }

      // use numeric index id
      uint64_t iid = triagens::basics::StringUtils::uint64(id->_value._string.data, id->_value._string.length - 1);
      TRI_index_t* data = nullptr;

      // now check if we can find the local index and map it
      for (size_t j = 0; j < document->_allIndexes._length; ++j) {
        auto localIndex = static_cast<TRI_index_t*>(document->_allIndexes._buffer[j]);
        if (localIndex != nullptr && localIndex->_iid == iid) {
          // found
          data = localIndex;
          break;
        }
        else if (localIndex->_type == TRI_IDX_TYPE_PRIMARY_INDEX ||
                 localIndex->_type == TRI_IDX_TYPE_EDGE_INDEX) {
        }
      }

      auto idx = new Index(v);
      // assign the found local index
      idx->setInternals(data);

      indexes.push_back(idx);
    }
  }
}

////////////////////////////////////////////////////////////////////////////////
/// @brief fills the index list, local server case
/// note: this will also be called for local collection on the DB server
////////////////////////////////////////////////////////////////////////////////

void Collection::fillIndexesLocal () const {
  // local collection
  auto document = documentCollection();
  size_t const n = document->_allIndexes._length;
  indexes.reserve(n);

  for (size_t i = 0; i < n; ++i) {
    indexes.emplace_back(new Index(static_cast<TRI_index_t*>(document->_allIndexes._buffer[i])));
  }
}

// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------

@@ -192,12 +192,36 @@ namespace triagens {

      void fillIndexes () const;

      ////////////////////////////////////////////////////////////////////////////////
      /// @brief fills the index list, cluster coordinator case
      ////////////////////////////////////////////////////////////////////////////////

      void fillIndexesCoordinator () const;

      ////////////////////////////////////////////////////////////////////////////////
      /// @brief fills the index list, cluster DB server case
      ////////////////////////////////////////////////////////////////////////////////

      void fillIndexesDBServer () const;

      ////////////////////////////////////////////////////////////////////////////////
      /// @brief fills the index list, local server case
      /// note: this will also be called for local collection on the DB server
      ////////////////////////////////////////////////////////////////////////////////

      void fillIndexesLocal () const;

// -----------------------------------------------------------------------------
// --SECTION-- private variables
// -----------------------------------------------------------------------------

    private:

      ////////////////////////////////////////////////////////////////////////////////
      /// @brief currently handled shard. this is a temporary variable that will
      /// only be filled during plan creation
      ////////////////////////////////////////////////////////////////////////////////

      std::string currentShard;

// -----------------------------------------------------------------------------

@@ -44,6 +44,7 @@ namespace triagens {
// -----------------------------------------------------------------------------

    class Collections {

      public:

        Collections& operator= (Collections const& other) = delete;

@@ -1803,7 +1803,7 @@ void IndexRangeBlock::destroyHashIndexSearchValues () {

bool IndexRangeBlock::setupHashIndexSearchValue (IndexAndCondition const& range) {
  auto en = static_cast<IndexRangeNode const*>(getPlanNode());
  TRI_index_t* idx = en->_index->data;
  TRI_index_t* idx = en->_index->getInternals();
  TRI_ASSERT(idx != nullptr);
  TRI_hash_index_t* hashIndex = (TRI_hash_index_t*) idx;

@@ -1873,7 +1873,7 @@ void IndexRangeBlock::readHashIndex (size_t atMost) {
  }

  auto en = static_cast<IndexRangeNode const*>(getPlanNode());
  TRI_index_t* idx = en->_index->data;
  TRI_index_t* idx = en->_index->getInternals();
  TRI_ASSERT(idx != nullptr);

  size_t nrSent = 0;

@@ -1942,7 +1942,7 @@ void IndexRangeBlock::getSkiplistIterator (IndexAndCondition const& ranges) {
  TRI_ASSERT(_skiplistIterator == nullptr);

  auto en = static_cast<IndexRangeNode const*>(getPlanNode());
  TRI_index_t* idx = en->_index->data;
  TRI_index_t* idx = en->_index->getInternals();
  TRI_ASSERT(idx != nullptr);

  TRI_shaper_t* shaper = _collection->documentCollection()->getShaper();

@@ -1117,6 +1117,7 @@ void EnumerateCollectionNode::getIndexesForIndexRangeNode (std::unordered_set<st
                                                           std::vector<size_t>& prefixes) const {

  auto&& indexes = _collection->getIndexes();

  for (auto idx : indexes) {
    TRI_ASSERT(idx != nullptr);

@@ -1392,16 +1393,17 @@ IndexRangeNode::IndexRangeNode (ExecutionPlan* plan,
                                triagens::basics::Json const& json)
  : ExecutionNode(plan, json),
    _vocbase(plan->getAst()->query()->vocbase()),
    _collection(plan->getAst()->query()->collections()->get(JsonHelper::checkAndGetStringValue(json.json(),
                "collection"))),
    _collection(plan->getAst()->query()->collections()->get(JsonHelper::checkAndGetStringValue(json.json(), "collection"))),
    _outVariable(varFromJson(plan->getAst(), json, "outVariable")),
    _index(nullptr),
    _ranges(),
    _reverse(false) {

  triagens::basics::Json rangeArrayJson(TRI_UNKNOWN_MEM_ZONE, JsonHelper::checkAndGetArrayValue(json.json(), "ranges"));

  for (size_t i = 0; i < rangeArrayJson.size(); i++) { //loop over the ranges . . .
    _ranges.emplace_back();

    triagens::basics::Json rangeJson(rangeArrayJson.at(static_cast<int>(i)));
    for (size_t j = 0; j < rangeJson.size(); j++) {
      _ranges.at(i).emplace_back(rangeJson.at(static_cast<int>(j)));

@@ -110,12 +110,12 @@ ExecutionPlan* ExecutionPlan::instanciateFromAst (Ast* ast) {
/// @brief create an execution plan from JSON
////////////////////////////////////////////////////////////////////////////////

void ExecutionPlan::getCollectionsFromJson (Ast *ast,
void ExecutionPlan::getCollectionsFromJson (Ast* ast,
                                            triagens::basics::Json const& json) {
  Json jsonCollectionList = json.get("collections");

  if (! jsonCollectionList.isArray()) {
    THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "json node \"collections\" not found or not a list");
    THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "json node \"collections\" not found or not an array");
  }

  auto const size = jsonCollectionList.size();

@@ -125,9 +125,10 @@ void ExecutionPlan::getCollectionsFromJson (Ast *ast,
    auto typeStr = triagens::basics::JsonHelper::checkAndGetStringValue(oneJsonCollection.json(), "type");

    ast->query()->collections()->add(
        triagens::basics::JsonHelper::checkAndGetStringValue(oneJsonCollection.json(), "name"),
        TRI_GetTransactionTypeFromStr(triagens::basics::JsonHelper::checkAndGetStringValue(oneJsonCollection.json(), "type").c_str()));
  }
      triagens::basics::JsonHelper::checkAndGetStringValue(oneJsonCollection.json(), "name"),
      TRI_GetTransactionTypeFromStr(triagens::basics::JsonHelper::checkAndGetStringValue(oneJsonCollection.json(), "type").c_str())
    );
  }
}

ExecutionPlan* ExecutionPlan::instanciateFromJson (Ast* ast,

@@ -34,6 +34,7 @@
#include "Basics/json.h"
#include "Basics/JsonHelper.h"
#include "HashIndex/hash-index.h"
#include "Utils/Exception.h"
#include "VocBase/index.h"

namespace triagens {

@@ -56,7 +57,7 @@ namespace triagens {
          type(idx->_type),
          unique(idx->_unique),
          fields(),
          data(idx) {
          internals(idx) {

        size_t const n = idx->_fields._length;
        fields.reserve(n);

@@ -66,7 +67,7 @@ namespace triagens {
          fields.emplace_back(std::string(field));
        }

        TRI_ASSERT(data != nullptr);
        TRI_ASSERT(internals != nullptr);
      }

      Index (TRI_json_t const* json)

@@ -74,7 +75,7 @@ namespace triagens {
          type(TRI_TypeIndex(triagens::basics::JsonHelper::checkAndGetStringValue(json, "type").c_str())),
          unique(triagens::basics::JsonHelper::checkAndGetBooleanValue(json, "unique")),
          fields(),
          data(nullptr) {
          internals(nullptr) {

        TRI_json_t const* f = TRI_LookupObjectJson(json, "fields");

@@ -129,12 +130,24 @@ namespace triagens {
          return 1.0;
        }
        if (type == TRI_IDX_TYPE_HASH_INDEX) {
          return TRI_SelectivityHashIndex(data);
          return TRI_SelectivityHashIndex(getInternals());
        }

        TRI_ASSERT(false);
      }

      TRI_index_t* getInternals () const {
        if (internals == nullptr) {
          THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "accessing undefined index internals");
        }
        return internals;
      }

      void setInternals (TRI_index_t* idx) {
        TRI_ASSERT(internals == nullptr);
        internals = idx;
      }

// -----------------------------------------------------------------------------
// --SECTION-- public variables
// -----------------------------------------------------------------------------

@@ -143,7 +156,10 @@ namespace triagens {
      TRI_idx_type_e const type;
      bool const unique;
      std::vector<std::string> fields;
      TRI_index_t* data;

    private:

      TRI_index_t* internals;

    };

@@ -709,6 +709,11 @@ void ArangoServer::buildApplicationServer () {
    LOG_INFO("please use the '--pid-file' option");
    LOG_FATAL_AND_EXIT("no pid-file defined, but daemon or supervisor mode was requested");
  }

  OperationMode::server_operation_mode_e mode = OperationMode::determineMode(_applicationServer->programOptions());
  if (mode != OperationMode::MODE_SERVER) {
    LOG_FATAL_AND_EXIT("invalid mode. must not specify --console together with --daemon or --supervisor");
  }

  // make the pid filename absolute
  int err = 0;

@@ -147,6 +147,7 @@ LogfileManager::LogfileManager (TRI_server_t* server,
    _allowWrites(false), // start in read-only mode
    _hasFoundLastTick(false),
    _inRecovery(true),
    _startCalled(false),
    _slots(nullptr),
    _synchroniserThread(nullptr),
    _allocatorThread(nullptr),

@@ -388,6 +389,7 @@ bool LogfileManager::open () {
  }

  opened = true;
  _startCalled = true;

  int res = runRecovery();

@@ -518,6 +520,10 @@ void LogfileManager::close () {
////////////////////////////////////////////////////////////////////////////////

void LogfileManager::stop () {
  if (! _startCalled) {
    return;
  }

  if (_shutdown > 0) {
    return;
  }

@@ -1042,6 +1042,13 @@ namespace triagens {

        bool _inRecovery;

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief whether or not the logfile manager was properly initialized and
        /// started
        ////////////////////////////////////////////////////////////////////////////////

        bool _startCalled;

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief the slots manager
        ////////////////////////////////////////////////////////////////////////////////

@@ -34,6 +34,7 @@ var generalGraph = require("org/arangodb/general-graph");
var arangodb = require("org/arangodb");
var BinaryHeap = require("org/arangodb/heap").BinaryHeap;
var ArangoError = arangodb.ArangoError;
var ShapedJson = require("internal").ShapedJson; // this may be undefined/null on the client

var db = arangodb.db;

@@ -52,26 +53,23 @@ function clone (obj) {
    return obj;
  }

  var copy, i;

  var copy;
  if (Array.isArray(obj)) {
    copy = [ ];

    for (i = 0; i < obj.length; ++i) {
      copy[i] = clone(obj[i]);
    }
    obj.forEach(function (i) {
      copy.push(clone(i));
    });
  }
  else if (obj instanceof Object) {
    copy = { };

    if (obj.hasOwnProperty) {
      for (i in obj) {
        if (obj.hasOwnProperty(i)) {
          copy[i] = clone(obj[i]);
        }
      }
    if (ShapedJson && obj instanceof ShapedJson) {
      return obj;
    }
    copy = { };
    Object.keys(obj).forEach(function(k) {
      copy[k] = clone(obj[k]);
    });
  }

  return copy;
}

@@ -51,6 +51,46 @@ var RegexCache = { };

var UserFunctions = { };

////////////////////////////////////////////////////////////////////////////////
/// @brief prefab traversal visitors
////////////////////////////////////////////////////////////////////////////////

var DefaultVisitors = {
  "_AQL::PROJECTINGVISITOR" : {
    visitorReturnsResults: true,
    func: function (config, result, vertex) {
      var values = { };
      if (typeof config.data === "object" && Array.isArray(config.data.attributes)) {
        config.data.attributes.forEach(function (attribute) {
          values[attribute] = vertex[attribute];
        });
      }
      return values;
    }
  },
  "_AQL::IDVISITOR" : {
    visitorReturnsResults: true,
    func: function (config, result, vertex) {
      return vertex._id;
    }
  },
  "_AQL::KEYVISITOR" : {
    visitorReturnsResults: true,
    func: function (config, result, vertex) {
      return vertex._key;
    }
  },
  "_AQL::COUNTINGVISITOR" : {
    visitorReturnsResults: false,
    func: function (config, result) {
      if (result.length === 0) {
        result.push(0);
      }
      result[0]++;
    }
  }
};

////////////////////////////////////////////////////////////////////////////////
/// @brief type weight used for sorting and comparing
////////////////////////////////////////////////////////////////////////////////

@@ -194,25 +234,34 @@ function reloadUserFunctions () {
/// @brief get a user-function by name
////////////////////////////////////////////////////////////////////////////////

function GET_USERFUNCTION (name) {
function GET_USERFUNCTION (name, config) {
  var prefix = DB_PREFIX(), reloaded = false;
  var key = name.toUpperCase();

  if (! UserFunctions.hasOwnProperty(prefix)) {
    reloadUserFunctions();
    reloaded = true;
  }

  if (! UserFunctions[prefix].hasOwnProperty(key) && ! reloaded) {
    // last chance
    reloadUserFunctions();
  }

  if (! UserFunctions[prefix].hasOwnProperty(key)) {
    THROW(null, INTERNAL.errors.ERROR_QUERY_FUNCTION_NOT_FOUND, name);
  }
  var func;

  var func = UserFunctions[prefix][key].func;
  if (DefaultVisitors.hasOwnProperty(key)) {
    var visitor = DefaultVisitors[key];
    func = visitor.func;
    config.visitorReturnsResults = visitor.visitorReturnsResults;
  }
  else {
    if (! UserFunctions.hasOwnProperty(prefix)) {
      reloadUserFunctions();
      reloaded = true;
    }

    if (! UserFunctions[prefix].hasOwnProperty(key) && ! reloaded) {
      // last chance
      reloadUserFunctions();
    }

    if (! UserFunctions[prefix].hasOwnProperty(key)) {
      THROW(null, INTERNAL.errors.ERROR_QUERY_FUNCTION_NOT_FOUND, name);
    }

    func = UserFunctions[prefix][key].func;
  }

  if (typeof func !== "function") {
    THROW(null, INTERNAL.errors.ERROR_QUERY_FUNCTION_NOT_FOUND, name);

@@ -225,8 +274,8 @@ function GET_USERFUNCTION (name) {
/// @brief create a user-defined visitor from a function name
////////////////////////////////////////////////////////////////////////////////

function GET_VISITOR (name) {
  var func = GET_USERFUNCTION(name);
function GET_VISITOR (name, config) {
  var func = GET_USERFUNCTION(name, config);

  return function (config, result, vertex, path) {
    try {

@@ -250,8 +299,8 @@ function GET_VISITOR (name) {
/// @brief create a user-defined filter from a function name
////////////////////////////////////////////////////////////////////////////////

function GET_FILTER (name) {
  var func = GET_USERFUNCTION(name);
function GET_FILTER (name, config) {
  var func = GET_USERFUNCTION(name, config);

  return function (config, vertex, path) {
    try {

@@ -389,7 +438,7 @@ function TO_LIST (param, isStringHash) {
function CLONE (obj) {
  "use strict";

  if (obj === null || typeof(obj) !== "object") {
  if (obj === null || typeof(obj) !== "object" || obj instanceof ShapedJson) {
    return obj;
  }

@@ -5059,7 +5108,8 @@ function TRAVERSAL_FUNC (func,
    endVertex : endVertex,
    weight : params.weight,
    defaultWeight : params.defaultWeight,
    prefill : params.prefill
    prefill : params.prefill,
    data: params.data
  };

  if (typeof params.filter === "function") {

@@ -5456,7 +5506,7 @@ function SHORTEST_PATH_PARAMS (params) {

  // add user-defined visitor, if specified
  if (typeof params.visitor === "string") {
    params.visitor = GET_VISITOR(params.visitor);
    params.visitor = GET_VISITOR(params.visitor, params);
  }
  else {
    params.visitor = TRAVERSAL_VISITOR;

@@ -5464,7 +5514,7 @@ function SHORTEST_PATH_PARAMS (params) {

  // add user-defined filter, if specified
  if (typeof params.filter === "string") {
    params.filter = GET_FILTER(params.filter);
    params.filter = GET_FILTER(params.filter, params);
  }

  if (typeof params.distance === "string") {

@@ -5686,7 +5736,7 @@ function TRAVERSAL_PARAMS (params) {

  // add user-defined visitor, if specified
  if (typeof params.visitor === "string") {
    params.visitor = GET_VISITOR(params.visitor);
    params.visitor = GET_VISITOR(params.visitor, params);
  }
  else {
    params.visitor = TRAVERSAL_VISITOR;

@@ -5694,7 +5744,7 @@ function TRAVERSAL_PARAMS (params) {

  // add user-defined filter, if specified
  if (typeof params.filter === "string") {
    params.filter = GET_FILTER(params.filter);
    params.filter = GET_FILTER(params.filter, params);
  }

  return params;

@@ -6045,7 +6095,7 @@ function TRAVERSAL_TREE_PARAMS (params, connectName, func) {

  // add user-defined visitor, if specified
  if (typeof params.visitor === "string") {
    params.visitor = GET_VISITOR(params.visitor);
    params.visitor = GET_VISITOR(params.visitor, params);
  }
  else {
    params.visitor = TRAVERSAL_TREE_VISITOR;

@@ -6053,7 +6103,7 @@ function TRAVERSAL_TREE_PARAMS (params, connectName, func) {

  // add user-defined filter, if specified
  if (typeof params.filter === "string") {
    params.filter = GET_FILTER(params.filter);
    params.filter = GET_FILTER(params.filter, params);
  }

  params.connect = AQL_TO_STRING(connectName);
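Editor's note (illustration only, not part of the commit): the visitor contract wired up above distinguishes two styles. A visitor whose *visitorReturnsResults* flag is false is expected to modify the *result* array in place, while one with the flag set to true has its return value collected. A minimal standalone sketch of both styles, mirroring the predefined COUNTINGVISITOR and IDVISITOR shown earlier:

// counting style: flag is false, the visitor mutates `result` in place
var countingVisitor = function (config, result, vertex, path, connections) {
  if (result.length === 0) {
    result.push(0);
  }
  result[0]++;
};

// returning style: flag is true, each returned value is appended to the result
var idVisitor = function (config, result, vertex, path, connections) {
  return vertex._id;
};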

@@ -2146,10 +2146,12 @@ static bool ParseArray (yyscan_t scanner, TRI_json_t* result) {
        yyextra._message = "out-of-memory";
        return false;
      }

      // be paranoid and initialize the memory
      TRI_InitNullJson(next);

      if (! ParseValue(scanner, next, c)) {
        // be paranoid
        TRI_InitNullJson(next);

        return false;
      }

@@ -2256,10 +2258,12 @@ static bool ParseObject (yyscan_t scanner, TRI_json_t* result) {
      next = static_cast<TRI_json_t*>(TRI_NextVector(&result->_value._objects));
      // we made sure with the reserve call that we haven't run out of memory
      TRI_ASSERT_EXPENSIVE(next != nullptr);

      // be paranoid and initialize the memory
      TRI_InitNullJson(next);

      if (! ParseValue(scanner, next, c)) {
        // be paranoid
        TRI_InitNullJson(next);
        return false;
      }
    }

@@ -229,10 +229,12 @@ static bool ParseArray (yyscan_t scanner, TRI_json_t* result) {
        yyextra._message = "out-of-memory";
        return false;
      }

      // be paranoid and initialize the memory
      TRI_InitNullJson(next);

      if (! ParseValue(scanner, next, c)) {
        // be paranoid
        TRI_InitNullJson(next);

        return false;
      }

@@ -339,10 +341,12 @@ static bool ParseObject (yyscan_t scanner, TRI_json_t* result) {
      next = static_cast<TRI_json_t*>(TRI_NextVector(&result->_value._objects));
      // we made sure with the reserve call that we haven't run out of memory
      TRI_ASSERT_EXPENSIVE(next != nullptr);

      // be paranoid and initialize the memory
      TRI_InitNullJson(next);

      if (! ParseValue(scanner, next, c)) {
        // be paranoid
        TRI_InitNullJson(next);
        return false;
      }
    }

@@ -122,6 +122,7 @@ bool ClientConnection::connectSocket () {
  _socket = _endpoint->connect(_connectTimeout, _requestTimeout);

  if (! TRI_isvalidsocket(_socket)) {
    _errorDetails = std::string("failed to connect : ") + std::string(strerror(errno));
    return false;
  }

@@ -167,6 +167,14 @@ namespace triagens {
        bool handleRead (double, triagens::basics::StringBuffer&,
                         bool& connectionClosed);

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief return the endpoint
        ////////////////////////////////////////////////////////////////////////////////

        const std::string& getErrorDetails () const {
          return _errorDetails;
        }

// -----------------------------------------------------------------------------
// --SECTION-- protected virtual methods
// -----------------------------------------------------------------------------

@@ -215,6 +223,12 @@ namespace triagens {

      protected:

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief details to errors
        ////////////////////////////////////////////////////////////////////////////////

        std::string _errorDetails;

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief endpoint to connect to
        ////////////////////////////////////////////////////////////////////////////////

@@ -288,7 +288,11 @@ namespace triagens {
      TRI_ASSERT(_connection != nullptr);

      if (! _connection->connect()) {
        setErrorMessage("Could not connect to '" + _connection->getEndpoint()->getSpecification() + "'", errno);
        setErrorMessage("Could not connect to '" +
                        _connection->getEndpoint()->getSpecification() +
                        "' '" +
                        _connection->getErrorDetails() +
                        "' '");
        _state = DEAD;
      }
      else {

@@ -348,7 +352,9 @@ namespace triagens {
        case IN_CONNECT:
        default: {
          _result->setResultType(SimpleHttpResult::COULD_NOT_CONNECT);
          setErrorMessage("Could not connect");
          if (!haveErrorMessage()) {
            setErrorMessage("Could not connect");
          }
          break;
        }
      }

@@ -186,6 +186,12 @@ namespace triagens {
          }
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief checks whether an error message is already there
        ////////////////////////////////////////////////////////////////////////////////

        bool haveErrorMessage () { return _errorMessage.size() > 0;}

      private:

        ////////////////////////////////////////////////////////////////////////////////

@@ -146,17 +146,21 @@ bool SslClientConnection::connectSocket () {
  _socket = _endpoint->connect(_connectTimeout, _requestTimeout);

  if (! TRI_isvalidsocket(_socket) || _ctx == nullptr) {
    _errorDetails = std::string("failed to connect : ") + std::string(strerror(errno));
    return false;
  }

  _ssl = SSL_new(_ctx);
  if (_ssl == nullptr) {
    _errorDetails = std::string("failed to create ssl context");
    _endpoint->disconnect();
    TRI_invalidatesocket(&_socket);
    return false;
  }

  if (SSL_set_fd(_ssl, (int) TRI_get_fd_or_handle_of_socket(_socket)) != 1) {
    _errorDetails = std::string("SSL: failed to create context ") +
                    ERR_error_string(ERR_get_error(), NULL);
    _endpoint->disconnect();
    SSL_free(_ssl);
    _ssl = nullptr;

@@ -166,8 +170,49 @@ bool SslClientConnection::connectSocket () {

  SSL_set_verify(_ssl, SSL_VERIFY_NONE, NULL);

  ERR_clear_error();
  int ret = SSL_connect(_ssl);
  if (ret != 1) {
    int errorDetail;
    int certError;

    errorDetail = SSL_get_error(_ssl, ret);
    if ( (errorDetail == SSL_ERROR_WANT_READ) ||
         (errorDetail == SSL_ERROR_WANT_WRITE)) {
      return true;
    }
    errorDetail = ERR_get_error(); /* Gets the earliest error code from the
                                      thread's error queue and removes the
                                      entry. */
    switch(errorDetail) {
      case 0x1407E086:
        /* 1407E086:
           SSL routines:
           SSL2_SET_CERTIFICATE:
           certificate verify failed */
        /* fall-through */
      case 0x14090086:
        /* 14090086:
           SSL routines:
           SSL3_GET_SERVER_CERTIFICATE:
           certificate verify failed */

        certError = SSL_get_verify_result(_ssl);
        if(certError != X509_V_OK) {
          _errorDetails = std::string("SSL: certificate problem: ") +
                          X509_verify_cert_error_string(certError);
        }
        else {
          _errorDetails = std::string("SSL: certificate problem, verify that the CA cert is OK.");
        }
        break;
      default:
        char errorBuffer[256];
        ERR_error_string_n(errorDetail, errorBuffer, sizeof(errorBuffer));
        _errorDetails = std::string("SSL: ") + errorBuffer;
        break;
    }

    _endpoint->disconnect();
    SSL_free(_ssl);
    _ssl = 0;

@@ -238,28 +283,42 @@ bool SslClientConnection::writeClientConnection (void* buffer, size_t length, si
  if (_ssl == 0) {
    return false;
  }

  int errorDetail;
  int written = SSL_write(_ssl, buffer, (int) length);
  switch (SSL_get_error(_ssl, written)) {
    case SSL_ERROR_NONE:
      *bytesWritten = written;
  int err = SSL_get_error(_ssl, written);
  switch (err) {
    case SSL_ERROR_NONE:
      *bytesWritten = written;

      return true;
      return true;

    case SSL_ERROR_ZERO_RETURN:
      SSL_shutdown(_ssl);
      break;
    case SSL_ERROR_ZERO_RETURN:
      SSL_shutdown(_ssl);
      break;

    case SSL_ERROR_WANT_READ:
    case SSL_ERROR_WANT_WRITE:
    case SSL_ERROR_WANT_CONNECT:
    case SSL_ERROR_SYSCALL:
    default: {
      /* fall through */
    }
  }
    case SSL_ERROR_WANT_READ:
    case SSL_ERROR_WANT_WRITE:
    case SSL_ERROR_WANT_CONNECT:
      break;
    case SSL_ERROR_SYSCALL:
      _errorDetails = std::string("SSL: while writing: SYSCALL returned errno = ") +
                      std::to_string(errno) + std::string(" - ") + strerror(errno);
      break;
    case SSL_ERROR_SSL:
      /* A failure in the SSL library occurred, usually a protocol error.
         The OpenSSL error queue contains more information on the error. */
      errorDetail = ERR_get_error();
      char errorBuffer[256];
      ERR_error_string_n(errorDetail, errorBuffer, sizeof(errorBuffer));
      _errorDetails = std::string("SSL: while writing: ") + errorBuffer;

      break;
    default:
      /* a true error */
      _errorDetails = std::string("SSL: while writing: error ") + std::to_string(err);
  }

  return false;
  return false;
}

////////////////////////////////////////////////////////////////////////////////

@@ -308,6 +367,12 @@ again:
    case SSL_ERROR_WANT_CONNECT:
    case SSL_ERROR_SYSCALL:
    default:
      int errorDetail = ERR_get_error();
      char errorBuffer[256];
      ERR_error_string_n(errorDetail, errorBuffer, sizeof(errorBuffer));
      _errorDetails = std::string("SSL: while reading: error '") + std::to_string(errno) +
                      std::string("' - ") + errorBuffer;

      /* unexpected */
      connectionClosed = true;
      return false;