mirror of https://gitee.com/bigwinds/arangodb
Merge branch 'spdvpk' of https://github.com/arangodb/arangodb into spdvpk
commit 7c245bd3a3
@@ -228,7 +228,7 @@ void Parser::parseString() {
 
   while (true) {
     size_t remainder = _size - _pos;
-    if (remainder >= 16) {
+    if (remainder >= 16 + 16) {
       _b->reserveSpace(remainder);
       size_t count;
       if (options->validateUtf8Strings) {

@@ -449,6 +449,10 @@ void ApplicationCluster::stop() {
     }
   }
 
+  while (_heartbeat->isRunning()) {
+    usleep(50000);
+  }
+
   // ClusterComm::cleanup();
   AgencyComm::cleanup();
 }

@@ -454,7 +454,7 @@ SkiplistIterator* SkiplistIndex::lookup(arangodb::Transaction* trx,
 
   // Check if the interval is valid and not empty
   if (intervalValid(leftBorder, rightBorder)) {
-    auto iterator = std::make_unique<SkiplistIterator>(this, reverse, leftBorder, rightBorder);
+    auto iterator = std::make_unique<SkiplistIterator>(reverse, leftBorder, rightBorder);
    if (iterator == nullptr) {
      THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
    }

@@ -462,7 +462,7 @@ SkiplistIterator* SkiplistIndex::lookup(arangodb::Transaction* trx,
   }
 
   // Creates an empty iterator
-  auto iterator = std::make_unique<SkiplistIterator>(this, reverse, nullptr, nullptr);
+  auto iterator = std::make_unique<SkiplistIterator>(reverse, nullptr, nullptr);
   if (iterator == nullptr) {
     THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
   }

@@ -61,7 +61,6 @@ class SkiplistIterator : public IndexIterator {
                            TRI_index_element_t> Node;
 
  private:
-  SkiplistIndex const* _index;
   bool _reverse;
   Node* _cursor;
 
@@ -69,10 +68,9 @@ class SkiplistIterator : public IndexIterator {
   Node* _rightEndPoint;  // Interval right border, first excluded element
 
  public:
-  SkiplistIterator(SkiplistIndex const* idx, bool reverse, Node* left,
+  SkiplistIterator(bool reverse, Node* left,
                    Node* right)
-      : _index(idx),
-        _reverse(reverse),
+      : _reverse(reverse),
        _leftEndPoint(left),
        _rightEndPoint(right) {
    reset();  // Initializes the cursor

@@ -909,7 +909,7 @@ int ContinuousSyncer::applyLog(SimpleHttpResult* response,
 
     processedMarkers++;
 
-    std::shared_ptr<VPackBuilder> builder;
+    auto builder = std::make_shared<VPackBuilder>();
 
     try {
       VPackParser parser(builder);

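Editor's note (not part of this commit): several replication hunks in this change replace a default-constructed std::shared_ptr<VPackBuilder> with std::make_shared<VPackBuilder>(). A default-constructed shared_ptr owns no object, so handing it to a parser that writes through it would dereference a null pointer. Below is a minimal, self-contained C++ sketch of that pattern, using a stand-in Builder type instead of the real velocypack classes (all names here are illustrative only):

// Sketch: why the builder must be allocated before it is handed to a parser.
#include <cassert>
#include <memory>
#include <string>

struct Builder {                       // stand-in for velocypack::Builder (assumption)
  std::string buffer;
  void add(std::string const& s) { buffer += s; }
};

// Stand-in for a parser that writes its output through the shared pointer.
void parseInto(std::shared_ptr<Builder> const& builder, std::string const& input) {
  assert(builder != nullptr);          // the real parser dereferences the pointer
  builder->add(input);
}

int main() {
  std::shared_ptr<Builder> uninitialised;        // old style: holds no Builder at all
  auto builder = std::make_shared<Builder>();    // new style: Builder allocated up front
  parseInto(builder, "{\"tick\":\"1242\"}");
  return uninitialised == nullptr ? 0 : 1;       // the empty pointer must never be used
}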
@@ -1242,7 +1242,7 @@ int ContinuousSyncer::fetchMasterState(std::string& errorMsg,
     startTick = toTick;
   }
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res != TRI_ERROR_NO_ERROR) {

@@ -228,7 +228,7 @@ int InitialSyncer::run(std::string& errorMsg, bool incremental) {
                StringUtils::itoa(response->getHttpReturnCode()) + ": " +
                response->getHttpReturnMessage();
   } else {
-    std::shared_ptr<VPackBuilder> builder;
+    auto builder = std::make_shared<VPackBuilder>();
     res = parseResponse(builder, response.get());
 
     if (res != TRI_ERROR_NO_ERROR) {

@@ -346,7 +346,7 @@ int InitialSyncer::sendStartBatch(std::string& errorMsg) {
     return TRI_ERROR_REPLICATION_MASTER_ERROR;
   }
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res != TRI_ERROR_NO_ERROR) {

@@ -495,7 +495,7 @@ int InitialSyncer::applyCollectionDump(
 
       TRI_ASSERT(q <= end);
 
-      std::shared_ptr<VPackBuilder> builder;
+      auto builder = std::make_shared<VPackBuilder>();
 
       try {
         VPackParser parser(builder);

@@ -923,7 +923,7 @@ int InitialSyncer::handleCollectionSync(
       this->sleep(static_cast<uint64_t>(sleepTime * 1000.0 * 1000.0));
     }
 
-    std::shared_ptr<VPackBuilder> builder;
+    auto builder = std::make_shared<VPackBuilder>();
     int res = parseResponse(builder, response.get());
 
     if (res != TRI_ERROR_NO_ERROR) {

@@ -1148,7 +1148,7 @@ int InitialSyncer::handleSyncKeys(TRI_vocbase_col_t* col,
     return TRI_ERROR_REPLICATION_MASTER_ERROR;
   }
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res != TRI_ERROR_NO_ERROR) {

@@ -1350,7 +1350,7 @@ int InitialSyncer::handleSyncKeys(TRI_vocbase_col_t* col,
     return TRI_ERROR_REPLICATION_MASTER_ERROR;
   }
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res != TRI_ERROR_NO_ERROR) {

@@ -1520,7 +1520,7 @@ int InitialSyncer::handleSyncKeys(TRI_vocbase_col_t* col,
     return TRI_ERROR_REPLICATION_MASTER_ERROR;
   }
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res != TRI_ERROR_NO_ERROR) {

@@ -206,7 +206,7 @@ int Syncer::sendCreateBarrier(std::string& errorMsg, TRI_voc_tick_t minTick) {
                ": HTTP " + StringUtils::itoa(response->getHttpReturnCode()) +
                ": " + response->getHttpReturnMessage();
   } else {
-    std::shared_ptr<VPackBuilder> builder;
+    auto builder = std::make_shared<VPackBuilder>();
     res = parseResponse(builder, response.get());
 
     if (res == TRI_ERROR_NO_ERROR) {

@@ -642,7 +642,7 @@ int Syncer::getMasterState(std::string& errorMsg) {
   }
 
 
-  std::shared_ptr<VPackBuilder> builder;
+  auto builder = std::make_shared<VPackBuilder>();
   int res = parseResponse(builder, response.get());
 
   if (res == TRI_ERROR_NO_ERROR) {

@@ -64,6 +64,59 @@ static OperationResult FetchDocumentById(arangodb::Transaction* trx,
   return opRes;
 }
 
+////////////////////////////////////////////////////////////////////////////////
+/// @brief Local class to expand edges.
+///        Will be handed over to the path finder
+////////////////////////////////////////////////////////////////////////////////
+
+struct BasicExpander {
+ private:
+  std::vector<EdgeCollectionInfo*> const _colls;
+  arangodb::Transaction* _trx;
+  TRI_edge_direction_e _dir;
+  std::shared_ptr<OperationResult> _opRes;
+
+ public:
+  BasicExpander(std::vector<EdgeCollectionInfo*> const& colls,
+                arangodb::Transaction* trx, TRI_edge_direction_e dir)
+      : _colls(colls),
+        _trx(trx),
+        _dir(dir),
+        _opRes(std::make_shared<OperationResult>(TRI_ERROR_NO_ERROR)){};
+
+  void operator()(std::string const& v, std::vector<std::string>& res_edges,
+                  std::vector<std::string>& neighbors) {
+    for (auto const& edgeCollection : _colls) {
+      TRI_ASSERT(edgeCollection != nullptr);
+      std::shared_ptr<OperationCursor> edgeCursor = edgeCollection->getEdges(_dir, v);
+      while (edgeCursor->hasMore()) {
+        edgeCursor->getMore(_opRes);
+        if (_opRes->failed()) {
+          THROW_ARANGO_EXCEPTION(_opRes->code);
+        }
+        VPackSlice edges = _opRes->slice();
+
+        for (auto const& edge : VPackArrayIterator(edges)) {
+          std::string edgeId = _trx->extractIdString(edge);
+          std::string from = edge.get(TRI_VOC_ATTRIBUTE_FROM).copyString();
+          if (from == v) {
+            std::string to = edge.get(TRI_VOC_ATTRIBUTE_TO).copyString();
+            if (to != v) {
+              res_edges.emplace_back(edgeId);
+              neighbors.emplace_back(to);
+            }
+          } else {
+            res_edges.emplace_back(edgeId);
+            neighbors.emplace_back(from);
+          }
+
+        }
+      }
+    }
+  }
+};
+
+
 EdgeCollectionInfo::EdgeCollectionInfo(arangodb::Transaction* trx,
                                        std::string const& collectionName,
                                        WeightCalculatorFunction weighter)

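Editor's note (not part of this commit): BasicExpander above only needs to be callable as expander(vertex, edges_out, neighbors_out), which is why it can replace the two per-direction lambdas removed in the next hunk and still be handed to the path finder. A minimal, self-contained sketch of that callable shape follows, with a toy in-memory graph standing in for ArangoDB's edge collections; every name in it is illustrative, not ArangoDB API:

// Sketch: a search routine that only relies on the expander's call signature.
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

using Expander = std::function<void(std::string const&, std::vector<std::string>&,
                                    std::vector<std::string>&)>;

// Expand one frontier level and return the newly reached vertices.
std::vector<std::string> expandLevel(Expander const& expand,
                                     std::vector<std::string> const& frontier) {
  std::vector<std::string> next;
  for (auto const& v : frontier) {
    std::vector<std::string> edges;
    std::vector<std::string> neighbors;
    expand(v, edges, neighbors);                 // functor or lambda, both work
    next.insert(next.end(), neighbors.begin(), neighbors.end());
  }
  return next;
}

int main() {
  // Toy adjacency list standing in for the edge collections.
  std::unordered_map<std::string, std::vector<std::string>> graph{
      {"A", {"B", "C"}}, {"B", {"D"}}, {"C", {}}, {"D", {}}};

  Expander expander = [&graph](std::string const& v, std::vector<std::string>& edges,
                               std::vector<std::string>& neighbors) {
    for (auto const& to : graph[v]) {
      edges.push_back(v + "->" + to);
      neighbors.push_back(to);
    }
  };

  for (auto const& v : expandLevel(expander, {"A"})) {
    std::cout << v << "\n";                      // prints B and C
  }
  return 0;
}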
@@ -527,70 +580,8 @@ TRI_RunSimpleShortestPathSearch(
     backward = TRI_EDGE_ANY;
   }
 
-  auto fwExpander =
-      [&collectionInfos, forward, trx](std::string const& v, std::vector<std::string>& res_edges,
-                                       std::vector<std::string>& neighbors) {
-        for (auto const& edgeCollection : collectionInfos) {
-          TRI_ASSERT(edgeCollection != nullptr);
-          std::shared_ptr<OperationCursor> edgeCursor = edgeCollection->getEdges(forward, v);
-          auto opRes = std::make_shared<OperationResult>(TRI_ERROR_NO_ERROR);
-          while (edgeCursor->hasMore()) {
-            edgeCursor->getMore(opRes);
-            if (opRes->failed()) {
-              THROW_ARANGO_EXCEPTION(opRes->code);
-            }
-            VPackSlice edges = opRes->slice();
-
-            for (auto const& edge : VPackArrayIterator(edges)) {
-              std::string edgeId = trx->extractIdString(edge);
-              std::string from = edge.get(TRI_VOC_ATTRIBUTE_FROM).copyString();
-              if (from == v) {
-                std::string to = edge.get(TRI_VOC_ATTRIBUTE_TO).copyString();
-                if (to == v) {
-                  res_edges.emplace_back(edgeId);
-                  neighbors.emplace_back(to);
-                }
-              } else {
-                res_edges.emplace_back(edgeId);
-                neighbors.emplace_back(from);
-              }
-
-            }
-          }
-        }
-      };
-  auto bwExpander =
-      [&collectionInfos, backward, trx](std::string const& v, std::vector<std::string>& res_edges,
-                                        std::vector<std::string>& neighbors) {
-        auto opRes = std::make_shared<OperationResult>(TRI_ERROR_NO_ERROR);
-        for (auto const& edgeCollection : collectionInfos) {
-          TRI_ASSERT(edgeCollection != nullptr);
-          std::shared_ptr<OperationCursor> edgeCursor = edgeCollection->getEdges(backward, v);
-          while (edgeCursor->hasMore()) {
-            edgeCursor->getMore(opRes);
-            if (opRes->failed()) {
-              THROW_ARANGO_EXCEPTION(opRes->code);
-            }
-            VPackSlice edges = opRes->slice();
-
-            for (auto const& edge : VPackArrayIterator(edges)) {
-              std::string edgeId = trx->extractIdString(edge);
-              std::string from = edge.get(TRI_VOC_ATTRIBUTE_FROM).copyString();
-              if (from == v) {
-                std::string to = edge.get(TRI_VOC_ATTRIBUTE_TO).copyString();
-                if (to == v) {
-                  res_edges.emplace_back(edgeId);
-                  neighbors.emplace_back(to);
-                }
-              } else {
-                res_edges.emplace_back(edgeId);
-                neighbors.emplace_back(from);
-              }
-
-            }
-          }
-        }
-      };
+  auto fwExpander = BasicExpander(collectionInfos, trx, forward);
+  auto bwExpander = BasicExpander(collectionInfos, trx, backward);
 
   ArangoDBConstDistancePathFinder pathFinder(fwExpander, bwExpander);
   auto path = std::make_unique<ArangoDBConstDistancePathFinder::Path>();

@@ -814,24 +805,34 @@ void SingleServerTraversalPath::pathToVelocyPack(Transaction* trx,
   result.add(VPackValue("edges"));
   result.openArray();
   for (auto const& it : _path.edges) {
-    getDocumentByIdentifier(trx, it, result);
+    auto cached = _traverser->_edges.find(it);
+    // All edges are cached!!
+    TRI_ASSERT(cached != _traverser->_edges.end());
+    result.add(VPackSlice(cached->second->data()));
   }
   result.close();
   result.add(VPackValue("vertices"));
   result.openArray();
   for (auto const& it : _path.vertices) {
-    getDocumentByIdentifier(trx, it, result);
+    std::shared_ptr<VPackBuffer<uint8_t>> vertex =
+        _traverser->fetchVertexData(it);
+    result.add(VPackSlice(vertex->data()));
   }
   result.close();
   result.close();
 }
 
 void SingleServerTraversalPath::lastEdgeToVelocyPack(Transaction* trx, VPackBuilder& result) {
-  getDocumentByIdentifier(trx, _path.edges.back(), result);
+  auto cached = _traverser->_edges.find(_path.edges.back());
+  // All edges are cached!!
+  TRI_ASSERT(cached != _traverser->_edges.end());
+  result.add(VPackSlice(cached->second->data()));
 }
 
 void SingleServerTraversalPath::lastVertexToVelocyPack(Transaction* trx, VPackBuilder& result) {
-  getDocumentByIdentifier(trx, _path.vertices.back(), result);
+  std::shared_ptr<VPackBuffer<uint8_t>> vertex =
+      _traverser->fetchVertexData(_path.vertices.back());
+  result.add(VPackSlice(vertex->data()));
 }
 
 DepthFirstTraverser::DepthFirstTraverser(

@@ -852,6 +853,7 @@ bool DepthFirstTraverser::edgeMatchesConditions(VPackSlice e, size_t depth) {
     TRI_ASSERT(exp != nullptr);
 
     if (exp->isEdgeAccess && !exp->matchesCheck(_trx, e)) {
+      LOG(INFO) << "Edge Match Condition";
       ++_filteredPaths;
       return false;
     }

@@ -875,25 +877,10 @@ bool DepthFirstTraverser::vertexMatchesConditions(std::string const& v,
     if (!exp->isEdgeAccess) {
       if (fetchVertex) {
         fetchVertex = false;
-        auto it = _vertices.find(v);
-        if (it == _vertices.end()) {
-          OperationResult opRes =
-              FetchDocumentById(_trx, v, _builder, _operationOptions);
-          ++_readDocuments;
-          if (opRes.failed()) {
-            TRI_ASSERT(opRes.code == TRI_ERROR_ARANGO_DOCUMENT_NOT_FOUND);
-            VPackBuilder tmp;
-            tmp.add(VPackValue(VPackValueType::Null));
-            vertex = tmp.steal();
-          } else {
-            vertex = opRes.buffer;
-          }
-          _vertices.emplace(v, vertex);
-        } else {
-          vertex = it->second;
-        }
+        vertex = fetchVertexData(v);
       }
       if (!exp->matchesCheck(_trx, VPackSlice(vertex->data()))) {
+        LOG(INFO) << "Vertex Match Condition";
         ++_filteredPaths;
         return false;
       }

@@ -903,6 +890,25 @@ bool DepthFirstTraverser::vertexMatchesConditions(std::string const& v,
   return true;
 }
 
+std::shared_ptr<VPackBuffer<uint8_t>> DepthFirstTraverser::fetchVertexData(
+    std::string const& id) {
+  auto it = _vertices.find(id);
+  if (it == _vertices.end()) {
+    OperationResult opRes =
+        FetchDocumentById(_trx, id, _builder, _operationOptions);
+    ++_readDocuments;
+    if (opRes.failed()) {
+      TRI_ASSERT(opRes.code == TRI_ERROR_ARANGO_DOCUMENT_NOT_FOUND);
+      VPackBuilder tmp;
+      tmp.add(VPackValue(VPackValueType::Null));
+      return tmp.steal();
+    }
+    _vertices.emplace(id, opRes.buffer);
+    return opRes.buffer;
+  }
+  return it->second;
+}
+
 void DepthFirstTraverser::_defInternalFunctions() {
   _getVertex = [this](std::string const& edge, std::string const& vertex, size_t depth,
                       std::string& result) -> bool {

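Editor's note (not part of this commit): fetchVertexData above follows a lookup-or-fetch pattern: the _vertices map is consulted first, and a document is only read (and counted in _readDocuments) on a cache miss, which is what the reduced scannedIndex expectations in the test hunks further down reflect. A minimal, self-contained sketch of that pattern, with a hypothetical FetchDoc standing in for the real document read:

// Sketch: cache lookup first, fetch and insert only on a miss.
#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

using Buffer = std::shared_ptr<std::string>;

Buffer FetchDoc(std::string const& id) {        // assumed expensive document read
  return std::make_shared<std::string>("{\"_id\":\"" + id + "\"}");
}

class VertexCache {
 public:
  Buffer get(std::string const& id) {
    auto it = _vertices.find(id);
    if (it == _vertices.end()) {
      Buffer doc = FetchDoc(id);
      ++_readDocuments;                         // only misses count as reads
      _vertices.emplace(id, doc);
      return doc;
    }
    return it->second;                          // cache hit: no new read
  }
  size_t reads() const { return _readDocuments; }

 private:
  std::unordered_map<std::string, Buffer> _vertices;
  size_t _readDocuments = 0;
};

int main() {
  VertexCache cache;
  cache.get("persons/alice");
  cache.get("persons/alice");                   // served from the cache
  std::cout << cache.reads() << "\n";           // prints 1
  return 0;
}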
@@ -946,6 +952,7 @@ void DepthFirstTraverser::setStartVertex(std::string const& v) {
       _vertices.emplace(v, vertex);
     }
     if (!exp->matchesCheck(_trx, VPackSlice(vertex->data()))) {
+      LOG(INFO) << "Start Vertex Match Condition";
       ++_filteredPaths;
       _done = true;
       return;

@@ -974,7 +981,7 @@ TraversalPath* DepthFirstTraverser::next() {
     return nullptr;
   }
 
-  auto p = std::make_unique<SingleServerTraversalPath>(path);
+  auto p = std::make_unique<SingleServerTraversalPath>(path, this);
   if (countEdges >= _opts.maxDepth) {
     _pruneNext = true;
   }

@@ -1036,6 +1043,9 @@ void DepthFirstTraverser::EdgeGetter::nextEdge(
         cursor->getMore(opRes);
+        TRI_ASSERT(last != nullptr);
+        *last = 0;
         edge = opRes->slice();
         TRI_ASSERT(edge.isArray());
+        _traverser->_readDocuments += edge.length();
         continue;
       }
       eColIdx++;

@@ -1053,14 +1063,32 @@ void DepthFirstTraverser::EdgeGetter::nextEdge(
       }
       edge = edge.at(*last);
+      if (!_traverser->edgeMatchesConditions(edge, edges.size())) {
+        ++_traverser->_filteredPaths;
+        TRI_ASSERT(last != nullptr);
+        (*last)++;
+        continue;
+      }
       std::string id = _trx->extractIdString(edge);
       // test if edge is already on this path
       auto found = std::find(edges.begin(), edges.end(), id);
       if (found != edges.end()) {
         // This edge is already on the path, next
         TRI_ASSERT(last != nullptr);
         (*last)++;
         continue;
       }
+
+      VPackBuilder tmpBuilder = VPackBuilder::clone(edge);
+      _traverser->_edges.emplace(id, tmpBuilder.steal());
+
+      std::string other;
+      _traverser->_getVertex(id, startVertex, 0, other);
+      if (!_traverser->vertexMatchesConditions(other, edges.size() + 1)) {
+        // Retry with the next element
+        TRI_ASSERT(last != nullptr);
+        (*last)++;
+        continue;
+      }
       edges.emplace_back(id);
       return;
     }

@@ -50,6 +50,10 @@ typedef arangodb::basics::ConstDistanceFinder<
 namespace arangodb {
 namespace traverser {
 
+// Forward declaration
+
+class DepthFirstTraverser;
+
 // A collection of shared options used in several functions.
 // Should not be used directly, use specialization instead.
 struct BasicOptions {

@@ -146,11 +150,13 @@ struct ShortestPathOptions : BasicOptions {
                       arangodb::velocypack::Slice) const override;
 };
 
 
 class SingleServerTraversalPath : public TraversalPath {
  public:
   explicit SingleServerTraversalPath(
-      arangodb::basics::EnumeratedPath<std::string, std::string> const& path)
-      : _path(path) {}
+      arangodb::basics::EnumeratedPath<std::string, std::string> const& path,
+      DepthFirstTraverser* traverser)
+      : _traverser(traverser), _path(path) {}
 
   ~SingleServerTraversalPath() {}
 

@@ -168,6 +174,8 @@ class SingleServerTraversalPath : public TraversalPath {
   void getDocumentByIdentifier(Transaction*, std::string const&,
                                arangodb::velocypack::Builder&);
 
+  DepthFirstTraverser* _traverser;
+
   arangodb::basics::EnumeratedPath<std::string, std::string> _path;
 
   arangodb::velocypack::Builder _searchBuilder;

@@ -176,6 +184,8 @@ class SingleServerTraversalPath : public TraversalPath {
 
 class DepthFirstTraverser : public Traverser {
 
+  friend class SingleServerTraversalPath;
+
  private:
   //////////////////////////////////////////////////////////////////////////////
   /// @brief callable class to load edges based on opts.

@@ -348,6 +358,9 @@ class DepthFirstTraverser : public Traverser {
   bool edgeMatchesConditions(arangodb::velocypack::Slice, size_t);
 
   bool vertexMatchesConditions(std::string const&, size_t);
+
+  std::shared_ptr<arangodb::velocypack::Buffer<uint8_t>> fetchVertexData(
+      std::string const&);
 };
 }
 }

@@ -243,7 +243,7 @@ bool TraverserExpression::matchesCheck(arangodb::Transaction* trx,
       VPackSlice result = arangodb::basics::VelocyPackHelper::NullValue();
       // perform recursive check. this may modify value
       if (recursiveCheck(varAccess, value)) {
-        result = element;
+        result = value;
       }
 
       TRI_ASSERT(compareTo != nullptr);

@@ -1335,7 +1335,8 @@ function ahuacatlQueryGeneralPathsTestSuite() {
     testPathsWithDirectionAnyAndMaxLength1: function () {
       var actual, result = {}, i = 0, ed;
 
-      actual = getQueryResults("FOR p IN GRAPH_PATHS('bla3', {direction :'any', minLength : 1 , maxLength: 1}) SORT p.source._key, p.destination._key RETURN [p.source._key, p.destination._key, p.edges]");
+      actual = getQueryResults("FOR p IN GRAPH_PATHS('bla3', {direction :'any', minLength : 1 , maxLength: 1}) SORT p.source._key, p.destination._key, p.edges[0].what RETURN [p.source._key, p.destination._key, p.edges]");
 
       actual.forEach(function (p) {
         i++;
         ed = "";

@@ -1345,8 +1346,8 @@ function ahuacatlQueryGeneralPathsTestSuite() {
         result[i + ":" + p[0] + p[1]] = ed;
       });
 
-      assertEqual(result["1:v1v2"], "|v2->v1");
-      assertEqual(result["2:v1v2"], "|v1->v2");
+      assertEqual(result["1:v1v2"], "|v1->v2");
+      assertEqual(result["2:v1v2"], "|v2->v1");
       assertEqual(result["3:v1v5"], "|v1->v5");
       assertEqual(result["4:v2v1"], "|v1->v2");
       assertEqual(result["5:v2v1"], "|v2->v1");

@@ -1458,17 +1458,14 @@ function complexFilteringSuite () {
         assertEqual(stats.scannedIndex, 9);
       }
       else {
-        // Cluster uses a lookup cache.
-        // Pointless in single-server mode
-        // Makes Primary Lookups for data
-
         // 2 Edge Lookups (A)
         // 2 Primary (B, D) for Filtering
         // 2 Edge Lookups (B)
-        // 3 Primary Lookups A -> B
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 17);
+        // All edges are cached
+        // 1 Primary Lookups A -> B (B cached)
+        // 1 Primary Lookups A -> B -> C (A, B cached)
+        // 1 Primary Lookups A -> B -> F (A, B cached)
+        assertEqual(stats.scannedIndex, 9);
       }
       // 1 Filter On D
       assertEqual(stats.filtered, 1);

@@ -1500,11 +1497,11 @@ function complexFilteringSuite () {
         // 2 Edge Lookups (A)
         // 4 Edge Lookups (2 B) (2 D)
         // 4 Primary Lookups for Eval (C, E, G, F)
-        // 3 Primary Lookups A -> B
-        // 3 Primary Lookups A -> D
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 24);
+        // 2 Primary Lookups A -> B (A, B)
+        // 1 Primary Lookups A -> D (D)
+        // 0 Primary Lookups A -> B -> C
+        // 0 Primary Lookups A -> B -> F
+        assertEqual(stats.scannedIndex, 13);
       }
       // 2 Filter (E, G)
       // 2 Filter A->B, A->D path too short

@@ -1539,8 +1536,8 @@ function complexFilteringSuite () {
         // 2 Primary Lookups for Eval (B, D)
         // 2 Edge Lookups (0 B) (2 D)
         // 2 Primary Lookups for Eval (E, G)
-        // 3 Primary Lookups A -> D
-        assertEqual(stats.scannedIndex, 11);
+        // 1 Primary Lookups A -> D
+        assertEqual(stats.scannedIndex, 9);
       }
       // 1 Filter (B)
       // 2 Filter (E, G)

@@ -1573,10 +1570,10 @@ function complexFilteringSuite () {
       else {
         // 2 Edge Lookups (A)
         // 2 Edge Lookups (B)
-        // 3 Primary Lookups A -> B
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 15);
+        // 2 Primary Lookups A -> B
+        // 1 Primary Lookups A -> B -> C
+        // 1 Primary Lookups A -> B -> F
+        assertEqual(stats.scannedIndex, 8);
       }
       // 1 Filter (A->D)
       assertEqual(stats.filtered, 1);

@@ -1607,11 +1604,11 @@ function complexFilteringSuite () {
       else {
         // 2 Edge Lookups (A)
         // 4 Edge Lookups (2 B) (2 D)
-        // 3 Primary Lookups A -> B
-        // 3 Primary Lookups A -> D
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 20);
+        // 2 Primary Lookups A -> B
+        // 1 Primary Lookups A -> D
+        // 1 Primary Lookups A -> B -> C
+        // 1 Primary Lookups A -> B -> F
+        assertEqual(stats.scannedIndex, 11);
       }
       // 2 Filter On (D->E, D->G)
       // Filter on A->D, A->B because path is too short is not counted. No Document!

@@ -1659,10 +1656,10 @@ function complexFilteringSuite () {
         // 2 Edge Lookups (A)
         // 2 Primary (B, D) for Filtering
         // 2 Edge Lookups (B)
-        // 3 Primary Lookups A -> B
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 17);
+        // 1 Primary Lookups A -> B
+        // 1 Primary Lookups A -> B -> C
+        // 1 Primary Lookups A -> B -> F
+        assertEqual(stats.scannedIndex, 9);
       }
       // 1 Filter On D
       assertEqual(stats.filtered, 1);

@@ -1710,10 +1707,10 @@ function complexFilteringSuite () {
         // 2 Edge Lookups (A)
         // 2 Primary (B, D) for Filtering
         // 2 Edge Lookups (B)
-        // 3 Primary Lookups A -> B
-        // 4 Primary Lookups A -> B -> C
-        // 4 Primary Lookups A -> B -> F
-        assertEqual(stats.scannedIndex, 17);
+        // 1 Primary Lookups A -> B
+        // 1 Primary Lookups A -> B -> C
+        // 1 Primary Lookups A -> B -> F
+        assertEqual(stats.scannedIndex, 9);
       }
       // 1 Filter On D
       assertEqual(stats.filtered, 1);

@@ -431,14 +431,16 @@ function ahuacatlQueryEdgesTestSuite () {
       var actual;
 
       var bindVars = {start: {_id: "v1"}}; // No collection
       assertQueryError(errors.ERROR_ARANGO_DOCUMENT_HANDLE_BAD.code, q, bindVars);
 
       bindVars = {start: "UnitTestTheFuxx/v1"}; // Non existing collection
-      actual = getQueryResults(query, bindVars);
+      actual = getQueryResults(q, bindVars);
       assertEqual(actual, [ ]);
 
-      bindVars = {start: { id: "UnitTestTheFuxx/v1" } }; // No _id attribute
-      assertQueryError(errors.ERROR_ARANGO_DOCUMENT_HANDLE_BAD.code, q, bindVars);
+      bindVars = {start: "UnitTestTheFuxx/v1"}; // Non existing collection
+      actual = getQueryResults(q, bindVars);
+      assertEqual(actual, [ ]);
+
+      bindVars = {start: { id: "UnitTestTheFuxx/v1"}}; // No _id attribute
+      actual = getQueryResults(q, bindVars);
+      assertEqual(actual, [ ]);
 
       bindVars = {start: [{ id: "UnitTestTheFuxx/v1" }] }; // Error in Array
       actual = getQueryResults(q, bindVars);

@@ -1250,7 +1252,6 @@ function ahuacatlQueryShortestPathTestSuite () {
       };
 
       var actual = getQueryResults("RETURN SHORTEST_PATH(@@v, @@e, '" + vn + "/H', '" + vn + "/A', 'inbound', " + JSON.stringify(config) + ").vertices", { "@v" : vn, "@e" : en });
-      // var actual = getQueryResults("FOR p IN SHORTEST_PATH(@@v, @@e, '" + vn + "/A', '" + vn + "/H', 'outbound', " + JSON.stringify(config) + ") RETURN p.vertex._key", { "@v" : vn, "@e" : en });
 
       assertEqual([[ vn + "/H", vn + "/G", vn + "/E", vn + "/D", vn + "/A" ]], actual);
     },