
Merged remote changes with local stash

Michael Hackstein 2015-12-02 11:34:58 +01:00
parent 6ce29823a8
commit 1a13af6f36
10 changed files with 300 additions and 58 deletions

View File

@@ -273,7 +273,9 @@ int TraversalBlock::initializeCursor (AqlItemBlock* items,
bool TraversalBlock::morePaths (size_t hint) {
freeCaches();
_posInPaths = 0;
if (!_traverser->hasMore()) {
if (! _traverser->hasMore()) {
_engine->_stats.scannedIndex += _traverser->getAndResetReadDocuments();
_engine->_stats.filtered += _traverser->getAndResetFilteredPaths();
return false;
}
auto en = static_cast<TraversalNode const*>(getPlanNode());
@@ -301,7 +303,11 @@ bool TraversalBlock::morePaths (size_t hint) {
if ( usesPathOutput() ) {
_paths.emplace_back(pathValue);
}
_engine->_stats.scannedIndex += p->getReadDocuments();
}
_engine->_stats.scannedIndex += _traverser->getAndResetReadDocuments();
_engine->_stats.filtered += _traverser->getAndResetFilteredPaths();
// This is only safe as long as _vertices is still built
return _vertices.size() > 0;
}
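
The statistics handoff above follows a get-and-reset pattern: morePaths () drains the traverser's counters into _engine->_stats exactly once per batch, so neither the early return nor a later call can double-count. A minimal standalone sketch of that pattern (class and variable names here are illustrative, not taken from the ArangoDB sources):

#include <cstddef>
#include <iostream>

// Illustrative counter with the same contract as
// getAndResetReadDocuments () / getAndResetFilteredPaths () above:
// reading the accumulated value also clears it.
class StatCounter {
  size_t _value = 0;

  public:

    void bump (size_t n = 1) { _value += n; }

    size_t getAndReset () {
      size_t tmp = _value;
      _value = 0;
      return tmp;
    }
};

int main () {
  StatCounter scannedIndex;
  size_t total = 0;

  scannedIndex.bump(3);
  total += scannedIndex.getAndReset();   // total == 3
  total += scannedIndex.getAndReset();   // second drain adds 0, no double-count

  scannedIndex.bump(2);
  total += scannedIndex.getAndReset();   // total == 5

  std::cout << total << std::endl;       // prints 5
  return 0;
}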

View File

@@ -1036,8 +1036,6 @@ static void insertIntoShardMap (ClusterInfo* ci,
////////////////////////////////////////////////////////////////////////////////
/// @brief get a list of filtered documents in a coordinator
/// All found documents will be inserted into result.
/// After execution the documentIds will contain only all those
/// ids that could not be found.
////////////////////////////////////////////////////////////////////////////////
int getFilteredDocumentsOnCoordinator (
@@ -1288,22 +1286,18 @@ int getFilteredEdgesOnCoordinator (
}
for (it = shards.begin(); it != shards.end(); ++it) {
map<string, string>* headers = new map<string, string>;
res = cc->asyncRequest("", coordTransactionID, "shard:" + it->first,
triagens::rest::HttpRequest::HTTP_REQUEST_GET,
"/_db/" + StringUtils::urlEncode(dbname) + "/_api/edges/" + it->first + queryParameters,
0, false, headers, nullptr, 3600.0);
/*
res = cc->asyncRequest("", coordTransactionID, "shard:" + it->first,
triagens::rest::HttpRequest::HTTP_REQUEST_PUT,
"/_db/" + StringUtils::urlEncode(dbname) + "/_api/edges/" + it->first + queryParameters,
&reqBodyString, false, headers, nullptr, 3600.0);
*/
delete res;
}
// Now listen to the results:
int count;
responseCode = triagens::rest::HttpResponse::OK;
contentType = "application/json; charset=utf-8";
size_t filtered = 0;
size_t scannedIndex = 0;
triagens::basics::Json documents(triagens::basics::Json::Array);
@@ -1345,10 +1339,15 @@ int getFilteredEdgesOnCoordinator (
documents.transfer(doc);
}
filtered += triagens::basics::JsonHelper::getNumericValue<size_t>(shardResult.get(), "filtered", 0);
scannedIndex += triagens::basics::JsonHelper::getNumericValue<size_t>(shardResult.get(), "scannedIndex", 0);
delete res;
}
result("edges", documents);
result("filtered", triagens::basics::Json(static_cast<int32_t>(filtered)));
result("scannedIndex", triagens::basics::Json(static_cast<int32_t>(scannedIndex)));
return TRI_ERROR_NO_ERROR;
}
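
Each shard reports its own filtered and scannedIndex values in its JSON answer; the coordinator sums them across all responses and attaches the totals to the merged result, so the client sees cluster-wide figures. A sketch of just that aggregation step, assuming plain structs in place of triagens::basics::Json:

#include <cstddef>
#include <vector>

// Stand-in for one shard's parsed statistics (illustrative only;
// the real code extracts these with JsonHelper::getNumericValue).
struct ShardStats {
  size_t filtered;
  size_t scannedIndex;
};

// Sum the per-shard counters into cluster-wide totals, the same
// accumulation the loop above performs while merging "edges" arrays.
ShardStats aggregateStats (std::vector<ShardStats> const& responses) {
  ShardStats total = { 0, 0 };
  for (auto const& r : responses) {
    total.filtered     += r.filtered;
    total.scannedIndex += r.scannedIndex;
  }
  return total;
}

int main () {
  std::vector<ShardStats> responses = { { 1, 4 }, { 0, 7 } };
  ShardStats total = aggregateStats(responses);
  // total.filtered == 1, total.scannedIndex == 11
  return (total.filtered == 1 && total.scannedIndex == 11) ? 0 : 1;
}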

View File

@@ -163,8 +163,6 @@ namespace triagens {
////////////////////////////////////////////////////////////////////////////////
/// @brief get a list of filtered documents in a coordinator
/// All found documents will be inserted into result.
/// After execution the documentIds will contain only all those
/// ids that could not be found.
////////////////////////////////////////////////////////////////////////////////
int getFilteredDocumentsOnCoordinator (

View File

@@ -30,6 +30,7 @@
#include "Cluster/ClusterTraverser.h"
#include "Cluster/ClusterMethods.h"
#include <iostream>
using ClusterTraversalPath = triagens::arango::traverser::ClusterTraversalPath;
using ClusterTraverser = triagens::arango::traverser::ClusterTraverser;
@@ -38,7 +39,7 @@ using ClusterTraverser = triagens::arango::traverser::ClusterTraverser;
// -----------------------------------------------------------------------------
triagens::basics::Json* ClusterTraversalPath::pathToJson (Transaction*,
CollectionNameResolver*) const {
CollectionNameResolver*) {
std::unique_ptr<triagens::basics::Json> result(new triagens::basics::Json(triagens::basics::Json::Object));
size_t vCount = _path.vertices.size();
triagens::basics::Json vertices(triagens::basics::Json::Array, vCount);
@@ -55,12 +56,12 @@ triagens::basics::Json* ClusterTraversalPath::pathToJson (Transaction*,
}
triagens::basics::Json* ClusterTraversalPath::lastEdgeToJson (Transaction*,
CollectionNameResolver*) const {
CollectionNameResolver*) {
return _traverser->edgeToJson(_path.edges.back());
}
triagens::basics::Json* ClusterTraversalPath::lastVertexToJson (Transaction*,
CollectionNameResolver*) const {
CollectionNameResolver*) {
return _traverser->vertexToJson(_path.vertices.back());
}
@@ -87,6 +88,7 @@ bool ClusterTraverser::VertexGetter::operator() (std::string const& edgeId,
if (exp != _traverser->_expressions->end()) {
auto v = _traverser->_vertices.find(result);
if (v == _traverser->_vertices.end()) {
++_traverser->_filteredPaths;
return false;
}
if (! _traverser->vertexMatchesCondition(v->second, exp->second)) {
@@ -135,6 +137,12 @@ void ClusterTraverser::EdgeGetter::operator() (std::string const& startVertex,
THROW_ARANGO_EXCEPTION(res);
}
triagens::basics::Json edgesJson = resultEdges.get("edges");
size_t read = triagens::basics::JsonHelper::getNumericValue<size_t>(resultEdges.json(), "scannedIndex", 0);
size_t filter = triagens::basics::JsonHelper::getNumericValue<size_t>(resultEdges.json(), "filtered", 0);
_traverser->_readDocuments += read;
std::cout << "Added filtered Edges: " << filter << std::endl;
_traverser->_filteredPaths += filter;
size_t count = edgesJson.size();
if (count == 0) {
last = nullptr;
@@ -166,6 +174,7 @@ void ClusterTraverser::EdgeGetter::operator() (std::string const& startVertex,
}
std::map<std::string, std::string> headers;
size_t beforeFetching = _traverser->_vertices.size();
res = getFilteredDocumentsOnCoordinator(_traverser->_dbname,
expVertices,
headers,
@@ -174,10 +183,13 @@ void ClusterTraverser::EdgeGetter::operator() (std::string const& startVertex,
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
if (! expVertices.empty() && ! verticesToFetch.empty()) {
if (! expVertices.empty()) {
// There are some vertices that either do not exist or do not match the filter.
// We have to exclude these from the traversal
_traverser->_filteredPaths += _traverser->_vertices.size() - beforeFetching;
std::cout << "Added filtered vertices: " << _traverser->_vertices.size() - beforeFetching << std::endl;
}
_traverser->_readDocuments += verticesToFetch.size();
std::string next = stack.top();
stack.pop();
last = &_continueConst;
@@ -229,6 +241,7 @@ void ClusterTraverser::setStartVertex (VertexId& v) {
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
++_readDocuments;
if (responseCode == triagens::rest::HttpResponse::HttpResponseCode::NOT_FOUND) {
_vertices.emplace(id, nullptr);
}
@@ -239,6 +252,7 @@ void ClusterTraverser::setStartVertex (VertexId& v) {
}
auto exp = _expressions->find(0);
if (exp != _expressions->end() && ! vertexMatchesCondition(it->second, exp->second)) {
std::cout << "Filtered on set start vertex\n";
// We can stop here. The start vertex does not match condition
_done = true;
}
@@ -248,6 +262,7 @@ bool ClusterTraverser::vertexMatchesCondition (TRI_json_t* v, std::vector<Traver
for (auto const& e : exp) {
if (! e->isEdgeAccess) {
if (v == nullptr || ! e->matchesCheck(v)) {
++_filteredPaths;
return false;
}
}
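
Throughout this file the rule is the same: every vertex or edge rejected by a TraverserExpression bumps _filteredPaths, and every document fetched from a remote shard bumps _readDocuments. A generic sketch of such a counting filter pass (the names are stand-ins, not ArangoDB's; matches plays the role of TraverserExpression::matchesCheck):

#include <cstddef>
#include <vector>

// Illustrative filter pass that keeps matching items and counts the
// rejected ones, mirroring how vertexMatchesCondition above bumps
// _filteredPaths for every failed matchesCheck.
template <typename T, typename Predicate>
std::vector<T> filterCounting (std::vector<T> const& input,
                               Predicate matches,
                               size_t& filtered) {
  std::vector<T> kept;
  for (auto const& item : input) {
    if (! matches(item)) {
      ++filtered;            // statistics: one more rejected candidate
      continue;
    }
    kept.push_back(item);
  }
  return kept;
}

int main () {
  size_t filtered = 0;
  std::vector<int> input = { 1, 2, 3, 4 };
  auto kept = filterCounting(input,
                             [] (int i) { return i % 2 == 0; },
                             filtered);
  // kept == {2, 4}, filtered == 2
  return (kept.size() == 2 && filtered == 2) ? 0 : 1;
}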

View File

@@ -162,13 +162,13 @@ namespace triagens {
}
triagens::basics::Json* pathToJson (Transaction*,
CollectionNameResolver*) const;
CollectionNameResolver*);
triagens::basics::Json* lastEdgeToJson (Transaction*,
CollectionNameResolver*) const;
CollectionNameResolver*);
triagens::basics::Json* lastVertexToJson (Transaction*,
CollectionNameResolver*) const;
CollectionNameResolver*);
private:

View File

@@ -32,6 +32,7 @@
#include "Cluster/ClusterMethods.h"
#include "VocBase/Traverser.h"
#include <iostream>
using namespace triagens::rest;
using namespace triagens::arango;
@@ -315,6 +316,7 @@ bool RestEdgesHandler::readEdges (std::vector<traverser::TraverserExpression*> c
TRI_transaction_collection_t* collection = trx.trxCollection();
size_t filtered = 0;
std::vector<TRI_doc_mptr_copy_t>&& edges = TRI_LookupEdgesDocumentCollection(
collection->_collection->_collection,
direction,
@@ -340,7 +342,7 @@ bool RestEdgesHandler::readEdges (std::vector<traverser::TraverserExpression*> c
// Expressions symbolize an and, so all have to be matched
for (auto& exp : expressions) {
if (exp->isEdgeAccess && ! exp->matchesCheck(e, docCol, trx.resolver())) {
++filtered;
add = false;
break;
}
@@ -362,6 +364,8 @@ bool RestEdgesHandler::readEdges (std::vector<traverser::TraverserExpression*> c
result("edges", documents);
result("error", triagens::basics::Json(false));
result("code", triagens::basics::Json(200));
result("scannedIndex", triagens::basics::Json(static_cast<int32_t>(edges.size())));
result("filtered", triagens::basics::Json(static_cast<int32_t>(filtered)));
// and generate a response
generateResult(result.json());
@@ -382,13 +386,17 @@ bool RestEdgesHandler::readFilteredEdges () {
triagens::basics::ScopeGuard guard{
[]() -> void { },
[&expressions]() -> void {
std::cout << "Before free Expressions\n";
for (auto& e : expressions) {
delete e;
}
std::cout << "After free Expressions\n";
}
};
if (json == nullptr) {
std::cout << "Before free\n";
delete _response;
std::cout << "After free\n";
_response = nullptr;
return readEdges(expressions);
}
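
The ScopeGuard in readFilteredEdges () takes two callables: one run on construction, one run when the scope is left, which guarantees the TraverserExpression objects are freed on every exit path, including exceptions. A minimal guard in the same spirit, assuming nothing about the real triagens::basics::ScopeGuard beyond the usage shown above:

#include <functional>
#include <utility>
#include <vector>

// Minimal two-callable scope guard matching the usage above: the
// first lambda runs immediately, the second when the scope unwinds.
class ScopeGuard {
  std::function<void()> _onExit;

  public:

    ScopeGuard (std::function<void()> onEnter,
                std::function<void()> onExit)
      : _onExit(std::move(onExit)) {
      onEnter();
    }

    ~ScopeGuard () { _onExit(); }

    ScopeGuard (ScopeGuard const&) = delete;
    ScopeGuard& operator= (ScopeGuard const&) = delete;
};

int main () {
  std::vector<int*> expressions = { new int(1), new int(2) };
  ScopeGuard guard{
    [] () -> void { },
    [&expressions] () -> void {
      for (auto* e : expressions) {
        delete e;       // runs on normal return and on thrown exceptions alike
      }
    }
  };
  // any early return or THROW_ARANGO_EXCEPTION-style throw here
  // would still trigger the cleanup lambda
  return 0;
}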

View File

@@ -697,7 +697,7 @@ void TRI_RunNeighborsSearch (
// -----------------------------------------------------------------------------
Json* SingleServerTraversalPath::pathToJson (Transaction* trx,
CollectionNameResolver* resolver) const {
CollectionNameResolver* resolver) {
std::unique_ptr<Json> path(new Json(Json::Object, 2));
Json vertices(Json::Array);
for (size_t i = 0; i < _path.vertices.size(); ++i) {
@@ -731,18 +731,18 @@ Json* SingleServerTraversalPath::pathToJson (Transaction* trx,
Json* SingleServerTraversalPath::lastEdgeToJson (Transaction* trx,
CollectionNameResolver* resolver) const {
CollectionNameResolver* resolver) {
return edgeToJson(trx, resolver, _path.edges.back());
}
Json* SingleServerTraversalPath::lastVertexToJson (Transaction* trx,
CollectionNameResolver* resolver) const {
CollectionNameResolver* resolver) {
return vertexToJson(trx, resolver, _path.vertices.back());
}
Json* SingleServerTraversalPath::edgeToJson (Transaction* trx,
CollectionNameResolver* resolver,
EdgeInfo const& e) const {
EdgeInfo const& e) {
auto collection = trx->trxCollection(e.cid);
TRI_shaped_json_t shapedJson;
TRI_EXTRACT_SHAPED_JSON_MARKER(shapedJson, &e.mptr);
@@ -756,7 +756,7 @@ Json* SingleServerTraversalPath::edgeToJson (Transaction* trx,
Json* SingleServerTraversalPath::vertexToJson (Transaction* trx,
CollectionNameResolver* resolver,
VertexId const& v) const {
VertexId const& v) {
auto collection = trx->trxCollection(v.cid);
if (collection == nullptr) {
SingleCollectionReadOnlyTransaction intTrx(new StandaloneTransactionContext(), trx->vocbase(), v.cid);
@@ -768,6 +768,7 @@ Json* SingleServerTraversalPath::vertexToJson (Transaction* trx,
collection = intTrx.trxCollection();
TRI_doc_mptr_copy_t mptr;
intTrx.read(&mptr, v.key);
++_readDocuments;
std::unique_ptr<Json> tmp(new Json(TRI_ExpandShapedJson(
collection->_collection->_collection->getShaper(),
resolver,
@ -779,6 +780,7 @@ Json* SingleServerTraversalPath::vertexToJson (Transaction* trx,
}
TRI_doc_mptr_copy_t mptr;
trx->readSingle(collection, &mptr, v.key);
++_readDocuments;
return new Json(TRI_ExpandShapedJson(
collection->_collection->_collection->getShaper(),
resolver,
@@ -807,7 +809,6 @@ DepthFirstTraverser::DepthFirstTraverser (
void DepthFirstTraverser::_defInternalFunctions () {
_getVertex = [] (EdgeInfo const& edge, VertexId const& vertex, size_t depth, VertexId& result) -> bool {
auto mptr = edge.mptr;
// TODO fill Statistics
if (strcmp(TRI_EXTRACT_MARKER_FROM_KEY(&mptr), vertex.key) == 0 &&
TRI_EXTRACT_MARKER_FROM_CID(&mptr) == vertex.cid) {
result = VertexId(TRI_EXTRACT_MARKER_TO_CID(&mptr), TRI_EXTRACT_MARKER_TO_KEY(&mptr));
@@ -822,12 +823,12 @@ void DepthFirstTraverser::_defInternalFunctions () {
_getEdge = [&] (VertexId const& startVertex, std::vector<EdgeInfo>& edges, TRI_doc_mptr_copy_t*& last, size_t& eColIdx, bool& dir) {
std::vector<TRI_doc_mptr_copy_t> tmp;
TRI_ASSERT(eColIdx < _edgeCols.size());
// TODO fill Statistics
// TODO Self referencing edges
triagens::arango::EdgeIndex* edgeIndex = _edgeCols.at(eColIdx)->edgeIndex();
if (dir) {
TRI_edge_index_iterator_t it(TRI_EDGE_OUT, startVertex.cid, startVertex.key);
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
while (last == nullptr) {
// Switch back direction
dir = false;
@@ -839,20 +840,24 @@ void DepthFirstTraverser::_defInternalFunctions () {
TRI_edge_index_iterator_t it(TRI_EDGE_IN, startVertex.cid, startVertex.key);
edgeIndex = _edgeCols.at(eColIdx)->edgeIndex();
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
if (last == nullptr) {
dir = true;
TRI_edge_index_iterator_t it(TRI_EDGE_OUT, startVertex.cid, startVertex.key);
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
}
}
} else {
TRI_edge_index_iterator_t it(TRI_EDGE_IN, startVertex.cid, startVertex.key);
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
while (last == nullptr) {
// now change direction
dir = true;
TRI_edge_index_iterator_t it(TRI_EDGE_OUT, startVertex.cid, startVertex.key);
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
if (last == nullptr) {
// The other direction also has no further edges
dir = false;
@@ -864,6 +869,7 @@ void DepthFirstTraverser::_defInternalFunctions () {
TRI_edge_index_iterator_t it(TRI_EDGE_IN, startVertex.cid, startVertex.key);
edgeIndex = _edgeCols.at(eColIdx)->edgeIndex();
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
}
}
}
@@ -878,6 +884,7 @@ void DepthFirstTraverser::_defInternalFunctions () {
if (it != _expressions->end()) {
for (auto const& exp : it->second) {
if (exp->isEdgeAccess && ! exp->matchesCheck(tmp.back(), _edgeCols.at(eColIdx), _resolver)) {
++_filteredPaths;
// Retry with the next element
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
@@ -907,12 +914,14 @@ void DepthFirstTraverser::_defInternalFunctions () {
TRI_doc_mptr_copy_t mptr;
int res = _trx->readSingle(collection, &mptr, other.key);
++_readDocuments;
if (res != TRI_ERROR_NO_ERROR) {
// Vertex does not exist
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
}
if (! exp->matchesCheck(mptr, collection->_collection->_collection, _resolver)) {
++_filteredPaths;
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
}
@@ -927,10 +936,10 @@ void DepthFirstTraverser::_defInternalFunctions () {
std::vector<TRI_doc_mptr_copy_t> tmp;
TRI_ASSERT(eColIdx < _edgeCols.size());
// Do not touch the bool parameter, as long as it is default the first encountered nullptr is final
// TODO fill Statistics
TRI_edge_index_iterator_t it(_opts.direction, startVertex.cid, startVertex.key);
triagens::arango::EdgeIndex* edgeIndex = _edgeCols.at(eColIdx)->edgeIndex();
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
while (last == nullptr) {
// This edge collection does not have any more edges for this vertex. Check the next one
++eColIdx;
@@ -940,6 +949,7 @@ void DepthFirstTraverser::_defInternalFunctions () {
}
edgeIndex = _edgeCols.at(eColIdx)->edgeIndex();
edgeIndex->lookup(&it, tmp, last, 1);
++_readDocuments;
}
if (last != nullptr) {
// sth is stored in tmp. Now push it on edges
@@ -952,6 +962,7 @@ void DepthFirstTraverser::_defInternalFunctions () {
if (it != _expressions->end()) {
for (auto const& exp : it->second) {
if (exp->isEdgeAccess && ! exp->matchesCheck(tmp.back(), _edgeCols.at(eColIdx), _resolver)) {
++_filteredPaths;
// Retry with the next element
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
@@ -981,12 +992,14 @@ void DepthFirstTraverser::_defInternalFunctions () {
TRI_doc_mptr_copy_t mptr;
int res = _trx->readSingle(collection, &mptr, other.key);
++_readDocuments;
if (res != TRI_ERROR_NO_ERROR) {
// Vertex does not exist
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
}
if (! exp->matchesCheck(mptr, collection->_collection->_collection, _resolver)) {
++_filteredPaths;
_getEdge(startVertex, edges, last, eColIdx, dir);
return;
}

View File

@@ -206,13 +206,13 @@ namespace triagens {
}
triagens::basics::Json* pathToJson (Transaction*,
CollectionNameResolver*) const override;
CollectionNameResolver*) override;
triagens::basics::Json* lastEdgeToJson (Transaction*,
CollectionNameResolver*) const override;
CollectionNameResolver*) override;
triagens::basics::Json* lastVertexToJson (Transaction*,
CollectionNameResolver*) const override;
CollectionNameResolver*) override;
private:
@@ -222,11 +222,11 @@ namespace triagens {
triagens::basics::Json* edgeToJson (Transaction* trx,
CollectionNameResolver* resolver,
EdgeInfo const& e) const;
EdgeInfo const& e);
triagens::basics::Json* vertexToJson (Transaction* trx,
CollectionNameResolver* resolver,
VertexId const& v) const;
VertexId const& v);
// -----------------------------------------------------------------------------
// --SECTION-- private variables

View File

@@ -75,7 +75,7 @@ namespace triagens {
}
std::string toString (CollectionNameResolver const* resolver) {
return resolver->getCollectionNameCluster(cid) + "/" + std::string(key);
return resolver->getCollectionNameCluster(cid) + "/" + std::string(key);
}
@@ -159,7 +159,7 @@ namespace triagens {
/// @brief Constructor. This is an abstract only class.
////////////////////////////////////////////////////////////////////////////////
TraversalPath () {
TraversalPath () : _readDocuments(0) {
}
virtual ~TraversalPath () {
@@ -175,22 +175,38 @@ namespace triagens {
////////////////////////////////////////////////////////////////////////////////
virtual triagens::basics::Json* pathToJson (Transaction*,
CollectionNameResolver*) const = 0;
CollectionNameResolver*) = 0;
////////////////////////////////////////////////////////////////////////////////
/// @brief Builds only the last edge on the path as Json
////////////////////////////////////////////////////////////////////////////////
virtual triagens::basics::Json* lastEdgeToJson (Transaction*,
CollectionNameResolver*) const = 0;
CollectionNameResolver*) = 0;
////////////////////////////////////////////////////////////////////////////////
/// @brief Builds only the last vertex as Json
////////////////////////////////////////////////////////////////////////////////
virtual triagens::basics::Json* lastVertexToJson (Transaction*,
CollectionNameResolver*) const = 0;
CollectionNameResolver*) = 0;
////////////////////////////////////////////////////////////////////////////////
/// @brief Gets the amount of read documents
////////////////////////////////////////////////////////////////////////////////
size_t getReadDocuments () const {
return _readDocuments;
}
protected:
////////////////////////////////////////////////////////////////////////////////
/// @brief Count how many documents have been read
////////////////////////////////////////////////////////////////////////////////
size_t _readDocuments;
};
// -----------------------------------------------------------------------------
@@ -250,7 +266,9 @@ namespace triagens {
////////////////////////////////////////////////////////////////////////////////
Traverser ()
: _pruneNext(false),
: _readDocuments(0),
_filteredPaths(0),
_pruneNext(false),
_done(false),
_expressions(nullptr) {
}
@@ -261,7 +279,9 @@ namespace triagens {
Traverser (TraverserOptions& opts,
std::unordered_map<size_t, std::vector<TraverserExpression*>> const* expressions)
: _pruneNext(false),
: _readDocuments(0),
_filteredPaths(0),
_pruneNext(false),
_done(false),
_opts(opts),
_expressions(expressions) {
@@ -287,7 +307,7 @@ namespace triagens {
size_t skip (size_t amount) {
size_t skipped = 0;
for (size_t i = 0; i < amount; ++i) {
std::unique_ptr<const TraversalPath> p(next());
std::unique_ptr<TraversalPath> p(next());
if (p == nullptr) {
_done = true;
break;
@@ -303,6 +323,26 @@ namespace triagens {
virtual TraversalPath* next () = 0;
////////////////////////////////////////////////////////////////////////////////
/// @brief Get the number of filtered paths
////////////////////////////////////////////////////////////////////////////////
size_t getAndResetFilteredPaths () {
size_t tmp = _filteredPaths;
_filteredPaths = 0;
return tmp;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief Get the number of documents loaded
////////////////////////////////////////////////////////////////////////////////
size_t getAndResetReadDocuments () {
size_t tmp = _readDocuments;
_readDocuments = 0;
return tmp;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief Prune the current path prefix. Do not evaluate it any further.
////////////////////////////////////////////////////////////////////////////////
@@ -323,6 +363,18 @@
protected:
////////////////////////////////////////////////////////////////////////////////
/// @brief counter for all read documents
////////////////////////////////////////////////////////////////////////////////
size_t _readDocuments;
////////////////////////////////////////////////////////////////////////////////
/// @brief counter for all filtered paths
////////////////////////////////////////////////////////////////////////////////
size_t _filteredPaths;
////////////////////////////////////////////////////////////////////////////////
/// @brief toggle if this path should be pruned on next step
////////////////////////////////////////////////////////////////////////////////
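
One detail worth noting in skip () above: each path returned by next () is heap-allocated and owned by the caller, so wrapping it in a std::unique_ptr frees every skipped path automatically, even on the early break; the const is dropped from the unique_ptr because the *ToJson members are no longer const in this commit. A compact sketch of the same ownership pattern (Producer and Item are illustrative stand-ins for the Traverser and TraversalPath types):

#include <cstddef>
#include <memory>

// Stand-ins with the same contract as Traverser::next () above:
// heap-allocated results owned by the caller, nullptr on exhaustion.
struct Item { };

struct Producer {
  size_t remaining = 3;

  Item* next () {
    if (remaining == 0) {
      return nullptr;
    }
    --remaining;
    return new Item();
  }
};

size_t skip (Producer& producer, size_t amount) {
  size_t skipped = 0;
  for (size_t i = 0; i < amount; ++i) {
    // unique_ptr frees each skipped item, even when we break early
    std::unique_ptr<Item> p(producer.next());
    if (p == nullptr) {
      break;
    }
    ++skipped;
  }
  return skipped;
}

int main () {
  Producer producer;
  return skip(producer, 10) == 3 ? 0 : 1;   // only 3 items existed
}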

View File

@@ -518,7 +518,7 @@
};
};
var potentialErrorsSuite = function () {
function potentialErrorsSuite () {
var vc, ec;
return {
@@ -671,7 +671,7 @@
};
};
var complexInternaSuite = function () {
function complexInternaSuite () {
return {
@@ -800,7 +800,7 @@
};
var complexFilteringSuite = function() {
function complexFilteringSuite () {
/***********************************************************************
* Graph under test:
@@ -854,55 +854,206 @@
var query = "FOR v, e, p IN 100 OUTBOUND @start @@eCol FILTER p.vertices[1]._key == 'wrong' RETURN v";
var bindVars = {
"@eCol": en,
"start": vertex.A
"start": vertex.Tri1
};
var result = db._query(query, bindVars).toArray();
assertEqual(result.length, 0);
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 0);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary (Tri1)
// 1 Edge (Tri1->Tri2)
// 1 Primary (Tri2)
assertEqual(stats.scannedIndex, 3);
assertEqual(stats.filtered, 1);
},
testStartVertexEarlyPruneHighDepth: function () {
var query = "FOR v, e, p IN 100 OUTBOUND @start @@eCol FILTER p.vertices[0]._key == 'wrong' RETURN v";
var bindVars = {
"@eCol": en,
"start": vertex.A
"start": vertex.Tri1
};
var result = db._query(query, bindVars).toArray();
assertEqual(result.length, 0);
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 0);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary (Tri1)
assertEqual(stats.scannedIndex, 1);
assertEqual(stats.filtered, 1);
},
testEdgesEarlyPruneHighDepth: function () {
var query = "FOR v, e, p IN 100 OUTBOUND @start @@eCol FILTER p.edges[0]._key == 'wrong' RETURN v";
var bindVars = {
"@eCol": en,
"start": vertex.A
"start": vertex.Tri1
};
var result = db._query(query, bindVars).toArray();
assertEqual(result.length, 0);
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 0);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary (Tri1)
// 1 Edge (Tri1->Tri2)
assertEqual(stats.scannedIndex, 2);
assertEqual(stats.filtered, 1);
},
testVertexLevel0: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.vertices[0].left == true
RETURN v`;
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 0);
assertEqual(cursor.getExtra().stats.scannedFull, 0);
assertEqual(cursor.getExtra().stats.scannedIndex, 2);
assertEqual(cursor.getExtra().stats.filtered, 1);
}
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary (A)
// 0 Edge
assertEqual(stats.scannedIndex, 1);
// 1 Filter (A)
assertEqual(stats.filtered, 1);
},
testVertexLevel1: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.vertices[1].left == true
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 3);
assertEqual(cursor.toArray(), ["B", "C", "F"]);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary lookup A
// 2 Edge Lookups (A)
// 2 Primary lookup B,D
// 2 Edge Lookups (2 B) (0 D)
// 2 Primary Lookups (C, F)
assertEqual(stats.scannedIndex, 9);
// 1 Filter On D
assertEqual(stats.filtered, 1);
},
testVertexLevel2: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.vertices[2].left == true
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
// We expect to find C, F
// B and D will be post filtered
assertEqual(cursor.count(), 2);
assertEqual(cursor.toArray(), ["C", "F"]);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary lookup A
// 2 Edge Lookups (A)
// 2 Primary lookup B,D
// 4 Edge Lookups (2 B) (2 D)
// 4 Primary Lookups (C, F, E, G)
assertEqual(stats.scannedIndex, 13);
// 2 Filter (E, G)
assertEqual(stats.filtered, 2);
},
testVertexLevelsCombined: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.vertices[1].right == true
FILTER p.vertices[2].left == true
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
// Everything should be filtered, no results
assertEqual(cursor.count(), 0);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary lookup A
// 2 Edge Lookups (A)
// 2 Primary lookup B,D
// 2 Edge Lookups (0 B) (2 D)
// 2 Primary Lookups (E, G)
assertEqual(stats.scannedIndex, 9);
// 1 Filter (B)
// 2 Filter (E, G)
assertEqual(stats.filtered, 3);
},
testEdgeLevel0: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.edges[0].left == true
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 3);
assertEqual(cursor.toArray(), ["B", "C", "F"]);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary (A)
// 2 Edge
// 1 Primary (B)
// 2 Edge
// 2 Primary (C,F)
assertEqual(stats.scannedIndex, 8);
// 1 Filter (A->D)
assertEqual(stats.filtered, 1);
},
testEdgeLevel1: function () {
var query = `FOR v, e, p IN 1..2 OUTBOUND @start @@ecol
FILTER p.edges[1].left == true
SORT v._key
RETURN v._key`;
var bindVars = {
"@ecol": en,
start: vertex.A
};
var cursor = db._query(query, bindVars);
assertEqual(cursor.count(), 4);
assertEqual(cursor.toArray(), ["B", "C", "D", "F"]);
var stats = cursor.getExtra().stats;
assertEqual(stats.scannedFull, 0);
// 1 Primary lookup A
// 2 Edge Lookups (A)
// 2 Primary lookup B,D
// 2 Edge Lookups (2 B) (2 D)
// 2 Primary Lookups (C, F)
assertEqual(stats.scannedIndex, 9);
// 2 Filter On (D->E, D->G)
assertEqual(stats.filtered, 2);
},
};
};
/*
jsunity.run(namedGraphSuite);
jsunity.run(multiCollectionGraphSuite);
jsunity.run(potentialErrorsSuite);
jsunity.run(complexInternaSuite);
*/
jsunity.run(complexFilteringSuite);
return jsunity.done();