mirror of https://gitee.com/bigwinds/arangodb
small optimizations
This commit is contained in:
parent 4c7994597a
commit 3bb5eb1a9d
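
The diff below touches many AQL source files but repeats the same two micro-optimizations: iterator loops and by-value range loops become range-based loops over (const) references, and insert() calls on unordered sets/maps become emplace(). As a rough illustration, here is a minimal standalone sketch of the before/after pattern; it is not code from this repository, and the container names ("names", "seen") are made up for the example:

// sketch.cpp — standalone illustration only
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

int main () {
  std::vector<std::string> names = { "a", "b", "c" };

  // before: explicit iterators (or a by-value loop variable that copies each element)
  for (auto it = names.begin(); it != names.end(); ++it) {
    std::cout << *it << "\n";
  }

  // after: range-based loop over const references, no per-element copies
  for (auto const& name : names) {
    std::cout << name << "\n";
  }

  std::unordered_set<std::string> seen;
  for (auto const& name : names) {
    seen.emplace(name);   // before: seen.insert(name); emplace constructs in place
  }

  return 0;
}
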
@@ -266,7 +266,7 @@ void AqlItemBlock::shrink (size_t nrItems) {
 ////////////////////////////////////////////////////////////////////////////////

 void AqlItemBlock::clearRegisters (std::unordered_set<RegisterId> const& toClear) {
-  for (auto reg : toClear) {
+  for (auto const& reg : toClear) {
     for (size_t i = 0; i < _nrItems; i++) {
       AqlValue& a(_data[_nrRegs * i + reg]);

@@ -73,8 +73,8 @@ AggregatorGroup::AggregatorGroup (bool count)

 AggregatorGroup::~AggregatorGroup () {
   //reset();
-  for (auto it = groupBlocks.begin(); it != groupBlocks.end(); ++it) {
-    delete (*it);
+  for (auto& it : groupBlocks) {
+    delete it;
   }
   for (auto& it : groupValues) {
     it.destroy();

@@ -105,8 +105,8 @@ void AggregatorGroup::initialize (size_t capacity) {
 }

 void AggregatorGroup::reset () {
-  for (auto it = groupBlocks.begin(); it != groupBlocks.end(); ++it) {
-    delete (*it);
+  for (auto& it : groupBlocks) {
+    delete it;
   }

   groupBlocks.clear();

@@ -187,8 +187,8 @@ ExecutionBlock::ExecutionBlock (ExecutionEngine* engine,
 ////////////////////////////////////////////////////////////////////////////////

 ExecutionBlock::~ExecutionBlock () {
-  for (auto it = _buffer.begin(); it != _buffer.end(); ++it) {
-    delete *it;
+  for (auto& it : _buffer) {
+    delete it;
   }

   _buffer.clear();

@@ -204,7 +204,7 @@ ExecutionBlock::~ExecutionBlock () {

 size_t ExecutionBlock::countBlocksRows (std::vector<AqlItemBlock*> const& blocks) const {
   size_t count = 0;
-  for (auto it : blocks) {
+  for (auto const& it : blocks) {
     count += it->size();
   }
   return count;

@@ -223,14 +223,15 @@ bool ExecutionBlock::removeDependency (ExecutionBlock* ep) {
 }

 int ExecutionBlock::initializeCursor (AqlItemBlock* items, size_t pos) {
-  for (auto d : _dependencies) {
+  for (auto& d : _dependencies) {
     int res = d->initializeCursor(items, pos);

     if (res != TRI_ERROR_NO_ERROR) {
       return res;
     }
   }

-  for (auto it : _buffer) {
+  for (auto& it : _buffer) {
     delete it;
   }
   _buffer.clear();

@@ -272,7 +273,7 @@ bool ExecutionBlock::walk (WalkerWorker<ExecutionBlock>* worker) {
   }

   // Now the children in their natural order:
-  for (auto c : _dependencies) {
+  for (auto& c : _dependencies) {
     if (c->walk(worker)) {
       return true;
     }

@@ -299,6 +300,7 @@ bool ExecutionBlock::walk (WalkerWorker<ExecutionBlock>* worker) {
 int ExecutionBlock::initialize () {
   for (auto it = _dependencies.begin(); it != _dependencies.end(); ++it) {
     int res = (*it)->initialize();

     if (res != TRI_ERROR_NO_ERROR) {
       return res;
     }

@@ -313,8 +315,8 @@ int ExecutionBlock::initialize () {
 int ExecutionBlock::shutdown (int errorCode) {
   int ret = TRI_ERROR_NO_ERROR;

-  for (auto it = _buffer.begin(); it != _buffer.end(); ++it) {
-    delete *it;
+  for (auto& it : _buffer) {
+    delete it;
   }
   _buffer.clear();

@@ -571,8 +573,8 @@ bool ExecutionBlock::hasMore () {

 int64_t ExecutionBlock::remaining () {
   int64_t sum = 0;
-  for (auto it = _buffer.begin(); it != _buffer.end(); ++it) {
-    sum += (*it)->size();
+  for (auto const& it : _buffer) {
+    sum += it->size();
   }
   return sum + _dependencies[0]->remaining();
 }

@@ -593,7 +595,7 @@ int ExecutionBlock::getOrSkipSome (size_t atLeast,
   std::vector<AqlItemBlock*> collector;

   auto freeCollector = [&collector]() {
-    for (auto x : collector) {
+    for (auto& x : collector) {
       delete x;
     }
     collector.clear();

@@ -722,7 +724,7 @@ int SingletonBlock::initializeCursor (AqlItemBlock* items,
   // build a whitelist with all the registers that we will copy from above
   std::unordered_set<RegisterId> whitelist;

-  for (auto it : varsUsedLater) {
+  for (auto const& it : varsUsedLater) {
     auto it2 = registerPlan.find(it->id);

     if (it2 != registerPlan.end()) {

@@ -1077,7 +1079,7 @@ IndexRangeBlock::IndexRangeBlock (ExecutionEngine* engine,
   for (size_t i = 0; i < n; i++) {
     _condition->emplace_back(IndexAndCondition());

-    for (auto const& ri: en->_ranges[i]) {
+    for (auto const& ri : en->_ranges[i]) {
       _condition->at(i).emplace_back(ri.clone());
     }
   }

@@ -1107,7 +1109,7 @@ IndexRangeBlock::IndexRangeBlock (ExecutionEngine* engine,
 IndexRangeBlock::~IndexRangeBlock () {
   destroyHashIndexSearchValues();

-  for (auto e : _allVariableBoundExpressions) {
+  for (auto& e : _allVariableBoundExpressions) {
     delete e;
   }

@@ -1128,7 +1130,7 @@ bool IndexRangeBlock::useHighBounds () const {
 }

 bool IndexRangeBlock::hasV8Expression () const {
-  for (auto expression : _allVariableBoundExpressions) {
+  for (auto const& expression : _allVariableBoundExpressions) {
     TRI_ASSERT(expression != nullptr);

     if (expression->isV8()) {

@@ -1313,7 +1315,7 @@ void IndexRangeBlock::buildExpressions () {
     newCondition.reset(indexAnds.release());
   }
   else {
-    for (auto const& indexAnd: *indexAnds) {
+    for (auto const& indexAnd : *indexAnds) {
       newCondition->emplace_back(std::move(indexAnd));
     }
   }

@@ -1373,7 +1375,7 @@ int IndexRangeBlock::initialize () {

     std::unordered_set<Variable*>&& inVars = expression->variables();

-    for (auto v : inVars) {
+    for (auto const& v : inVars) {
       inVarsCur.emplace_back(v);
       auto it = getPlanNode()->getRegisterPlan()->varInfo.find(v->id);
       TRI_ASSERT(it != getPlanNode()->getRegisterPlan()->varInfo.end());

@@ -1405,7 +1407,7 @@ int IndexRangeBlock::initialize () {
     }
   }
   catch (...) {
-    for (auto e : _allVariableBoundExpressions) {
+    for (auto& e : _allVariableBoundExpressions) {
      delete e;
    }
    _allVariableBoundExpressions.clear();

@@ -644,7 +644,7 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {
   TRI_ASSERT(cc != nullptr);

   // iterate over all shards of the collection
-  for (auto & shardId : collection->shardIds()) {
+  for (auto& shardId : collection->shardIds()) {
     // inject the current shard id into the collection
     collection->setCurrentShard(shardId);
     auto jsonPlan = generatePlanForOneShard(info, connectedId, shardId, true);

@@ -300,7 +300,7 @@ ExecutionNode::ExecutionNode (ExecutionPlan* plan,
   _regsToClear.reserve(len);
   for (size_t i = 0; i < len; i++) {
     RegisterId oneRegToClear = JsonHelper::getNumericValue<RegisterId>(jsonRegsToClearList.at(i).json(), 0);
-    _regsToClear.insert(oneRegToClear);
+    _regsToClear.emplace(oneRegToClear);
   }

   auto allVars = plan->getAst()->variables();

@@ -320,7 +320,7 @@ ExecutionNode::ExecutionNode (ExecutionPlan* plan,
       std::string errmsg = "varsUsedLater: ID not found in all-array: " + StringUtils::itoa(oneVarUsedLater->id);
       THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_NOT_IMPLEMENTED, errmsg);
     }
-    _varsUsedLater.insert(oneVariable);
+    _varsUsedLater.emplace(oneVariable);
   }

   auto jsonvarsValidList = json.get("varsValid");

@@ -339,7 +339,7 @@ ExecutionNode::ExecutionNode (ExecutionPlan* plan,
      std::string errmsg = "varsValid: ID not found in all-array: " + StringUtils::itoa(oneVarValid->id);
      THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_NOT_IMPLEMENTED, errmsg);
    }
-   _varsValid.insert(oneVariable);
+   _varsValid.emplace(oneVariable);
   }
 }

@@ -379,7 +379,7 @@ void ExecutionNode::CloneHelper (ExecutionNode* other,
   for (auto const& orgVar: _varsUsedLater) {
     auto var = allVars->getVariable(orgVar->id);
     TRI_ASSERT(var != nullptr);
-    other->_varsUsedLater.insert(var);
+    other->_varsUsedLater.emplace(var);
   }

   other->_varsValid.reserve(_varsValid.size());

@@ -387,7 +387,7 @@ void ExecutionNode::CloneHelper (ExecutionNode* other,
   for (auto const& orgVar: _varsValid) {
     auto var = allVars->getVariable(orgVar->id);
     TRI_ASSERT(var != nullptr);
-    other->_varsValid.insert(var);
+    other->_varsValid.emplace(var);
   }

   if (_registerPlan.get() != nullptr) {

@@ -1119,7 +1119,7 @@ void ExecutionNode::RegisterPlan::after (ExecutionNode *en) {
       auto it2 = varInfo.find(v->id);
       TRI_ASSERT(it2 != varInfo.end());
       RegisterId r = it2->second.registerId;
-      regsToClear.insert(r);
+      regsToClear.emplace(r);
     }
   }
   en->setRegsToClear(regsToClear);

@@ -1504,7 +1504,7 @@ ExecutionNode* IndexRangeNode::clone (ExecutionPlan* plan,
   for (size_t i = 0; i < _ranges.size(); i++){
     ranges.emplace_back(std::vector<RangeInfo>());

-    for (auto const& x: _ranges.at(i)) {
+    for (auto const& x : _ranges.at(i)) {
       ranges.at(i).emplace_back(x);
     }
   }

@@ -1739,7 +1739,7 @@ std::vector<Variable const*> IndexRangeNode::getVariablesUsedHere () const {
     AstNode const* a = b.getExpressionAst(_plan->getAst());
     std::unordered_set<Variable*> vars = Ast::getReferencedVariables(a);
     for (auto const& vv : vars) {
-      s.insert(vv);
+      s.emplace(vv);
     }
   };

@@ -1982,7 +1982,7 @@ struct SubqueryVarUsageFinder : public WalkerWorker<ExecutionNode> {
     // Add variables used here to _usedLater:
     auto&& usedHere = en->getVariablesUsedHere();
     for (auto const& v : usedHere) {
-      _usedLater.insert(v);
+      _usedLater.emplace(v);
     }
     return false;
   }

@@ -1991,7 +1991,7 @@ struct SubqueryVarUsageFinder : public WalkerWorker<ExecutionNode> {
     // Add variables set here to _valid:
     auto&& setHere = en->getVariablesSetHere();
     for (auto const& v : setHere) {
-      _valid.insert(v);
+      _valid.emplace(v);
     }
   }

@@ -2005,7 +2005,7 @@ struct SubqueryVarUsageFinder : public WalkerWorker<ExecutionNode> {
     // create the set difference. note: cannot use std::set_difference as our sets are NOT sorted
     for (auto it = subfinder._usedLater.begin(); it != subfinder._usedLater.end(); ++it) {
       if (_valid.find(*it) != _valid.end()) {
-        _usedLater.insert((*it));
+        _usedLater.emplace((*it));
       }
     }
     return false;

@@ -2482,12 +2482,12 @@ std::vector<Variable const*> AggregateNode::getVariablesUsedHere () const {
       myselfAsNonConst->walk(&finder);
     }
     for (auto& x : finder.userVars) {
-      v.insert(x);
+      v.emplace(x);
     }
   }
   else {
     for (auto& x : _keepVariables) {
-      v.insert(x);
+      v.emplace(x);
     }
   }
 }

@@ -324,7 +324,7 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 void removeDependencies () {
-  for (auto x : _dependencies) {
+  for (auto& x : _dependencies) {
     for (auto it = x->_parents.begin();
          it != x->_parents.end();
          ++it) {

@@ -381,7 +381,7 @@ namespace triagens {
 void invalidateCost () {
   _estimatedCostSet = false;

-  for (auto dep : _dependencies) {
+  for (auto& dep : _dependencies) {
     dep->invalidateCost();
   }
 }

@@ -1514,7 +1514,7 @@ namespace triagens {
   std::vector<Variable const*> v;
   v.reserve(vars.size());

-  for (auto vv : vars) {
+  for (auto& vv : vars) {
     v.emplace_back(vv);
   }

@@ -1930,7 +1930,9 @@ namespace triagens {

 std::vector<Variable const*> getVariablesUsedHere () const override final {
   std::vector<Variable const*> v;
-  for (auto p : _elements) {
+  v.reserve(_elements.size());
+
+  for (auto& p : _elements) {
     v.emplace_back(p.first);
   }
   return v;

@@ -2185,7 +2187,7 @@ namespace triagens {
   size_t const n = _aggregateVariables.size() + (_outVariable == nullptr ? 0 : 1);
   v.reserve(n);

-  for (auto p : _aggregateVariables) {
+  for (auto const& p : _aggregateVariables) {
    v.emplace_back(p.first);
   }
   if (_outVariable != nullptr) {

@@ -3532,7 +3534,7 @@ namespace triagens {

 std::vector<Variable const*> getVariablesUsedHere () const override final {
   std::vector<Variable const*> v;
-  for (auto p : _elements) {
+  for (auto const& p : _elements) {
     v.emplace_back(p.first);
   }
   return v;

@@ -69,7 +69,7 @@ ExecutionPlan::ExecutionPlan (Ast* ast)
 ////////////////////////////////////////////////////////////////////////////////

 ExecutionPlan::~ExecutionPlan () {
-  for (auto x : _ids){
+  for (auto& x : _ids){
     delete x.second;
   }
 }

@@ -151,7 +151,7 @@ ExecutionPlan* ExecutionPlan::instanciateFromJson (Ast* ast,
 ExecutionPlan* ExecutionPlan::clone (Query& onThatQuery) {
   std::unique_ptr<ExecutionPlan> otherPlan(new ExecutionPlan(onThatQuery.ast()));

-  for (auto it: _ids) {
+  for (auto const& it: _ids) {
     otherPlan->registerNode(it.second->clone(otherPlan.get(), false, true));
   }

@@ -171,7 +171,7 @@ triagens::basics::Json ExecutionPlan::toJson (Ast* ast,
   auto const&& appliedRules = Optimizer::translateRules(_appliedRules);
   triagens::basics::Json rules(Json::Array, appliedRules.size());

-  for (auto r : appliedRules) {
+  for (auto const& r : appliedRules) {
     rules.add(triagens::basics::Json(r));
   }
   result.set("rules", rules);

@@ -179,7 +179,7 @@ triagens::basics::Json ExecutionPlan::toJson (Ast* ast,
   auto usedCollections = *ast->query()->collections()->collections();
   triagens::basics::Json jsonCollectionList(Json::Array, usedCollections.size());

-  for (auto c : usedCollections) {
+  for (auto const& c : usedCollections) {
     Json json(Json::Object);

     jsonCollectionList(json("name", Json(c.first))

@@ -275,7 +275,7 @@ ModificationOptions ExecutionPlan::createModificationOptions (AstNode const* nod

   auto const collections = _ast->query()->collections();

-  for (auto it : *(collections->collections())) {
+  for (auto const& it : *(collections->collections())) {
     if (it.second->isReadWrite) {
       isReadWrite = true;
       break;

@@ -615,8 +615,8 @@ ExecutionNode* ExecutionPlan::fromNodeSort (ExecutionNode* previous,
   }
   catch (...) {
     // prevent memleak
-    for (auto it = temp.begin(); it != temp.end(); ++it) {
-      delete (*it);
+    for (auto& it : temp) {
+      delete it;
     }
     throw;
   }

@@ -1436,8 +1436,8 @@ struct VarUsageFinder : public WalkerWorker<ExecutionNode> {
     en->setVarsUsedLater(_usedLater);
     // Add variables used here to _usedLater:
     auto&& usedHere = en->getVariablesUsedHere();
-    for (auto v : usedHere) {
-      _usedLater.insert(v);
+    for (auto& v : usedHere) {
+      _usedLater.emplace(v);
     }
     return false;
   }

@@ -1445,8 +1445,8 @@ struct VarUsageFinder : public WalkerWorker<ExecutionNode> {
   void after (ExecutionNode* en) override final {
     // Add variables set here to _valid:
     auto&& setHere = en->getVariablesSetHere();
-    for (auto v : setHere) {
-      _valid.insert(v);
+    for (auto& v : setHere) {
+      _valid.emplace(v);
       _varSetBy->emplace(std::make_pair(v->id, en));
     }
     en->setVarsValid(_valid);

@@ -1488,7 +1488,7 @@ bool ExecutionPlan::varUsageComputed () const {
 ////////////////////////////////////////////////////////////////////////////////

 void ExecutionPlan::unlinkNodes (std::unordered_set<ExecutionNode*>& toRemove) {
-  for (auto x : toRemove) {
+  for (auto& x : toRemove) {
     unlinkNode(x);
   }
 }

@@ -223,7 +223,7 @@ void Expression::replaceVariables (std::unordered_map<VariableId, Variable const
   _node = _ast->clone(_node);
   TRI_ASSERT(_node != nullptr);

-  _ast->replaceVariables(const_cast<AstNode*>(_node), replacements);
+  _node = _ast->replaceVariables(const_cast<AstNode*>(_node), replacements);
   invalidate();
 }

@@ -145,7 +145,6 @@ int Optimizer::createPlans (ExecutionPlan* plan,
   // which optimizer rules are disabled?
   std::unordered_set<int> const&& disabledIds = getDisabledRuleIds(rulesSpecification);

-
   // _plans contains the previous optimisation result
   _plans.clear();
   try {

@@ -160,7 +159,7 @@ int Optimizer::createPlans (ExecutionPlan* plan,

   while (leastDoneLevel < maxRuleLevel) {
     // Find variable usage for all old plans now:
-    for (auto p : _plans.list) {
+    for (auto& p : _plans.list) {
       if (! p->varUsageComputed()) {
         p->findVarUsage();
       }

@@ -249,7 +248,7 @@ int Optimizer::createPlans (ExecutionPlan* plan,

     _plans.steal(_newPlans);
     leastDoneLevel = maxRuleLevel;
-    for (auto l : _plans.levelDone) {
+    for (auto const& l : _plans.levelDone) {
       if (l < leastDoneLevel) {
         leastDoneLevel = l;
       }

@@ -293,8 +292,9 @@ int Optimizer::createPlans (ExecutionPlan* plan,
 std::vector<std::string> Optimizer::translateRules (std::vector<int> const& rules) {
   std::vector<std::string> names;

-  for (auto r : rules) {
+  for (auto const& r : rules) {
     auto it = _rules.find(r);

     if (it != _rules.end()) {
       names.emplace_back((*it).second.name);
     }

@@ -311,7 +311,7 @@ std::vector<std::string> Optimizer::translateRules (std::vector<int> const& rule
 ////////////////////////////////////////////////////////////////////////////////

 void Optimizer::estimatePlans () {
-  for (auto p : _plans.list) {
+  for (auto& p : _plans.list) {
     p->getCost();
     // this value is cached in the plan, so formally this step is
     // unnecessary, but for the sake of cleanliness...

@@ -336,20 +336,21 @@ std::unordered_set<int> Optimizer::getDisabledRuleIds (std::vector<std::string>
   std::unordered_set<int> disabled;

   // lookup ids of all disabled rules
-  for (auto name : names) {
+  for (auto const& name : names) {
     if (name[0] == '-') {
       // disable rule
       if (name == "-all") {
         // disable all rules
-        for (auto it : _rules) {
-          disabled.insert(it.first);
+        for (auto const& it : _rules) {
+          disabled.emplace(it.first);
         }
       }
       else {
         // disable a specific rule
         auto it = _ruleLookup.find(std::string(name.c_str() + 1));

         if (it != _ruleLookup.end()) {
-          disabled.insert((*it).second);
+          disabled.emplace((*it).second);
         }
       }
     }

@@ -361,6 +362,7 @@ std::unordered_set<int> Optimizer::getDisabledRuleIds (std::vector<std::string>
     }
     else {
       auto it = _ruleLookup.find(std::string(name.c_str() + 1));

       if (it != _ruleLookup.end()) {
         disabled.erase((*it).second);
       }

@@ -302,7 +302,7 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 ~PlanList () {
-  for (auto p : list) {
+  for (auto& p : list) {
     delete p;
   }
 }

@@ -312,7 +312,7 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 bool isContained (ExecutionPlan* plan) const {
-  for (auto p : list) {
+  for (auto const& p : list) {
     if (p == plan) {
       return true;
     }

@@ -379,7 +379,7 @@ namespace triagens {
 void steal (PlanList& b) {
   list.swap(b.list);
   levelDone.swap(b.levelDone);
-  for (auto p : b.list) {
+  for (auto& p : b.list) {
     delete p;
   }
   b.list.clear();

@@ -411,7 +411,7 @@ namespace triagens {
 ////////////////////////////////////////////////////////////////////////////////

 void clear () {
-  for (auto p : list) {
+  for (auto& p : list) {
     delete p;
   }
   list.clear();

@@ -110,27 +110,27 @@ int triagens::aql::removeRedundantSortsRule (Optimizer* opt,
           }

           // remove sort that is a direct predecessor of a sort
-          toUnlink.insert(current);
+          toUnlink.emplace(current);
         }
         break;
       }

       case SortInformation::otherLessAccurate: {
-        toUnlink.insert(current);
+        toUnlink.emplace(current);
         break;
       }

       case SortInformation::ourselvesLessAccurate: {
         // the sort at the start of the pipeline makes the sort at the end
         // superfluous, so we'll remove it
-        toUnlink.insert(n);
+        toUnlink.emplace(n);
         break;
       }

       case SortInformation::allEqual: {
         // the sort at the end of the pipeline makes the sort at the start
         // superfluous, so we'll remove it
-        toUnlink.insert(current);
+        toUnlink.emplace(current);
         break;
       }
     }

@@ -176,7 +176,7 @@ int triagens::aql::removeRedundantSortsRule (Optimizer* opt,
         sortNode->simplify(plan)) {
       // sort node had only constant expressions. it will make no difference if we execute it or not
       // so we can remove it
-      toUnlink.insert(n);
+      toUnlink.emplace(n);
     }
   }
 }

@@ -239,7 +239,7 @@ int triagens::aql::removeUnnecessaryFiltersRule (Optimizer* opt,
     if (root->isTrue()) {
       // filter is always true
       // remove filter node and merge with following node
-      toUnlink.insert(n);
+      toUnlink.emplace(n);
       modified = true;
     }
     else if (root->isFalse()) {

@@ -708,7 +708,7 @@ class PropagateConstantAttributesHelper {

     if (it2 == (*it).second.end()) {
       // first value for the attribute
-      (*it).second.insert(std::make_pair(name, value));
+      (*it).second.emplace(std::make_pair(name, value));
     }
     else {
       auto previous = (*it2).second;

@@ -1712,7 +1712,7 @@ int triagens::aql::removeUnnecessaryCalculationsRule (Optimizer* opt,
       // The variable whose value is calculated here is not used at
       // all further down the pipeline! We remove the whole
       // calculation node,
-      toUnlink.insert(n);
+      toUnlink.emplace(n);
     }
   }

@@ -2053,7 +2053,7 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
     _changesPlaces(changesPlaces),
     _changes(changes) {

-    _varIds.insert(var->id);
+    _varIds.emplace(var->id);
   };

   ~FilterToEnumCollFinder () {

@@ -2775,7 +2775,7 @@ public:
     // only remove a node once, otherwise this might cause follow up failures
     if (removedNodes.find(sortNodeID) == removedNodes.end()) {
       newPlan->unlinkNode(newPlan->getNodeById(sortNodeID));
-      removedNodes.insert(sortNodeID);
+      removedNodes.emplace(sortNodeID);
     }
   }
 };

@@ -3204,8 +3204,8 @@ int triagens::aql::removeFiltersCoveredByIndexRule (Optimizer* opt,
     for (auto const& it : ranges) {
       for (auto it2 : it) {
         if (condition.isFullyCoveredBy(it2)) {
-          toUnlink.insert(setter);
-          toUnlink.insert(n);
+          toUnlink.emplace(setter);
+          toUnlink.emplace(n);
           break;
         }
       }

@@ -3285,7 +3285,7 @@ int triagens::aql::interchangeAdjacentEnumerationsRule (Optimizer* opt,
   std::unordered_set<ExecutionNode*> nodesSet;
   for (auto const& n : nodes) {
     TRI_ASSERT(nodesSet.find(n) == nodesSet.end());
-    nodesSet.insert(n);
+    nodesSet.emplace(n);
   }

   std::vector<ExecutionNode*> nodesToPermute;

@@ -3950,8 +3950,8 @@ int triagens::aql::removeUnnecessaryRemoteScatterRule (Optimizer* opt,
     }

     if (canOptimize) {
-      toUnlink.insert(n);
-      toUnlink.insert(deps[0]);
+      toUnlink.emplace(n);
+      toUnlink.emplace(deps[0]);
     }
   }

@@ -4054,7 +4054,7 @@ class RemoveToEnumCollFinder : public WalkerWorker<ExecutionNode> {
        return false; // continue . . .
      }
      case EN::REMOTE: {
-       _toUnlink.insert(en);
+       _toUnlink.emplace(en);
        _lastNode = en;
        return false; // continue . . .
      }

@@ -4064,7 +4064,7 @@ class RemoveToEnumCollFinder : public WalkerWorker<ExecutionNode> {
         break; // abort . . .
       }
       _scatter = true;
-      _toUnlink.insert(en);
+      _toUnlink.emplace(en);
       _lastNode = en;
       return false; // continue . . .
     }

@@ -4073,7 +4073,7 @@ class RemoveToEnumCollFinder : public WalkerWorker<ExecutionNode> {
        break; // abort . . .
      }
      _gather = true;
-     _toUnlink.insert(en);
+     _toUnlink.emplace(en);
      _lastNode = en;
      return false; // continue . . .
    }

@@ -868,7 +868,7 @@ QueryResult Query::explain () {
   triagens::basics::Json out(Json::Array);

   auto plans = opt.getPlans();
-  for (auto it : plans) {
+  for (auto& it : plans) {
     TRI_ASSERT(it != nullptr);

     it->findVarUsage();

@@ -361,8 +361,8 @@ RangeInfoMap* RangeInfoMap::clone () const {
   std::unique_ptr<RangeInfoMap> rim(new RangeInfoMap());

   try {
-    for (auto const& x: _ranges) {
-      for (auto const& y: x.second) {
+    for (auto const& x : _ranges) {
+      for (auto const& y : x.second) {
        rim->insert(y.second.clone());
      }
    }

@@ -427,7 +427,7 @@ void RangeInfoMap::attributes (std::unordered_set<std::string>& set,
   std::unordered_map<std::string, RangeInfo> const* map = find(var);

   if (map != nullptr) {
-    for (auto const& x: *map) {
+    for (auto const& x : *map) {
       set.emplace(x.first);
     }
   }

@@ -440,7 +440,7 @@ void RangeInfoMap::attributes (std::unordered_set<std::string>& set,
 std::unordered_set<std::string> RangeInfoMap::variables () const {
   std::unordered_set<std::string> vars;

-  for (auto const& x: _ranges) {
+  for (auto const& x : _ranges) {
     vars.emplace(x.first);
   }

@@ -467,7 +467,7 @@ RangeInfoMapVec::RangeInfoMapVec (RangeInfoMap* rim)
 ////////////////////////////////////////////////////////////////////////////////

 RangeInfoMapVec::~RangeInfoMapVec () {
-  for (auto& x: _rangeInfoMapVec) {
+  for (auto& x : _rangeInfoMapVec) {
     delete x;
   }
   _rangeInfoMapVec.clear();

@@ -495,7 +495,7 @@ void RangeInfoMapVec::emplace_back (RangeInfoMap* rim) {
 ////////////////////////////////////////////////////////////////////////////////

 void RangeInfoMapVec::eraseEmptyOrUndefined (std::string const& var) {
-  for (auto& x: _rangeInfoMapVec) {
+  for (auto& x : _rangeInfoMapVec) {
     x->eraseEmptyOrUndefined(var);
   }
 }

@@ -616,8 +616,8 @@ RangeInfoMapVec* triagens::aql::orCombineRangeInfoMapVecs (RangeInfoMapVec* lhs,
   for (size_t i = 0; i < rhs->size(); i++) {
     std::unique_ptr<RangeInfoMap> rim(new RangeInfoMap());

-    for (auto const& x: (*rhs)[i]->_ranges) {
-      for (auto const& y: x.second) {
+    for (auto const& x : (*rhs)[i]->_ranges) {
+      for (auto const& y : x.second) {
        RangeInfo ri = y.second.clone();
        rim->insert(ri);
      }

@@ -646,8 +646,8 @@ RangeInfoMapVec* triagens::aql::orCombineRangeInfoMapVecs (RangeInfoMapVec* lhs,

 RangeInfoMap* triagens::aql::andCombineRangeInfoMaps (RangeInfoMap* lhs,
                                                       RangeInfoMap* rhs) {
-  for (auto const& x: rhs->_ranges) {
-    for (auto const& y: x.second) {
+  for (auto const& x : rhs->_ranges) {
+    for (auto const& y : x.second) {
       lhs->insert(y.second.clone());
     }
   }

@@ -606,10 +606,10 @@ namespace triagens {
   copy._lowConst.assign(_lowConst);
   copy._highConst.assign(_highConst);

-  for (auto x: _lows) {
+  for (auto const& x : _lows) {
     copy._lows.emplace_back(x);
   }
-  for (auto x: _highs) {
+  for (auto const& x : _highs) {
     copy._highs.emplace_back(x);
   }
   copy._valid = _valid;

@@ -799,7 +799,7 @@ namespace triagens {
   triagens::basics::Json list(triagens::basics::Json::Array);

   for (auto const& x : _ranges) {
-    for (auto const& y: x.second) {
+    for (auto const& y : x.second) {
       triagens::basics::Json item(triagens::basics::Json::Object);

       item("variable", triagens::basics::Json(x.first))

@@ -54,7 +54,7 @@ VariableGenerator::VariableGenerator ()

 VariableGenerator::~VariableGenerator () {
   // free all variables
-  for (auto it : _variables) {
+  for (auto& it : _variables) {
     delete it.second;
   }
 }

@@ -213,7 +213,7 @@ std::string VariableGenerator::nextName () {
 triagens::basics::Json VariableGenerator::toJson (TRI_memory_zone_t* zone) const {
   Json jsonAllVariablesList(Json::Array, _variables.size());

-  for (auto oneVariable: _variables) {
+  for (auto const& oneVariable : _variables) {
     jsonAllVariablesList(oneVariable.second->toJson());
   }

@@ -73,7 +73,7 @@ namespace triagens {

 bool done (T* en) {
   if (_done.find(en) == _done.end()) {
-    _done.insert(en);
+    _done.emplace(en);
     return false;
   }
