mirror of https://gitee.com/bigwinds/arangodb

make_unique

commit a694b7ef48
parent e096903af6
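Every change in this commit follows the same mechanical pattern: a declaration of the form std::unique_ptr<T> p(new T(args)); is replaced by auto p = std::make_unique<T>(args);. A minimal, self-contained sketch of the before/after (using a hypothetical Widget type, not code from this commit, and assuming the build provides C++14's std::make_unique or an equivalent shim):

    #include <memory>

    struct Widget {
      Widget(int size, bool unique) {}
    };

    int main() {
      // before: the element type is spelled twice and the raw "new" is visible
      std::unique_ptr<Widget> a(new Widget(16, true));

      // after: make_unique deduces std::unique_ptr<Widget>, hides the "new",
      // and avoids the classic leak where a raw "new" expression is evaluated
      // inside a larger full-expression before the unique_ptr takes ownership
      auto b = std::make_unique<Widget>(16, true);
      return 0;
    }

The surrounding call sites keep their existing .get()/.release() usage, so ownership handling is unchanged; only the construction of the unique_ptr differs.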
@@ -572,7 +572,7 @@ int HashedAggregateBlock::getOrSkipSome (size_t atLeast,
 auto planNode = static_cast<AggregateNode const*>(getPlanNode());
 auto nrRegs = planNode->getRegisterPlan()->nrRegs[planNode->getDepth()];
 
-std::unique_ptr<AqlItemBlock> result(new AqlItemBlock(allGroups.size(), nrRegs));
+auto result = std::make_unique<AqlItemBlock>(allGroups.size(), nrRegs);
 
 if (src != nullptr) {
 inheritRegisters(src, result.get(), 0);

@@ -306,7 +306,7 @@ AqlItemBlock* AqlItemBlock::slice (size_t from,
 std::unordered_map<AqlValue, AqlValue> cache;
 cache.reserve((to - from) * _nrRegs / 4 + 1);
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(to - from, _nrRegs));
+auto res = std::make_unique<AqlItemBlock>(to - from, _nrRegs);
 
 for (RegisterId col = 0; col < _nrRegs; col++) {
 res->_docColls[col] = _docColls[col];
@@ -348,7 +348,7 @@ AqlItemBlock* AqlItemBlock::slice (size_t row,
 std::unordered_set<RegisterId> const& registers) const {
 std::unordered_map<AqlValue, AqlValue> cache;
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(1, _nrRegs));
+auto res = std::make_unique<AqlItemBlock>(1, _nrRegs);
 
 for (RegisterId col = 0; col < _nrRegs; col++) {
 if (registers.find(col) == registers.end()) {
@@ -395,7 +395,7 @@ AqlItemBlock* AqlItemBlock::slice (std::vector<size_t>& chosen,
 std::unordered_map<AqlValue, AqlValue> cache;
 cache.reserve((to - from) * _nrRegs / 4 + 1);
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(to - from, _nrRegs));
+auto res = std::make_unique<AqlItemBlock>(to - from, _nrRegs);
 
 for (RegisterId col = 0; col < _nrRegs; col++) {
 res->_docColls[col] = _docColls[col];
@@ -443,7 +443,7 @@ AqlItemBlock* AqlItemBlock::steal (std::vector<size_t>& chosen,
 size_t to) {
 TRI_ASSERT(from < to && to <= chosen.size());
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(to - from, _nrRegs));
+auto res = std::make_unique<AqlItemBlock>(to - from, _nrRegs);
 
 for (RegisterId col = 0; col < _nrRegs; col++) {
 res->_docColls[col] = _docColls[col];
@@ -494,7 +494,7 @@ AqlItemBlock* AqlItemBlock::concatenate (std::vector<AqlItemBlock*> const& block
 TRI_ASSERT(totalSize > 0);
 TRI_ASSERT(nrRegs > 0);
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(totalSize, nrRegs));
+auto res = std::make_unique<AqlItemBlock>(totalSize, nrRegs);
 
 size_t pos = 0;
 for (it = blocks.begin(); it != blocks.end(); ++it) {

@@ -1006,7 +1006,7 @@ AqlValue AqlValue::CreateFromBlocks (triagens::arango::AqlTransaction* trx,
 totalSize += (*it)->size();
 }
 
-std::unique_ptr<Json> json(new Json(Json::Array, totalSize));
+auto json = std::make_unique<Json>(Json::Array, totalSize);
 
 for (auto it = src.begin(); it != src.end(); ++it) {
 auto current = (*it);
@@ -1048,7 +1048,7 @@ AqlValue AqlValue::CreateFromBlocks (triagens::arango::AqlTransaction* trx,
 totalSize += (*it)->size();
 }
 
-std::unique_ptr<Json> json(new Json(Json::Array, totalSize));
+auto json = std::make_unique<Json>(Json::Array, totalSize);
 
 for (auto it = src.begin(); it != src.end(); ++it) {
 auto current = (*it);

@@ -483,7 +483,7 @@ AqlItemBlock* ReturnBlock::getSome (size_t atLeast,
 TRI_ASSERT(it != ep->getRegisterPlan()->varInfo.end());
 RegisterId const registerId = it->second.registerId;
 
-std::unique_ptr<AqlItemBlock> stripped(new AqlItemBlock(n, 1));
+auto stripped = std::make_unique<AqlItemBlock>(n, 1);
 
 for (size_t i = 0; i < n; i++) {
 auto a = res->getValueReference(i, registerId);

@@ -318,8 +318,7 @@ AqlItemBlock* GatherBlock::getSome (size_t atLeast, size_t atMost) {
 AqlItemBlock* example =_gatherBlockBuffer.at(index).front();
 size_t nrRegs = example->getNrRegs();
 
-std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(toSend,
-static_cast<triagens::aql::RegisterId>(nrRegs)));
+auto res = std::make_unique<AqlItemBlock>(toSend, static_cast<triagens::aql::RegisterId>(nrRegs));
 // automatically deleted if things go wrong
 
 for (RegisterId i = 0; i < nrRegs; i++) {

@@ -288,7 +288,7 @@ void Collection::fillIndexesCoordinator () const {
 continue;
 }
 
-std::unique_ptr<triagens::aql::Index> idx(new triagens::aql::Index(v));
+auto idx = std::make_unique<triagens::aql::Index>(v);
 
 indexes.emplace_back(idx.get());
 auto p = idx.release();
@@ -376,7 +376,7 @@ void Collection::fillIndexesDBServer () const {
 }
 }
 
-std::unique_ptr<triagens::aql::Index> idx(new triagens::aql::Index(v));
+auto idx = std::make_unique<triagens::aql::Index>(v);
 // assign the found local index
 idx->setInternals(data, false);
 
@@ -403,7 +403,7 @@ void Collection::fillIndexesLocal () const {
 continue;
 }
 
-std::unique_ptr<triagens::aql::Index> idx(new triagens::aql::Index(allIndexes[i]));
+auto idx = std::make_unique<triagens::aql::Index>(allIndexes[i]);
 indexes.emplace_back(idx.get());
 idx.release();
 }

@@ -73,7 +73,7 @@ Collection* Collections::add (std::string const& name,
 THROW_ARANGO_EXCEPTION(TRI_ERROR_QUERY_TOO_MANY_COLLECTIONS);
 }
 
-std::unique_ptr<Collection> collection(new Collection(name, _vocbase, accessType));
+auto collection = std::make_unique<Collection>(name, _vocbase, accessType);
 _collections.emplace(name, collection.get());
 
 return collection.release();

@@ -337,7 +337,7 @@ Condition::~Condition () {
 
 Condition* Condition::fromJson (ExecutionPlan* plan,
 triagens::basics::Json const& json) {
-std::unique_ptr<Condition> condition(new Condition(plan->getAst()));
+auto condition = std::make_unique<Condition>(plan->getAst());
 
 if (json.isObject() && json.members() != 0) {
 // note: the AST is responsible for freeing the AstNode later!
@@ -356,7 +356,7 @@ Condition* Condition::fromJson (ExecutionPlan* plan,
 ////////////////////////////////////////////////////////////////////////////////
 
 Condition* Condition::clone () const {
-std::unique_ptr<Condition> copy(new Condition(_ast));
+auto copy = std::make_unique<Condition>(_ast);
 
 if (_root != nullptr) {
 copy->_root = _root->clone(_ast);
@@ -846,7 +846,7 @@ bool Condition::sortOrs (Variable const* variable,
 result.first == variable &&
 (operand->type != NODE_TYPE_OPERATOR_BINARY_IN || rhs->isArray())) {
 // create the condition data struct on the heap
-std::unique_ptr<ConditionData> data(new ConditionData(sub, usedIndexes[i]));
+auto data = std::make_unique<ConditionData>(sub, usedIndexes[i]);
 // push it into an owning vector
 conditionData.emplace_back(data.get());
 // vector is now responsible for data
@@ -864,7 +864,7 @@ bool Condition::sortOrs (Variable const* variable,
 rhs->isAttributeAccessForVariable(result) &&
 result.first == variable) {
 // create the condition data struct on the heap
-std::unique_ptr<ConditionData> data(new ConditionData(sub, usedIndexes[i]));
+auto data = std::make_unique<ConditionData>(sub, usedIndexes[i]);
 // push it into an owning vector
 conditionData.emplace_back(data.get());
 // vector is now responsible for data

@@ -106,7 +106,7 @@ bool ConditionFinder::before (ExecutionNode* en) {
 break;
 }
 
-std::unique_ptr<Condition> condition(new Condition(_plan->getAst()));
+auto condition = std::make_unique<Condition>(_plan->getAst());
 
 bool foundCondition = false;
 for (auto& it : _variableDefinitions) {

@@ -688,7 +688,7 @@ struct CoordinatorInstanciator : public WalkerWorker<ExecutionNode> {
 }
 
 try {
-std::unique_ptr<ExecutionEngine> engine(new ExecutionEngine(localQuery));
+auto engine = std::make_unique<ExecutionEngine>(localQuery);
 localQuery->engine(engine.get());
 
 std::unordered_map<ExecutionNode*, ExecutionBlock*> cache;
@@ -929,7 +929,7 @@ ExecutionEngine* ExecutionEngine::instantiateFromPlan (QueryRegistry* queryRegis
 if (isCoordinator) {
 // instantiate the engine on the coordinator
 
-std::unique_ptr<CoordinatorInstanciator> inst(new CoordinatorInstanciator(query, queryRegistry));
+auto inst = std::make_unique<CoordinatorInstanciator>(query, queryRegistry);
 plan->root()->walk(inst.get());
 
 #if 0
@@ -1076,7 +1076,7 @@ ExecutionEngine* ExecutionEngine::instantiateFromPlan (QueryRegistry* queryRegis
 else {
 // instantiate the engine on a local server
 engine = new ExecutionEngine(query);
-std::unique_ptr<Instanciator> inst(new Instanciator(engine));
+auto inst = std::make_unique<Instanciator>(engine);
 plan->root()->walk(inst.get());
 root = inst.get()->root;
 TRI_ASSERT(root != nullptr);

@@ -338,7 +338,7 @@ ExecutionNode::ExecutionNode (ExecutionPlan* plan,
 len = jsonvarsUsedLater.size();
 _varsUsedLater.reserve(len);
 for (size_t i = 0; i < len; i++) {
-std::unique_ptr<Variable> oneVarUsedLater(new Variable(jsonvarsUsedLater.at(i)));
+auto oneVarUsedLater = std::make_unique<Variable>(jsonvarsUsedLater.at(i));
 Variable* oneVariable = allVars->getVariable(oneVarUsedLater->id);
 
 if (oneVariable == nullptr) {
@@ -357,7 +357,7 @@ ExecutionNode::ExecutionNode (ExecutionPlan* plan,
 len = jsonvarsValidList.size();
 _varsValid.reserve(len);
 for (size_t i = 0; i < len; i++) {
-std::unique_ptr<Variable> oneVarValid(new Variable(jsonvarsValidList.at(i)));
+auto oneVarValid = std::make_unique<Variable>(jsonvarsValidList.at(i));
 Variable* oneVariable = allVars->getVariable(oneVarValid->id);
 
 if (oneVariable == nullptr) {
@@ -797,7 +797,7 @@ void ExecutionNode::RegisterPlan::clear () {
 }
 
 ExecutionNode::RegisterPlan* ExecutionNode::RegisterPlan::clone (ExecutionPlan* otherPlan, ExecutionPlan* plan) {
-std::unique_ptr<RegisterPlan> other(new RegisterPlan());
+auto other = std::make_unique<RegisterPlan>();
 
 other->nrRegsHere = nrRegsHere;
 other->nrRegs = nrRegs;

@@ -93,7 +93,7 @@ ExecutionPlan* ExecutionPlan::instantiateFromAst (Ast* ast) {
 TRI_ASSERT(root != nullptr);
 TRI_ASSERT(root->type == NODE_TYPE_ROOT);
 
-std::unique_ptr<ExecutionPlan> plan(new ExecutionPlan(ast));
+auto plan = std::make_unique<ExecutionPlan>(ast);
 
 plan->_root = plan->fromNode(root);
 
@@ -142,7 +142,7 @@ ExecutionPlan* ExecutionPlan::instantiateFromJson (Ast* ast,
 triagens::basics::Json const& json) {
 TRI_ASSERT(ast != nullptr);
 
-std::unique_ptr<ExecutionPlan> plan(new ExecutionPlan(ast));
+auto plan = std::make_unique<ExecutionPlan>(ast);
 
 plan->_root = plan->fromJson(json);
 plan->_varUsageComputed = true;
@@ -186,7 +186,7 @@ class CloneNodeAdder final : public WalkerWorker<ExecutionNode> {
 ////////////////////////////////////////////////////////////////////////////////
 
 ExecutionPlan* ExecutionPlan::clone () {
-std::unique_ptr<ExecutionPlan> plan(new ExecutionPlan(_ast));
+auto plan = std::make_unique<ExecutionPlan>(_ast);
 
 plan->_root = _root->clone(plan.get(), true, false);
 plan->_nextId = _nextId;
@@ -211,7 +211,7 @@ ExecutionPlan* ExecutionPlan::clone () {
 ////////////////////////////////////////////////////////////////////////////////
 
 ExecutionPlan* ExecutionPlan::clone (Query const& query) {
-std::unique_ptr<ExecutionPlan> otherPlan(new ExecutionPlan(query.ast()));
+auto otherPlan = std::make_unique<ExecutionPlan>(query.ast());
 
 for (auto const& it: _ids) {
 otherPlan->registerNode(it.second->clone(otherPlan.get(), false, true));
@@ -304,7 +304,7 @@ ExecutionNode* ExecutionPlan::createCalculation (Variable* out,
 }
 
 // generate a temporary calculation node
-std::unique_ptr<Expression> expr(new Expression(_ast, const_cast<AstNode*>(expression)));
+auto expr = std::make_unique<Expression>(_ast, const_cast<AstNode*>(expression));
 
 CalculationNode* en;
 if (conditionVariable != nullptr) {

@@ -719,7 +719,7 @@ AqlValue Expression::executeSimpleExpressionArray (AstNode const* node,
 }
 
 size_t const n = node->numMembers();
-std::unique_ptr<Json> array(new Json(Json::Array, n));
+auto array = std::make_unique<Json>(Json::Array, n);
 
 for (size_t i = 0; i < n; ++i) {
 auto member = node->getMemberUnchecked(i);
@@ -755,7 +755,7 @@ AqlValue Expression::executeSimpleExpressionObject (AstNode const* node,
 }
 
 size_t const n = node->numMembers();
-std::unique_ptr<Json> object(new Json(Json::Object, n));
+auto object = std::make_unique<Json>(Json::Object, n);
 
 for (size_t i = 0; i < n; ++i) {
 auto member = node->getMemberUnchecked(i);
@@ -1119,7 +1119,7 @@ AqlValue Expression::executeSimpleExpressionExpansion (AstNode const* node,
 // flatten value...
 
 // generate a new temporary for the flattened array
-std::unique_ptr<Json> flattened(new Json(Json::Array));
+auto flattened = std::make_unique<Json>(Json::Array);
 
 TRI_document_collection_t const* myCollection = nullptr;
 value = executeSimpleExpression(node->getMember(0), &myCollection, trx, argv, startPos, vars, regs, false);
@@ -1186,7 +1186,7 @@ AqlValue Expression::executeSimpleExpressionExpansion (AstNode const* node,
 }
 
 size_t const n = value.arraySize();
-std::unique_ptr<Json> array(new Json(Json::Array, n));
+auto array = std::make_unique<Json>(Json::Array, n);
 
 for (size_t i = 0; i < n; ++i) {
 // TODO: check why we must copy the array member. will crash without copying!

@@ -2357,7 +2357,7 @@ static AqlValue VertexIdsToAqlValue (triagens::arango::AqlTransaction* trx,
 CollectionNameResolver const* resolver,
 std::unordered_set<VertexId>& ids,
 bool includeData = false) {
-std::unique_ptr<Json> result(new Json(Json::Array, ids.size()));
+auto result = std::make_unique<Json>(Json::Array, ids.size());
 
 if (includeData) {
 for (auto& it : ids) {
@@ -2541,11 +2541,11 @@ AqlValue Functions::Neighbors (triagens::aql::Query* query,
 // Function to return constant distance
 auto wc = [](TRI_doc_mptr_copy_t&) -> double { return 1; };
 
-std::unique_ptr<EdgeCollectionInfo> eci(new EdgeCollectionInfo(
+auto eci = std::make_unique<EdgeCollectionInfo>(
 eCid,
 trx->documentCollection(eCid),
 wc
-));
+);
 TRI_IF_FAILURE("EdgeCollectionInfoOOM1") {
 THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
 }

@@ -148,7 +148,7 @@ int IndexBlock::initialize () {
 // instantiate expressions:
 auto instantiateExpression = [&] (size_t i, size_t j, size_t k, AstNode* a) -> void {
 // all new AstNodes are registered with the Ast in the Query
-std::unique_ptr<Expression> e(new Expression(ast, a));
+auto e = std::make_unique<Expression>(ast, a);
 
 TRI_IF_FAILURE("IndexBlock::initialize") {
 THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
@@ -159,7 +159,7 @@ int IndexBlock::initialize () {
 std::unordered_set<Variable const*> inVars;
 e->variables(inVars);
 
-std::unique_ptr<NonConstExpression> nce(new NonConstExpression(i, j, k, e.get()));
+auto nce = std::make_unique<NonConstExpression>(i, j, k, e.get());
 e.release();
 _nonConstExpressions.push_back(nce.get());
 nce.release();

@@ -2003,7 +2003,7 @@ struct SortToIndexNode final : public WalkerWorker<ExecutionNode> {
 }
 
 if (bestIndex != nullptr) {
-std::unique_ptr<Condition> condition(new Condition(_plan->getAst()));
+auto condition = std::make_unique<Condition>(_plan->getAst());
 condition->normalize(_plan);
 
 std::unique_ptr<ExecutionNode> newNode(new IndexNode(
@@ -2219,7 +2219,7 @@ int triagens::aql::removeFiltersCoveredByIndexRule (Optimizer* opt,
 auto conditionNode = calculationNode->expression()->node();
 
 // build the filter condition
-std::unique_ptr<Condition> condition(new Condition(plan->getAst()));
+auto condition = std::make_unique<Condition>(plan->getAst());
 condition->andCombine(conditionNode);
 condition->normalize(plan);
 
@@ -2262,7 +2262,7 @@ int triagens::aql::removeFiltersCoveredByIndexRule (Optimizer* opt,
 else if (newNode != condition->root()) {
 // some condition is left, but it is a different one than
 // the one from the FILTER node
-std::unique_ptr<Expression> expr(new Expression(plan->getAst(), newNode));
+auto expr = std::make_unique<Expression>(plan->getAst(), newNode);
 CalculationNode* cn = new CalculationNode(plan, plan->nextId(), expr.get(), calculationNode->outVariable());
 expr.release();
 plan->registerNode(cn);

@@ -508,7 +508,7 @@ QueryResult Query::prepare (QueryRegistry* registry) {
 init();
 enterState(PARSING);
 
-std::unique_ptr<Parser> parser(new Parser(this));
+auto parser = std::make_unique<Parser>(this);
 std::unique_ptr<ExecutionPlan> plan;
 
 if (_queryString != nullptr) {

@@ -533,7 +533,7 @@ QueryCacheResultEntry* QueryCache::store (TRI_vocbase_t* vocbase,
 auto const part = getPart(vocbase);
 
 // create the cache entry outside the lock
-std::unique_ptr<QueryCacheResultEntry> entry(new QueryCacheResultEntry(hash, queryString, queryStringLength, result, collections));
+auto entry = std::make_unique<QueryCacheResultEntry>(hash, queryString, queryStringLength, result, collections);
 
 WRITE_LOCKER(_entriesLock[part]);
 
@@ -541,7 +541,7 @@ QueryCacheResultEntry* QueryCache::store (TRI_vocbase_t* vocbase,
 
 if (it == _entries[part].end()) {
 // create entry for the current database
-std::unique_ptr<QueryCacheDatabaseEntry> db(new QueryCacheDatabaseEntry());
+auto db = std::make_unique<QueryCacheDatabaseEntry>();
 it = _entries[part].emplace(vocbase, db.get()).first;
 db.release();
 }

@@ -124,7 +124,7 @@ bool QueryList::insert (Query const* query,
 }
 
 try {
-std::unique_ptr<QueryEntry> entry(new QueryEntry(query, stamp));
+auto entry = std::make_unique<QueryEntry>(query, stamp);
 
 WRITE_LOCKER(_lock);
 

@@ -101,7 +101,7 @@ void QueryRegistry::insert (QueryId id,
 }
 auto q = m->second.find(id);
 if (q == m->second.end()) {
-std::unique_ptr<QueryInfo> p(new QueryInfo());
+auto p = std::make_unique<QueryInfo>();
 p->_vocbase = vocbase;
 p->_id = id;
 p->_query = query;

@@ -199,7 +199,7 @@ Scopes::~Scopes () {
 ////////////////////////////////////////////////////////////////////////////////
 
 void Scopes::start (ScopeType type) {
-std::unique_ptr<Scope> scope(new Scope(type));
+auto scope = std::make_unique<Scope>(type);
 
 _activeScopes.emplace_back(scope.get());
 scope.release();

@@ -67,7 +67,7 @@ TraversalBlock::TraversalBlock (ExecutionEngine* engine,
 for (auto& map : *_expressions) {
 for (size_t i = 0; i < map.second.size(); ++i) {
 SimpleTraverserExpression* it = dynamic_cast<SimpleTraverserExpression*>(map.second.at(i));
-std::unique_ptr<Expression> e(new Expression(ast, it->toEvaluate));
+auto e = std::make_unique<Expression>(ast, it->toEvaluate);
 _hasV8Expression |= e->isV8();
 std::unordered_set<Variable const*> inVars;
 e->variables(inVars);
@@ -439,7 +439,6 @@ AqlItemBlock* TraversalBlock::getSome (size_t, // atLeast,
 RegisterId nrRegs = getPlanNode()->getRegisterPlan()->nrRegs[getPlanNode()->getDepth()];
 
 std::unique_ptr<AqlItemBlock> res(requestBlock(toSend, nrRegs));
-// std::unique_ptr<AqlItemBlock> res(new AqlItemBlock(toSend, nrRegs));
 // automatically freed if we throw
 TRI_ASSERT(curRegs <= res->getNrRegs());
 

@@ -284,7 +284,7 @@ bool TraversalConditionFinder::before (ExecutionNode* en) {
 case EN::TRAVERSAL: {
 auto node = static_cast<TraversalNode *>(en);
 
-std::unique_ptr<Condition> condition(new Condition(_plan->getAst()));
+auto condition = std::make_unique<Condition>(_plan->getAst());
 
 bool foundCondition = false;
 auto const& varsValidInTraversal = node->getVarsValid();

@@ -51,7 +51,8 @@ TraversalNode::TraversalNode (ExecutionPlan* plan,
 TRI_ASSERT(direction != nullptr);
 TRI_ASSERT(start != nullptr);
 TRI_ASSERT(graph != nullptr);
-std::unique_ptr<arango::CollectionNameResolver> resolver(new arango::CollectionNameResolver(vocbase));
+auto resolver = std::make_unique<arango::CollectionNameResolver>(vocbase);
+
 if (graph->type == NODE_TYPE_COLLECTION_LIST) {
 size_t edgeCollectionCount = graph->numMembers();
 _graphJson = triagens::basics::Json(triagens::basics::Json::Array, edgeCollectionCount);
@@ -457,10 +458,7 @@ void TraversalNode::storeSimpleExpression (bool isEdgeAccess,
 it = _expressions.find(indexAccess);
 }
 
-std::unique_ptr<SimpleTraverserExpression> e(new SimpleTraverserExpression(isEdgeAccess,
-comparisonType,
-varAccess,
-compareTo));
+auto e = std::make_unique<SimpleTraverserExpression>(isEdgeAccess, comparisonType, varAccess, compareTo);
 it->second.push_back(e.get());
 e.release();
 }

@@ -177,7 +177,7 @@ ClusterCommResult const ClusterComm::asyncRequest (
 std::shared_ptr<ClusterCommCallback> callback,
 ClusterCommTimeout timeout) {
 
-std::unique_ptr<ClusterCommOperation> op(new ClusterCommOperation());
+auto op = std::make_unique<ClusterCommOperation>();
 op->result.clientTransactionID = clientTransactionID;
 op->result.coordTransactionID = coordTransactionID;
 do {
@@ -302,7 +302,7 @@ std::unique_ptr<ClusterCommResult> ClusterComm::syncRequest (
 
 map<string, string> headersCopy(headerFields);
 
-std::unique_ptr<ClusterCommResult> res(new ClusterCommResult());
+auto res = std::make_unique<ClusterCommResult>();
 res->clientTransactionID = clientTransactionID;
 res->coordTransactionID = coordTransactionID;
 do {

@@ -39,7 +39,8 @@ using ClusterTraverser = triagens::arango::traverser::ClusterTraverser;
 
 triagens::basics::Json* ClusterTraversalPath::pathToJson (triagens::arango::Transaction*,
 triagens::arango::CollectionNameResolver*) {
-std::unique_ptr<triagens::basics::Json> result(new triagens::basics::Json(triagens::basics::Json::Object));
+auto result = std::make_unique<triagens::basics::Json>(triagens::basics::Json::Object);
+
 size_t vCount = _path.vertices.size();
 triagens::basics::Json vertices(triagens::basics::Json::Array, vCount);
 for (auto& it : _path.vertices) {
@@ -323,7 +324,7 @@ triagens::arango::traverser::TraversalPath* ClusterTraverser::next () {
 return nullptr;
 }
 
-std::unique_ptr<ClusterTraversalPath> p(new ClusterTraversalPath(this, path));
+auto p = std::make_unique<ClusterTraversalPath>(this, path);
 if (_opts.shouldPrunePath(p.get())) {
 _enumerator->prune();
 return next();

@@ -534,7 +534,7 @@ bool HttpCommTask::processRead () {
 if (found && StringUtils::trim(expect) == "100-continue") {
 LOG_TRACE("received a 100-continue request");
 
-std::unique_ptr<StringBuffer> buffer(new StringBuffer(TRI_UNKNOWN_MEM_ZONE));
+auto buffer = std::make_unique<StringBuffer>(TRI_UNKNOWN_MEM_ZONE);
 buffer->appendText(TRI_CHAR_LENGTH_PAIR("HTTP/1.1 100 (Continue)\r\n\r\n"));
 
 _writeBuffers.push_back(buffer.get());
@@ -709,7 +709,7 @@ void HttpCommTask::sendChunk (StringBuffer* buffer) {
 ////////////////////////////////////////////////////////////////////////////////
 
 void HttpCommTask::finishedChunked () {
-std::unique_ptr<StringBuffer> buffer(new StringBuffer(TRI_UNKNOWN_MEM_ZONE, 6));
+auto buffer = std::make_unique<StringBuffer>(TRI_UNKNOWN_MEM_ZONE, 6);
 buffer->appendText(TRI_CHAR_LENGTH_PAIR("0\r\n\r\n"));
 
 _writeBuffers.push_back(buffer.get());
@@ -784,7 +784,7 @@ void HttpCommTask::addResponse (HttpResponse* response) {
 // }
 
 // reserve a buffer with some spare capacity
-std::unique_ptr<StringBuffer> buffer(new StringBuffer(TRI_UNKNOWN_MEM_ZONE, responseBodyLength + 128));
+auto buffer = std::make_unique<StringBuffer>(TRI_UNKNOWN_MEM_ZONE, responseBodyLength + 128);
 
 // write header
 response->writeHeader(buffer.get());

@@ -306,7 +306,7 @@ void HttpServer::handleAsync (HttpCommTask* task) {
 bool HttpServer::handleRequestAsync (std::unique_ptr<HttpHandler>& handler,
 uint64_t* jobId) {
 // execute the handler using the dispatcher
-std::unique_ptr<HttpServerJob> job(new HttpServerJob(this, handler.get(), nullptr));
+auto job = std::make_unique<HttpServerJob>(this, handler.get(), nullptr);
 // handler now belongs to the job
 auto h = handler.release();
 
@@ -358,7 +358,7 @@ bool HttpServer::handleRequest (HttpCommTask* task,
 
 // execute the handler using the dispatcher
 else {
-std::unique_ptr<HttpServerJob> job(new HttpServerJob(this, handler.get(), task));
+auto job = std::make_unique<HttpServerJob>(this, handler.get(), task);
 // handler now belongs to the job
 auto h = handler.release();
 

@@ -289,17 +289,17 @@ HashIndex::HashIndex (TRI_idx_iid_t iid,
 indexBuckets = collection->_info._indexBuckets;
 }
 
-std::unique_ptr<HashElementFunc> func(new HashElementFunc(_paths.size()));
-std::unique_ptr<IsEqualElementElementByKey> compare(new IsEqualElementElementByKey(_paths.size()));
+auto func = std::make_unique<HashElementFunc>(_paths.size());
+auto compare = std::make_unique<IsEqualElementElementByKey>(_paths.size());
 
 if (unique) {
-std::unique_ptr<TRI_HashArray_t> array(new TRI_HashArray_t(HashKey,
-*(func.get()),
-IsEqualKeyElementHash,
-IsEqualElementElement,
-*(compare.get()),
-indexBuckets,
-[] () -> std::string { return "unique hash-array"; }));
+auto array = std::make_unique<TRI_HashArray_t>(HashKey,
+*(func.get()),
+IsEqualKeyElementHash,
+IsEqualElementElement,
+*(compare.get()),
+indexBuckets,
+[] () -> std::string { return "unique hash-array"; });
 
 _uniqueArray = new HashIndex::UniqueArray(array.get(), func.get(), compare.get());
 array.release();
@@ -307,14 +307,14 @@ HashIndex::HashIndex (TRI_idx_iid_t iid,
 else {
 _multiArray = nullptr;
 
-std::unique_ptr<TRI_HashArrayMulti_t> array(new TRI_HashArrayMulti_t(HashKey,
-*(func.get()),
-IsEqualKeyElement,
-IsEqualElementElement,
-*(compare.get()),
-indexBuckets,
-64,
-[] () -> std::string { return "multi hash-array"; }));
+auto array = std::make_unique<TRI_HashArrayMulti_t>(HashKey,
+*(func.get()),
+IsEqualKeyElement,
+IsEqualElementElement,
+*(compare.get()),
+indexBuckets,
+64,
+[] () -> std::string { return "multi hash-array"; });
 
 _multiArray = new HashIndex::MultiArray(array.get(), func.get(), compare.get());
 
@@ -909,7 +909,7 @@ IndexIterator* HashIndex::iteratorForCondition (IndexIteratorContext* context,
 size_t current = 0;
 bool done = false;
 while (! done) {
-std::unique_ptr<TRI_hash_index_search_value_t> searchValue(new TRI_hash_index_search_value_t);
+auto searchValue = std::make_unique<TRI_hash_index_search_value_t>();
 searchValue->reserve(n);
 
 bool valid = true;

@@ -951,7 +951,8 @@ SkiplistIterator* SkiplistIndex::lookup (TRI_index_operator_t* slOperator,
 TRI_set_errno(res);
 return nullptr;
 }
-std::unique_ptr<SkiplistIterator> results(new SkiplistIterator(this, reverse));
+
+auto results = std::make_unique<SkiplistIterator>(this, reverse);
 
 results->findHelper(slOperator, results->_intervals);
 

@@ -655,7 +655,7 @@ int ContinuousSyncer::startTransaction (TRI_json_t const* json) {
 
 LOG_TRACE("starting transaction %llu", (unsigned long long) tid);
 
-std::unique_ptr<ReplicationTransaction> trx(new ReplicationTransaction(_server, _vocbase, tid));
+auto trx = std::make_unique<ReplicationTransaction>(_server, _vocbase, tid);
 
 int res = trx->begin();
 

@@ -572,7 +572,7 @@ bool RestEdgesHandler::readFilteredEdges () {
 for (size_t i = 0; i < length; ++i) {
 TRI_json_t* exp = TRI_LookupArrayJson(json.get(), i);
 if (TRI_IsObjectJson(exp)) {
-std::unique_ptr<traverser::TraverserExpression> expression(new traverser::TraverserExpression(exp));
+auto expression = std::make_unique<traverser::TraverserExpression>(exp);
 expressions.emplace_back(expression.get());
 expression.release();
 }

@@ -391,7 +391,7 @@ void RestExportHandler::createCursor () {
 size_t limit = triagens::basics::JsonHelper::getNumericValue<size_t>(options.json(), "limit", 0);
 
 // this may throw!
-std::unique_ptr<CollectionExport> collectionExport(new CollectionExport(_vocbase, name, _restrictions));
+auto collectionExport = std::make_unique<CollectionExport>(_vocbase, name, _restrictions);
 collectionExport->run(waitTime, limit);
 
 {

@@ -2227,7 +2227,7 @@ int RestReplicationHandler::processRestoreCollectionCoordinator (
 // create a dummy primary index
 {
 TRI_document_collection_t* doc = nullptr;
-std::unique_ptr<triagens::arango::PrimaryIndex> primaryIndex(new triagens::arango::PrimaryIndex(doc));
+auto primaryIndex = std::make_unique<triagens::arango::PrimaryIndex>(doc);
 
 auto idxJson = primaryIndex->toJson(TRI_UNKNOWN_MEM_ZONE, false);
 
@@ -2246,7 +2246,7 @@ int RestReplicationHandler::processRestoreCollectionCoordinator (
 
 if (collectionType == TRI_COL_TYPE_EDGE) {
 // create a dummy edge index
-std::unique_ptr<triagens::arango::EdgeIndex> edgeIndex(new triagens::arango::EdgeIndex(new_id_tick, nullptr));
+auto edgeIndex = std::make_unique<triagens::arango::EdgeIndex>(new_id_tick, nullptr);
 
 auto idxJson = edgeIndex->toJson(TRI_UNKNOWN_MEM_ZONE, false);
 
@@ -3119,7 +3119,7 @@ void RestReplicationHandler::handleCommandCreateKeys () {
 }
 
 // initialize a container with the keys
-std::unique_ptr<CollectionKeys> keys(new CollectionKeys(_vocbase, col->_name, id, 300.0));
+auto keys = std::make_unique<CollectionKeys>(_vocbase, col->_name, id, 300.0);
 
 std::string const idString(std::to_string(keys->id()));
 

@@ -540,7 +540,7 @@ void RestSimpleHandler::lookupByKeys (TRI_json_t const* json) {
 for (size_t j = 0; j < length; ++j) {
 TRI_json_t* exp = TRI_LookupArrayJson(postFilter, j);
 if (TRI_IsObjectJson(exp)) {
-std::unique_ptr<traverser::TraverserExpression> expression(new traverser::TraverserExpression(exp));
+auto expression = std::make_unique<traverser::TraverserExpression>(exp);
 expressions.emplace_back(expression.get());
 expression.release();
 }

@@ -698,7 +698,7 @@ void TRI_RunNeighborsSearch (
 
 Json* SingleServerTraversalPath::pathToJson (Transaction* trx,
 CollectionNameResolver* resolver) {
-std::unique_ptr<Json> path(new Json(Json::Object, 2));
+auto path = std::make_unique<Json>(Json::Object, 2);
 Json vertices(Json::Array);
 for (size_t i = 0; i < _path.vertices.size(); ++i) {
 auto v = vertexToJson(trx, resolver, _path.vertices[i]);
@@ -1095,7 +1095,9 @@ TraversalPath* DepthFirstTraverser::next () {
 // Done traversing
 return nullptr;
 }
-std::unique_ptr<SingleServerTraversalPath> p(new SingleServerTraversalPath(path));
+
+auto p = std::make_unique<SingleServerTraversalPath>(path);
+
 if (_opts.shouldPrunePath(p.get())) {
 _enumerator->prune();
 return next();

@@ -582,7 +582,7 @@ static HttpResponse* ResponseV8ToCpp (v8::Isolate* isolate,
 ((int) (TRI_ObjectToDouble(res->Get(ResponseCodeKey))));
 }
 
-std::unique_ptr<HttpResponse> response(new HttpResponse(code, compatibility));
+auto response = std::make_unique<HttpResponse>(code, compatibility);
 
 TRI_GET_GLOBAL_STRING(ContentTypeKey);
 if (res->Has(ContentTypeKey)) {

@@ -887,7 +887,7 @@ static void JS_KeyspaceCreate (const v8::FunctionCallbackInfo<v8::Value>& args)
 ignoreExisting = TRI_ObjectToBoolean(args[2]);
 }
 
-std::unique_ptr<KeySpace> ptr(new KeySpace(static_cast<uint32_t>(size)));
+auto ptr = std::make_unique<KeySpace>(static_cast<uint32_t>(size));
 
 auto h = &(static_cast<UserStructures*>(vocbase->_userStructures)->hashes);
 {

@@ -1917,14 +1917,14 @@ static ExplicitTransaction* BeginTransaction (TRI_vocbase_t* vocbase,
 bool waitForSync = false;
 
 // Start Transaction to collect all parts of the path
-std::unique_ptr<ExplicitTransaction> trx(new ExplicitTransaction(
+auto trx = std::make_unique<ExplicitTransaction>(
 vocbase,
 readCollections,
 writeCollections,
 lockTimeout,
 waitForSync,
 embed
-));
+);
 
 int res = trx->begin();
 

@@ -1113,7 +1113,7 @@ static void CreateCollectionCoordinator (const v8::FunctionCallbackInfo<v8::Valu
 
 if (collectionType == TRI_COL_TYPE_EDGE) {
 // create a dummy edge index
-std::unique_ptr<triagens::arango::EdgeIndex> edgeIndex(new triagens::arango::EdgeIndex(id, nullptr));
+auto edgeIndex = std::make_unique<triagens::arango::EdgeIndex>(id, nullptr);
 
 idxJson = edgeIndex->toJson(TRI_UNKNOWN_MEM_ZONE, false);
 triagens::basics::JsonHelper::toVelocyPack(idxJson.json(), velocy);

@@ -70,7 +70,8 @@ TraverserExpression::TraverserExpression (TRI_json_t const* json) {
 _nodeRegister.emplace_back(node);
 };
 auto registerString = [&](std::string const& str) -> char const* {
-std::unique_ptr<std::string> copy(new std::string(str.c_str(), str.size()));
+auto copy = std::make_unique<std::string>(str.c_str(), str.size());
+
 _stringRegister.emplace_back(copy.get());
 auto p = copy.release();
 TRI_ASSERT(p != nullptr);

@@ -1207,11 +1207,13 @@ int TRI_replication_applier_t::start (TRI_voc_tick_t initialTick,
 
 // TODO: prevent restart of the applier with a tick after a shutdown
 
-std::unique_ptr<triagens::arango::ContinuousSyncer> syncer(new triagens::arango::ContinuousSyncer(_server,
-_vocbase,
-&_configuration,
-initialTick,
-useTick));
+auto syncer = std::make_unique<triagens::arango::ContinuousSyncer>(
+_server,
+_vocbase,
+&_configuration,
+initialTick,
+useTick
+);
 
 // reset error
 if (_state._lastError._msg != nullptr) {

@@ -1411,7 +1411,7 @@ TRI_vocbase_t* TRI_CreateInitialVocBase (TRI_server_t* server,
 char const* name,
 TRI_vocbase_defaults_t const* defaults) {
 try {
-std::unique_ptr<TRI_vocbase_t> vocbase(new TRI_vocbase_t(server, type, path, id, name, defaults));
+auto vocbase = std::make_unique<TRI_vocbase_t>(server, type, path, id, name, defaults);
 
 return vocbase.release();
 }

@@ -1456,7 +1456,7 @@ namespace triagens {
 }
 
 Path* search (VertexId& start, VertexId& end) {
-std::unique_ptr<Path> res(new Path());
+auto res = std::make_unique<Path>();
 // Init
 if (start == end) {
 res->vertices.emplace_back(start);

@@ -667,7 +667,7 @@ bool Utf8Helper::getWords (TRI_vector_string_t*& words,
 RegexMatcher* Utf8Helper::buildMatcher (std::string const& pattern) {
 UErrorCode status = U_ZERO_ERROR;
 
-std::unique_ptr<RegexMatcher> matcher(new RegexMatcher(UnicodeString::fromUTF8(pattern), 0, status));
+auto matcher = std::make_unique<RegexMatcher>(UnicodeString::fromUTF8(pattern), 0, status);
 if (U_FAILURE(status)) {
 return nullptr;
 }

@@ -645,13 +645,13 @@ static void OutputMessage (TRI_log_level_e level,
 }
 
 if (ThreadedLogging) {
-std::unique_ptr<log_message_t> msg(new log_message_t(
+auto msg = std::make_unique<log_message_t>(
 level,
 severity,
 message,
 length,
 claimOwnership
-));
+);
 
 try {
 MUTEX_LOCKER(LogMessageQueueLock);

@@ -222,7 +222,7 @@ ConnectionManager::SingleServerConnection* ConnectionManager::leaseConnection (s
 
 if (s == nullptr) {
 // do not yet have a connections list for this endpoint, so let's create one!
-std::unique_ptr<ServerConnections> sc(new ServerConnections());
+auto sc = std::make_unique<ServerConnections>();
 
 sc->_connections.reserve(16);
 
@@ -295,7 +295,7 @@ ConnectionManager::SingleServerConnection* ConnectionManager::leaseConnection (s
 
 
 // finally create the SingleServerConnection
-std::unique_ptr<SingleServerConnection> c(new SingleServerConnection(s, cn.get(), ep.get(), endpoint));
+auto c = std::make_unique<SingleServerConnection>(s, cn.get(), ep.get(), endpoint);
 
 // Now put it into our administration:
 s->addConnection(c.get());