mirror of https://gitee.com/bigwinds/arangodb

commit 31bbbb454f
Merge branch 'devel' of https://github.com/arangodb/arangodb into devel
@@ -4,6 +4,21 @@ The following list shows in detail which features have been added or improved in
ArangoDB 3.1. ArangoDB 3.1 also contains several bugfixes that are not listed
here.

!SECTION SmartGraphs

ArangoDB 3.1 adds the first major enterprise-only feature, called SmartGraphs.
SmartGraphs are an addition to the already existing graph features and allow
graphs to scale beyond a single machine while keeping almost the same query performance.
The SmartGraph feature is suggested for all graph database use cases that require
a cluster of database servers for whatever reason:
your graph may be too large to be stored on a single machine,
you may have a small graph but additional data which has to be
sharded and you want to keep all of it in the same environment,
or you may simply use the cluster for high availability.
In all of the above cases SmartGraphs will significantly increase the performance of
graph operations.
For more detailed information read [this manual section](../Graphs/SmartGraphs/index.html).
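As a rough illustration of the feature described above, the following arangosh sketch creates a SmartGraph; it assumes the Enterprise `@arangodb/smart-graph` module with its `_create`/`_relation` helpers, and the collection names, shard count and the `region` attribute are purely illustrative:

// arangosh sketch (Enterprise only); names and options below are examples.
var smartGraphModule = require("@arangodb/smart-graph");
var graph = smartGraphModule._create(
  "customerGraph",                                                  // graph name
  [smartGraphModule._relation("knows", "customers", "customers")],  // edge definition
  [],                                                               // no orphan collections
  { numberOfShards: 9, smartGraphAttribute: "region" }              // sharding options
);
// Vertices should carry the smartGraphAttribute so that related data ends up
// on the same shard.
graph.customers.save({ region: "europe", name: "Alice" });

Queries that stay within one value of the smart attribute can then be answered largely on a single DB-Server, which is where the performance gain described above comes from.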

!SECTION Data format

The format of the revision values stored in the `_rev` attribute of documents

@@ -153,7 +168,6 @@ AQL query editor in the web interface.

Audit logging has been added, see [Auditing](../Auditing/index.html).


!SECTION Client tools

Added option `--skip-lines` for arangoimp
@@ -439,7 +439,7 @@ bool Inception::estimateRAFTInterval() {
}
}

maxmean = 1.0e-2*std::ceil(100*(.15 + 1.0e-3*maxmean));
maxmean = 1.e-3*std::ceil(1.e3*(.1 + 1.0e-3*(maxmean+3*maxstdev)));

LOG_TOPIC(INFO, Logger::AGENCY)
<< "Auto-adapting RAFT timing to: {" << maxmean
@@ -50,8 +50,8 @@ Supervision::Supervision()
: arangodb::Thread("Supervision"),
_agent(nullptr),
_snapshot("Supervision"),
_frequency(5.0),
_gracePeriod(15),
_frequency(5.),
_gracePeriod(15.),
_jobId(0),
_jobIdMax(0),
_selfShutdown(false) {}
@@ -161,8 +161,8 @@ class Supervision : public arangodb::Thread {
arangodb::basics::ConditionVariable _cv; /**< @brief Control if thread
should run */

long _frequency;
long _gracePeriod;
double _frequency;
double _gracePeriod;
uint64_t _jobId;
uint64_t _jobIdMax;
File diff suppressed because it is too large
@@ -124,7 +124,7 @@ extern int Aqldebug;

union YYSTYPE
{
#line 19 "Aql/grammar.y" /* yacc.c:1909 */
#line 19 "Aql/grammar.y" /* yacc.c:1915 */

arangodb::aql::AstNode* node;
struct {

@@ -134,7 +134,7 @@ union YYSTYPE
bool boolval;
int64_t intval;

#line 138 "Aql/grammar.hpp" /* yacc.c:1909 */
#line 138 "Aql/grammar.hpp" /* yacc.c:1915 */
};

typedef union YYSTYPE YYSTYPE;
@@ -91,7 +91,6 @@ RestStatus RestImportHandler::execute() {
// extract the import type
std::string const& documentType = _request->value("type", found);

/////////////////////////////////////////////////////////////////////////////////
switch (_response->transportType()) {
case Endpoint::TransportType::HTTP: {
if (found &&

@@ -102,8 +101,8 @@ RestStatus RestImportHandler::execute() {
// CSV
createFromKeyValueList();
}
} break;
/////////////////////////////////////////////////////////////////////////////////
break;
}
case Endpoint::TransportType::VPP: {
if (found &&
(documentType == "documents" || documentType == "array" ||

@@ -113,7 +112,8 @@ RestStatus RestImportHandler::execute() {
generateNotImplemented("ILLEGAL " + IMPORT_PATH);
createFromKeyValueListVPack();
}
} break;
break;
}
}
/////////////////////////////////////////////////////////////////////////////////
} break;

@@ -171,6 +171,7 @@ std::string RestImportHandler::buildParseError(size_t i,
////////////////////////////////////////////////////////////////////////////////
/// @brief process a single VelocyPack document of Object Type
////////////////////////////////////////////////////////////////////////////////

int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx,
RestImportResult& result,
VPackBuilder& babies,

@@ -191,76 +192,81 @@ int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx,
registerError(result, errorMsg);
return TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID;
}

if (!isEdgeCollection) {
babies.add(slice);
return TRI_ERROR_NO_ERROR;
}


// document ok, now import it
VPackBuilder newBuilder;

if (isEdgeCollection) {
// add prefixes to _from and _to
if (!_fromPrefix.empty() || !_toPrefix.empty()) {
TransactionBuilderLeaser tempBuilder(&trx);
// add prefixes to _from and _to
if (!_fromPrefix.empty() || !_toPrefix.empty()) {
TransactionBuilderLeaser tempBuilder(&trx);

tempBuilder->openObject();
if (!_fromPrefix.empty()) {
VPackSlice from = slice.get(StaticStrings::FromString);
if (from.isString()) {
std::string f = from.copyString();
if (f.find('/') == std::string::npos) {
tempBuilder->add(StaticStrings::FromString,
VPackValue(_fromPrefix + f));
}
} else if (from.isInteger()) {
uint64_t f = from.getNumber<uint64_t>();
tempBuilder->openObject();
if (!_fromPrefix.empty()) {
VPackSlice from = slice.get(StaticStrings::FromString);
if (from.isString()) {
std::string f = from.copyString();
if (f.find('/') == std::string::npos) {
tempBuilder->add(StaticStrings::FromString,
VPackValue(_fromPrefix + std::to_string(f)));
VPackValue(_fromPrefix + f));
}
}
if (!_toPrefix.empty()) {
VPackSlice to = slice.get(StaticStrings::ToString);
if (to.isString()) {
std::string t = to.copyString();
if (t.find('/') == std::string::npos) {
tempBuilder->add(StaticStrings::ToString,
VPackValue(_toPrefix + t));
}
} else if (to.isInteger()) {
uint64_t t = to.getNumber<uint64_t>();
tempBuilder->add(StaticStrings::ToString,
VPackValue(_toPrefix + std::to_string(t)));
}
}
tempBuilder->close();

if (tempBuilder->slice().length() > 0) {
newBuilder =
VPackCollection::merge(slice, tempBuilder->slice(), false, false);
slice = newBuilder.slice();
} else if (from.isInteger()) {
uint64_t f = from.getNumber<uint64_t>();
tempBuilder->add(StaticStrings::FromString,
VPackValue(_fromPrefix + std::to_string(f)));
}
}

try {
arangodb::basics::VelocyPackHelper::checkAndGetStringValue(
slice, StaticStrings::FromString);
arangodb::basics::VelocyPackHelper::checkAndGetStringValue(
slice, StaticStrings::ToString);
} catch (arangodb::basics::Exception const&) {
std::string part = VPackDumper::toString(slice);
if (part.size() > 255) {
// UTF-8 chars in string will be escaped so we can truncate it at any
// point
part = part.substr(0, 255) + "...";
if (!_toPrefix.empty()) {
VPackSlice to = slice.get(StaticStrings::ToString);
if (to.isString()) {
std::string t = to.copyString();
if (t.find('/') == std::string::npos) {
tempBuilder->add(StaticStrings::ToString,
VPackValue(_toPrefix + t));
}
} else if (to.isInteger()) {
uint64_t t = to.getNumber<uint64_t>();
tempBuilder->add(StaticStrings::ToString,
VPackValue(_toPrefix + std::to_string(t)));
}
}
tempBuilder->close();

std::string errorMsg =
positionize(i) +
"missing '_from' or '_to' attribute, offending document: " + part;

registerError(result, errorMsg);
return TRI_ERROR_ARANGO_INVALID_EDGE_ATTRIBUTE;
if (tempBuilder->slice().length() > 0) {
newBuilder =
VPackCollection::merge(slice, tempBuilder->slice(), false, false);
slice = newBuilder.slice();
}
}

try {
arangodb::basics::VelocyPackHelper::checkAndGetStringValue(
slice, StaticStrings::FromString);
arangodb::basics::VelocyPackHelper::checkAndGetStringValue(
slice, StaticStrings::ToString);
} catch (arangodb::basics::Exception const&) {
std::string part = VPackDumper::toString(slice);
if (part.size() > 255) {
// UTF-8 chars in string will be escaped so we can truncate it at any
// point
part = part.substr(0, 255) + "...";
}

std::string errorMsg =
positionize(i) +
"missing '_from' or '_to' attribute, offending document: " + part;

registerError(result, errorMsg);
return TRI_ERROR_ARANGO_INVALID_EDGE_ATTRIBUTE;
}

babies.add(slice);

return TRI_ERROR_NO_ERROR;
}

@@ -960,6 +966,10 @@ int RestImportHandler::performImport(SingleCollectionTransaction& trx,
++pos;
}
}

if (opResult.failed() && res == TRI_ERROR_NO_ERROR) {
res = opResult.code;
}

return res;
}
@@ -41,7 +41,7 @@ $navbar-size: 150px;

.arangodbLogo {
height: auto;
margin-left: 6px;
margin-left: 3px;
margin-top: 15px;
width: $navbar-size - 6px;
}

@@ -171,7 +171,7 @@ $navbar-size: 150px;
#communityLabel {
color: $c-white;
font-family: Roboto,sans-serif;
font-size: 7.3pt;
font-size: 7pt;
font-weight: 100;
left: 38px;
letter-spacing: 1px;
@@ -52,6 +52,15 @@ module.exports =
return this.service.router.use(path, router, name);
}

reverse (routeName, params, suffix) {
return this.service.tree.reverse(
this.service.router._routes,
routeName,
params,
suffix
);
}

registerType (type, def) {
assert(
(
@@ -128,7 +128,7 @@ module.exports =

const req = new SyntheticRequest(rawReq, this.context);
const res = new SyntheticResponse(rawRes, this.context);
dispatch(route, req, res);
dispatch(route, req, res, this);

return true;
}
@@ -176,6 +176,60 @@ module.exports =
}
return paths;
}

reverse (route, routeName, params, suffix) {
if (typeof params === 'string') {
suffix = params;
params = undefined;
}
const reversedRoute = reverse(route, routeName);
if (!reversedRoute) {
throw new Error(`Route could not be resolved: "${routeName}"`);
}

params = Object.assign({}, params);
const parts = [];
for (const item of reversedRoute) {
const context = item.router || item.endpoint || item.middleware;
let i = 0;
for (let token of context._pathTokens) {
if (token === tokenize.PARAM) {
const name = context._pathParamNames[i];
if (params.hasOwnProperty(name)) {
if (Array.isArray(params[name])) {
if (!params[name].length) {
throw new Error(`Not enough values for parameter "${name}"`);
}
token = params[name][0];
params[name] = params[name].slice(1);
if (!params[name].length) {
delete params[name];
}
} else {
token = String(params[name]);
delete params[name];
}
} else {
throw new Error(`Missing value for parameter "${name}"`);
}
i++;
}
if (typeof token === 'string') {
parts.push(token);
}
}
}

const query = querystring.encode(params);
let path = '/' + parts.join('/');
if (suffix) {
path += '/' + suffix;
}
if (query) {
path += '?' + query;
}
return path;
}
};

function applyPathParams (route) {
@@ -204,7 +258,7 @@ function applyPathParams (route) {
}
}

function dispatch (route, req, res) {
function dispatch (route, req, res, tree) {
let pathParams = {};
let queryParams = Object.assign({}, req.queryParams);
let headers = Object.assign({}, req.headers);
@@ -311,58 +365,8 @@ function dispatch (route, req, res) {
req.path = joinPath(req.path, req.suffix);
}
res._responses = item._responses;
req.reverse = function (routeName, params, suffix) {
if (typeof params === 'string') {
suffix = params;
params = undefined;
}
const reversedRoute = reverse(route.slice(0, i), routeName);
if (!reversedRoute) {
throw new Error(`Route could not be resolved: "${routeName}"`);
}

params = Object.assign({}, params);
const parts = [];
for (const item of reversedRoute) {
const context = item.router || item.endpoint || item.middleware;
let i = 0;
for (let token of context._pathTokens) {
if (token === tokenize.PARAM) {
const name = context._pathParamNames[i];
if (params.hasOwnProperty(name)) {
if (Array.isArray(params[name])) {
if (!params[name].length) {
throw new Error(`Not enough values for parameter "${name}"`);
}
token = params[name][0];
params[name] = params[name].slice(1);
if (!params[name].length) {
delete params[name];
}
} else {
token = String(params[name]);
delete params[name];
}
} else {
throw new Error(`Missing value for parameter "${name}"`);
}
i++;
}
if (typeof token === 'string') {
parts.push(token);
}
}
}

const query = querystring.encode(params);
let path = '/' + parts.join('/');
if (suffix) {
path += '/' + suffix;
}
if (query) {
path += '?' + query;
}
return path;
req.reverse = function (...args) {
return tree.reverse(route.slice(0, i), ...args);
};

if (item.endpoint || item.router) {
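For context, here is a minimal Foxx sketch of how the delegating `req.reverse` is typically used from a route handler. The router setup follows the standard `@arangodb/foxx/router` API, while the route name `itemDetail` and the paths are illustrative:

'use strict';
// Foxx sketch: resolve a named route's path from inside another handler.
const createRouter = require('@arangodb/foxx/router');
const router = createRouter();
module.context.use(router);

// Named route "itemDetail" with one path parameter.
router.get('/items/:id', function (req, res) {
  res.json({ id: req.pathParams.id });
}, 'itemDetail');

// Another route that links to it via req.reverse(name, params),
// which yields a path such as "/items/42" relative to the service mount.
router.get('/items-link/:id', function (req, res) {
  res.json({ link: req.reverse('itemDetail', { id: req.pathParams.id }) });
});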
@@ -507,7 +511,6 @@ function search (router, path, visited) {
const child = router._namedRoutes.get(name);
if (child.router) {
// traverse named child router
console.log(visited.indexOf(child));
if (tail.length && visited.indexOf(child) === -1) {
visited.push(child);
const result = search(child.router, tail, visited);
@@ -516,17 +519,14 @@ function search (router, path, visited) {
return result;
}
}
} else {
} else if (!tail.length) {
// found named route
if (!tail.length) {
return [{endpoint: child}];
}
return [{endpoint: child}];
}
}

// traverse anonymous child routers
for (const child of router._routes) {
console.log(visited.indexOf(child));
if (child.router && visited.indexOf(child) === -1) {
visited.push(child);
const result = search(child.router, tail, visited);
@@ -214,10 +214,10 @@ module.exports =

buildRoutes () {
const service = this;
const tree = new Tree(this.main.context, this.router);
this.tree = new Tree(this.main.context, this.router);
let paths = [];
try {
paths = tree.buildSwaggerPaths();
paths = this.tree.buildSwaggerPaths();
} catch (e) {
console.errorLines(e.stack);
let err = e.cause;

@@ -251,7 +251,7 @@ module.exports =
let handled = true;

try {
handled = tree.dispatch(req, res);
handled = service.tree.dispatch(req, res);
} catch (e) {
const logLevel = (
!e.statusCode ? 'error' : // Unhandled
@@ -0,0 +1,32 @@
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Jan Steemann
////////////////////////////////////////////////////////////////////////////////

#include "StringRef.h"

#include <iostream>

std::ostream& operator<<(std::ostream& stream, arangodb::StringRef const& ref) {
stream << std::string(ref.data(), ref.length());
return stream;
}
@@ -30,6 +30,8 @@
#include <velocypack/Slice.h>
#include <velocypack/Value.h>

#include <iosfwd>

namespace arangodb {

/// @brief a struct describing a C character array

@@ -174,6 +176,8 @@ class StringRef {

}

std::ostream& operator<<(std::ostream&, arangodb::StringRef const&);

inline bool operator==(arangodb::StringRef const& lhs, arangodb::StringRef const& rhs) {
return (lhs.size() == rhs.size() && memcmp(lhs.data(), rhs.data(), lhs.size()) == 0);
}

@@ -227,4 +231,5 @@ struct equal_to<arangodb::StringRef> {

}


#endif
@@ -140,6 +140,7 @@ add_library(${LIB_ARANGO} STATIC
Basics/StaticStrings.cpp
Basics/StringBuffer.cpp
Basics/StringHeap.cpp
Basics/StringRef.cpp
Basics/StringUtils.cpp
Basics/Thread.cpp
Basics/ThreadPool.cpp