1
0
Fork 0

Merge branch 'devel' of https://github.com/arangodb/arangodb into devel

This commit is contained in:
jsteemann 2016-11-01 15:23:49 +01:00
commit 31bbbb454f
14 changed files with 638 additions and 567 deletions

View File

@ -4,6 +4,21 @@ The following list shows in detail which features have been added or improved in
ArangoDB 3.1. ArangoDB 3.1 also contains several bugfixes that are not listed ArangoDB 3.1. ArangoDB 3.1 also contains several bugfixes that are not listed
here. here.
!SECTION SmartGraphs
ArangoDB 3.1 adds a first major enterprise only feature called SmartGraphs.
SmartGraphs form an addition to the already existing graph features and allow to
scale graphs beyond a single machine while keeping almost the same query performance.
The SmartGraph feature is suggested for all graph database use cases that require
a cluster of database servers for whatever reason.
You can either have a graph that is too large to be stored on a single machine only.
Or you can have a small graph, but at the same time need additional data which has to be
sharded and you want to keep all of them in the same environment.
Or you simply use the cluster for high-availability.
In all the above cases SmartGraphs will significantly increase the performance of
graph operations.
For more detailed information read [this manual section](../Graphs/SmartGraphs/index.html).
!SECTION Data format !SECTION Data format
The format of the revision values stored in the `_rev` attribute of documents The format of the revision values stored in the `_rev` attribute of documents
@ -153,7 +168,6 @@ AQL query editor in the web interface.
Audit logging has been added, see [Auditing](../Auditing/index.html). Audit logging has been added, see [Auditing](../Auditing/index.html).
!SECTION Client tools !SECTION Client tools
Added option `--skip-lines` for arangoimp Added option `--skip-lines` for arangoimp

View File

@ -439,7 +439,7 @@ bool Inception::estimateRAFTInterval() {
} }
} }
maxmean = 1.0e-2*std::ceil(100*(.15 + 1.0e-3*maxmean)); maxmean = 1.e-3*std::ceil(1.e3*(.1 + 1.0e-3*(maxmean+3*maxstdev)));
LOG_TOPIC(INFO, Logger::AGENCY) LOG_TOPIC(INFO, Logger::AGENCY)
<< "Auto-adapting RAFT timing to: {" << maxmean << "Auto-adapting RAFT timing to: {" << maxmean

View File

@ -50,8 +50,8 @@ Supervision::Supervision()
: arangodb::Thread("Supervision"), : arangodb::Thread("Supervision"),
_agent(nullptr), _agent(nullptr),
_snapshot("Supervision"), _snapshot("Supervision"),
_frequency(5.0), _frequency(5.),
_gracePeriod(15), _gracePeriod(15.),
_jobId(0), _jobId(0),
_jobIdMax(0), _jobIdMax(0),
_selfShutdown(false) {} _selfShutdown(false) {}

View File

@ -161,8 +161,8 @@ class Supervision : public arangodb::Thread {
arangodb::basics::ConditionVariable _cv; /**< @brief Control if thread arangodb::basics::ConditionVariable _cv; /**< @brief Control if thread
should run */ should run */
long _frequency; double _frequency;
long _gracePeriod; double _gracePeriod;
uint64_t _jobId; uint64_t _jobId;
uint64_t _jobIdMax; uint64_t _jobIdMax;

File diff suppressed because it is too large Load Diff

View File

@ -124,7 +124,7 @@ extern int Aqldebug;
union YYSTYPE union YYSTYPE
{ {
#line 19 "Aql/grammar.y" /* yacc.c:1909 */ #line 19 "Aql/grammar.y" /* yacc.c:1915 */
arangodb::aql::AstNode* node; arangodb::aql::AstNode* node;
struct { struct {
@ -134,7 +134,7 @@ union YYSTYPE
bool boolval; bool boolval;
int64_t intval; int64_t intval;
#line 138 "Aql/grammar.hpp" /* yacc.c:1909 */ #line 138 "Aql/grammar.hpp" /* yacc.c:1915 */
}; };
typedef union YYSTYPE YYSTYPE; typedef union YYSTYPE YYSTYPE;

View File

@ -91,7 +91,6 @@ RestStatus RestImportHandler::execute() {
// extract the import type // extract the import type
std::string const& documentType = _request->value("type", found); std::string const& documentType = _request->value("type", found);
/////////////////////////////////////////////////////////////////////////////////
switch (_response->transportType()) { switch (_response->transportType()) {
case Endpoint::TransportType::HTTP: { case Endpoint::TransportType::HTTP: {
if (found && if (found &&
@ -102,8 +101,8 @@ RestStatus RestImportHandler::execute() {
// CSV // CSV
createFromKeyValueList(); createFromKeyValueList();
} }
} break; break;
///////////////////////////////////////////////////////////////////////////////// }
case Endpoint::TransportType::VPP: { case Endpoint::TransportType::VPP: {
if (found && if (found &&
(documentType == "documents" || documentType == "array" || (documentType == "documents" || documentType == "array" ||
@ -113,7 +112,8 @@ RestStatus RestImportHandler::execute() {
generateNotImplemented("ILLEGAL " + IMPORT_PATH); generateNotImplemented("ILLEGAL " + IMPORT_PATH);
createFromKeyValueListVPack(); createFromKeyValueListVPack();
} }
} break; break;
}
} }
///////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////
} break; } break;
@ -171,6 +171,7 @@ std::string RestImportHandler::buildParseError(size_t i,
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
/// @brief process a single VelocyPack document of Object Type /// @brief process a single VelocyPack document of Object Type
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx, int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx,
RestImportResult& result, RestImportResult& result,
VPackBuilder& babies, VPackBuilder& babies,
@ -192,10 +193,15 @@ int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx,
return TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID; return TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID;
} }
if (!isEdgeCollection) {
babies.add(slice);
return TRI_ERROR_NO_ERROR;
}
// document ok, now import it // document ok, now import it
VPackBuilder newBuilder; VPackBuilder newBuilder;
if (isEdgeCollection) {
// add prefixes to _from and _to // add prefixes to _from and _to
if (!_fromPrefix.empty() || !_toPrefix.empty()) { if (!_fromPrefix.empty() || !_toPrefix.empty()) {
TransactionBuilderLeaser tempBuilder(&trx); TransactionBuilderLeaser tempBuilder(&trx);
@ -258,9 +264,9 @@ int RestImportHandler::handleSingleDocument(SingleCollectionTransaction& trx,
registerError(result, errorMsg); registerError(result, errorMsg);
return TRI_ERROR_ARANGO_INVALID_EDGE_ATTRIBUTE; return TRI_ERROR_ARANGO_INVALID_EDGE_ATTRIBUTE;
} }
}
babies.add(slice); babies.add(slice);
return TRI_ERROR_NO_ERROR; return TRI_ERROR_NO_ERROR;
} }
@ -961,6 +967,10 @@ int RestImportHandler::performImport(SingleCollectionTransaction& trx,
} }
} }
if (opResult.failed() && res == TRI_ERROR_NO_ERROR) {
res = opResult.code;
}
return res; return res;
} }

View File

@ -41,7 +41,7 @@ $navbar-size: 150px;
.arangodbLogo { .arangodbLogo {
height: auto; height: auto;
margin-left: 6px; margin-left: 3px;
margin-top: 15px; margin-top: 15px;
width: $navbar-size - 6px; width: $navbar-size - 6px;
} }
@ -171,7 +171,7 @@ $navbar-size: 150px;
#communityLabel { #communityLabel {
color: $c-white; color: $c-white;
font-family: Roboto,sans-serif; font-family: Roboto,sans-serif;
font-size: 7.3pt; font-size: 7pt;
font-weight: 100; font-weight: 100;
left: 38px; left: 38px;
letter-spacing: 1px; letter-spacing: 1px;

View File

@ -52,6 +52,15 @@ module.exports =
return this.service.router.use(path, router, name); return this.service.router.use(path, router, name);
} }
reverse (routeName, params, suffix) {
return this.service.tree.reverse(
this.service.router._routes,
routeName,
params,
suffix
);
}
registerType (type, def) { registerType (type, def) {
assert( assert(
( (

View File

@ -128,7 +128,7 @@ module.exports =
const req = new SyntheticRequest(rawReq, this.context); const req = new SyntheticRequest(rawReq, this.context);
const res = new SyntheticResponse(rawRes, this.context); const res = new SyntheticResponse(rawRes, this.context);
dispatch(route, req, res); dispatch(route, req, res, this);
return true; return true;
} }
@ -176,6 +176,60 @@ module.exports =
} }
return paths; return paths;
} }
reverse (route, routeName, params, suffix) {
if (typeof params === 'string') {
suffix = params;
params = undefined;
}
const reversedRoute = reverse(route, routeName);
if (!reversedRoute) {
throw new Error(`Route could not be resolved: "${routeName}"`);
}
params = Object.assign({}, params);
const parts = [];
for (const item of reversedRoute) {
const context = item.router || item.endpoint || item.middleware;
let i = 0;
for (let token of context._pathTokens) {
if (token === tokenize.PARAM) {
const name = context._pathParamNames[i];
if (params.hasOwnProperty(name)) {
if (Array.isArray(params[name])) {
if (!params[name].length) {
throw new Error(`Not enough values for parameter "${name}"`);
}
token = params[name][0];
params[name] = params[name].slice(1);
if (!params[name].length) {
delete params[name];
}
} else {
token = String(params[name]);
delete params[name];
}
} else {
throw new Error(`Missing value for parameter "${name}"`);
}
i++;
}
if (typeof token === 'string') {
parts.push(token);
}
}
}
const query = querystring.encode(params);
let path = '/' + parts.join('/');
if (suffix) {
path += '/' + suffix;
}
if (query) {
path += '?' + query;
}
return path;
}
}; };
function applyPathParams (route) { function applyPathParams (route) {
@ -204,7 +258,7 @@ function applyPathParams (route) {
} }
} }
function dispatch (route, req, res) { function dispatch (route, req, res, tree) {
let pathParams = {}; let pathParams = {};
let queryParams = Object.assign({}, req.queryParams); let queryParams = Object.assign({}, req.queryParams);
let headers = Object.assign({}, req.headers); let headers = Object.assign({}, req.headers);
@ -311,58 +365,8 @@ function dispatch (route, req, res) {
req.path = joinPath(req.path, req.suffix); req.path = joinPath(req.path, req.suffix);
} }
res._responses = item._responses; res._responses = item._responses;
req.reverse = function (routeName, params, suffix) { req.reverse = function (...args) {
if (typeof params === 'string') { return tree.reverse(route.slice(0, i), ...args);
suffix = params;
params = undefined;
}
const reversedRoute = reverse(route.slice(0, i), routeName);
if (!reversedRoute) {
throw new Error(`Route could not be resolved: "${routeName}"`);
}
params = Object.assign({}, params);
const parts = [];
for (const item of reversedRoute) {
const context = item.router || item.endpoint || item.middleware;
let i = 0;
for (let token of context._pathTokens) {
if (token === tokenize.PARAM) {
const name = context._pathParamNames[i];
if (params.hasOwnProperty(name)) {
if (Array.isArray(params[name])) {
if (!params[name].length) {
throw new Error(`Not enough values for parameter "${name}"`);
}
token = params[name][0];
params[name] = params[name].slice(1);
if (!params[name].length) {
delete params[name];
}
} else {
token = String(params[name]);
delete params[name];
}
} else {
throw new Error(`Missing value for parameter "${name}"`);
}
i++;
}
if (typeof token === 'string') {
parts.push(token);
}
}
}
const query = querystring.encode(params);
let path = '/' + parts.join('/');
if (suffix) {
path += '/' + suffix;
}
if (query) {
path += '?' + query;
}
return path;
}; };
if (item.endpoint || item.router) { if (item.endpoint || item.router) {
@ -507,7 +511,6 @@ function search (router, path, visited) {
const child = router._namedRoutes.get(name); const child = router._namedRoutes.get(name);
if (child.router) { if (child.router) {
// traverse named child router // traverse named child router
console.log(visited.indexOf(child));
if (tail.length && visited.indexOf(child) === -1) { if (tail.length && visited.indexOf(child) === -1) {
visited.push(child); visited.push(child);
const result = search(child.router, tail, visited); const result = search(child.router, tail, visited);
@ -516,17 +519,14 @@ function search (router, path, visited) {
return result; return result;
} }
} }
} else { } else if (!tail.length) {
// found named route // found named route
if (!tail.length) {
return [{endpoint: child}]; return [{endpoint: child}];
} }
} }
}
// traverse anonymous child routers // traverse anonymous child routers
for (const child of router._routes) { for (const child of router._routes) {
console.log(visited.indexOf(child));
if (child.router && visited.indexOf(child) === -1) { if (child.router && visited.indexOf(child) === -1) {
visited.push(child); visited.push(child);
const result = search(child.router, tail, visited); const result = search(child.router, tail, visited);

View File

@ -214,10 +214,10 @@ module.exports =
buildRoutes () { buildRoutes () {
const service = this; const service = this;
const tree = new Tree(this.main.context, this.router); this.tree = new Tree(this.main.context, this.router);
let paths = []; let paths = [];
try { try {
paths = tree.buildSwaggerPaths(); paths = this.tree.buildSwaggerPaths();
} catch (e) { } catch (e) {
console.errorLines(e.stack); console.errorLines(e.stack);
let err = e.cause; let err = e.cause;
@ -251,7 +251,7 @@ module.exports =
let handled = true; let handled = true;
try { try {
handled = tree.dispatch(req, res); handled = service.tree.dispatch(req, res);
} catch (e) { } catch (e) {
const logLevel = ( const logLevel = (
!e.statusCode ? 'error' : // Unhandled !e.statusCode ? 'error' : // Unhandled

32
lib/Basics/StringRef.cpp Normal file
View File

@ -0,0 +1,32 @@
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Jan Steemann
////////////////////////////////////////////////////////////////////////////////
#include "StringRef.h"
#include <iostream>
std::ostream& operator<<(std::ostream& stream, arangodb::StringRef const& ref) {
stream << std::string(ref.data(), ref.length());
return stream;
}

View File

@ -30,6 +30,8 @@
#include <velocypack/Slice.h> #include <velocypack/Slice.h>
#include <velocypack/Value.h> #include <velocypack/Value.h>
#include <iosfwd>
namespace arangodb { namespace arangodb {
/// @brief a struct describing a C character array /// @brief a struct describing a C character array
@ -174,6 +176,8 @@ class StringRef {
} }
std::ostream& operator<<(std::ostream&, arangodb::StringRef const&);
inline bool operator==(arangodb::StringRef const& lhs, arangodb::StringRef const& rhs) { inline bool operator==(arangodb::StringRef const& lhs, arangodb::StringRef const& rhs) {
return (lhs.size() == rhs.size() && memcmp(lhs.data(), rhs.data(), lhs.size()) == 0); return (lhs.size() == rhs.size() && memcmp(lhs.data(), rhs.data(), lhs.size()) == 0);
} }
@ -227,4 +231,5 @@ struct equal_to<arangodb::StringRef> {
} }
#endif #endif

View File

@ -140,6 +140,7 @@ add_library(${LIB_ARANGO} STATIC
Basics/StaticStrings.cpp Basics/StaticStrings.cpp
Basics/StringBuffer.cpp Basics/StringBuffer.cpp
Basics/StringHeap.cpp Basics/StringHeap.cpp
Basics/StringRef.cpp
Basics/StringUtils.cpp Basics/StringUtils.cpp
Basics/Thread.cpp Basics/Thread.cpp
Basics/ThreadPool.cpp Basics/ThreadPool.cpp