
Merge branch 'devel' into obi-velocystream-try-merge-devel

* devel: (24 commits)
  minor fixes
  fix effect that on second install we will complain about mismatching passwords
  only test for cleaning up 3rdparty if we're parametrized to do so.
  fix CMake so it finds the PythonInterpreter even with older versions
  hexify corrupted markers
  Fix comments
  fixed cluster start
  issue #2022: double max allowed request body size, cap --batch-size value in arangoimp
  fixed issue #2023: added replicationFactor to docs
  improve the perf script
  fix perfsetupscript
  clean up perf script
  add SYSTEM flag to boost includes to avoid warnings
  Adding Foxx access to agency
  Adding Foxx access to agency
  fix compile warning
  Add missing windows library.
  fix windows compile problems.
  Fix syntax error in windows client installer.
  If we have relative paths that are working, make them absolute so they still work after CWD's of arangod
  ...

Conflicts:
	arangod/Agency/AgentConfiguration.cpp
	arangod/GeneralServer/HttpCommTask.cpp
	arangod/RestServer/DatabaseFeature.cpp
	arangod/VocBase/datafile.cpp
Jan Christoph Uhde 2016-08-30 22:04:31 +02:00
commit ed111a39a1
29 changed files with 526 additions and 251 deletions


@ -321,7 +321,7 @@ if (MSVC)
#http://lists.boost.org/boost-users/2016/04/85968.php
add_definitions("-D_ENABLE_ATOMIC_ALIGNMENT_FIX")
set(MSVC_LIBS crypt32.lib;WINMM.LIB;Ws2_32.lib)
set(MSVC_LIBS Shlwapi.lib;crypt32.lib;WINMM.LIB;Ws2_32.lib)
set(CMAKE_EXE_LINKER_FLAGS
"${CMAKE_EXE_LINKER_FLAGS} /SUBSYSTEM:CONSOLE /SAFESEH:NO /MACHINE:x64 /ignore:4099 ${BASE_LD_FLAGS}"
@ -708,7 +708,7 @@ if (NOT USE_BOOST_UNITTESTS)
message(STATUS "BOOST unit-tests are disabled")
endif ()
include_directories(${Boost_INCLUDE_DIR})
include_directories(SYSTEM ${Boost_INCLUDE_DIR})
add_definitions(-DARANGODB_BOOST_VERSION=\"${Boost_VERSION}\")
################################################################################


@ -94,8 +94,19 @@ and the hash value is used to determine the target shard.
**Note**: Values of shard key attributes cannot be changed once set.
This option is meaningless in a single server setup.
@RESTBODYPARAM{replicationFactor,integer,optional,int64}
(The default is *1*): in a cluster, this attribute determines how many copies
of each shard are kept on different DBServers. The value 1 means that only one
copy (no synchronous replication) is kept. A value of k means that k-1 replicas
are kept. Any two copies reside on different DBServers. Replication between them is
synchronous: every write operation to the "leader" copy is replicated to all
"follower" replicas before the write operation is reported as successful.
If a server fails, this is detected automatically and one of the servers holding
copies takes over, usually without an error being reported.
@RESTDESCRIPTION
Creates an new collection with a given name. The request must contain an
Creates a new collection with a given name. The request must contain an
object with the following attributes.
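
For context, a minimal sketch of how the documented replicationFactor attribute would be sent when creating a collection over the HTTP API; the collection name, shard count, and endpoint below are illustrative and not part of this commit:

    # create a collection whose shards are kept as 2 copies (one leader, one follower)
    curl -X POST http://localhost:8529/_api/collection \
         -d '{"name": "example", "numberOfShards": 3, "replicationFactor": 2}'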


@ -101,16 +101,6 @@ if test -f last_compiled_version.sha; then
fi
COMPILE_MATTERS="3rdParty"
CLEAN_IT=1
if test -n "$LASTREV"; then
lines=`git diff ${LASTREV}: ${COMPILE_MATTERS} | wc -l`
if test $lines -eq 0; then
echo "no relevant changes, no need for full recompile"
CLEAN_IT=0
fi
fi
# setup make options
if test -z "${CXX}"; then
@ -193,6 +183,8 @@ case "$1" in
;;
esac
CLEAN_IT=0
while [ $# -gt 0 ]; do
case "$1" in
@ -304,6 +296,10 @@ while [ $# -gt 0 ]; do
shift
;;
--checkCleanBuild)
CLEAN_IT=1
shift
;;
*)
echo "Unknown option: $1"
exit 1
@ -311,6 +307,18 @@ while [ $# -gt 0 ]; do
esac
done
if test -n "$LASTREV"; then
lines=`git diff ${LASTREV}: ${COMPILE_MATTERS} | wc -l`
if test $lines -eq 0; then
echo "no relevant changes, no need for full recompile"
CLEAN_IT=0
fi
fi
if [ "$GCC5" == 1 ]; then
CC=/usr/bin/gcc-5
CXX=/usr/bin/g++-5


@ -68,10 +68,6 @@ Function disableBackButton
EnableWindow $0 0
FunctionEnd
SetShellVarContext all
FunctionEnd
!include Sections.nsh
;--- Component support macros: ---


@ -15,8 +15,9 @@ if [ "$1" = "configure" -a -z "$2" ]; then
/usr/sbin/arango-init-database \
--uid arangodb --gid arangodb || true
fi
db_set arangodb3/password_again ""
db_set arangodb3/password ""
db_go
fi
# check if we should upgrade the database directory


@ -476,7 +476,7 @@ bool config_t::merge(VPackSlice const& conf) {
ss << "Min RAFT interval: ";
if (_minPing == 0) { // Command line beats persistence
if (conf.hasKey(minPingStr)) {
_minPing = conf.get(minPingStr).getNumericValue<double>();
_minPing = conf.get(minPingStr).getDouble();
ss << _minPing << " (persisted)";
} else {
_minPing = 0.5;
@ -491,7 +491,7 @@ bool config_t::merge(VPackSlice const& conf) {
ss << "Max RAFT interval: ";
if (_maxPing == 0) { // Command line beats persistence
if (conf.hasKey(maxPingStr)) {
_maxPing = conf.get(maxPingStr).getNumericValue<double>();
_maxPing = conf.get(maxPingStr).getDouble();
ss << _maxPing << " (persisted)";
} else {
_maxPing = 2.5;


@ -40,14 +40,32 @@ using namespace arangodb::application_features;
using namespace arangodb::basics;
using namespace arangodb::consensus;
static void JS_LeadingVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
static void JS_EnabledAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
v8::Isolate* isolate = args.GetIsolate();
TRI_V8_TRY_CATCH_BEGIN(isolate);
v8::HandleScope scope(isolate);
try {
ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
TRI_V8_RETURN_TRUE();
} catch (std::exception const& e) {
TRI_V8_RETURN_FALSE();
}
TRI_V8_TRY_CATCH_END
}
static void JS_LeadingAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
TRI_V8_TRY_CATCH_BEGIN(isolate);
v8::HandleScope scope(isolate);
Agent* agent = nullptr;
try {
AgencyFeature* feature =
ApplicationServer::getEnabledFeature<AgencyFeature>("AgencyFeature");
ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
agent = feature->agent();
} catch (std::exception const& e) {
@ -58,20 +76,112 @@ static void JS_LeadingVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
v8::Handle<v8::Object> r = v8::Object::New(isolate);
r->Set(TRI_V8_ASCII_STRING("leading"),
v8::Boolean::New(isolate, agent->leading()));
TRI_V8_RETURN(r);
TRI_V8_TRY_CATCH_END
}
static void JS_ReadVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
static void JS_ReadAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
TRI_V8_TRY_CATCH_BEGIN(isolate);
v8::HandleScope scope(isolate);
Agent* agent = nullptr;
try {
AgencyFeature* feature =
ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
agent = feature->agent();
} catch (std::exception const& e) {
TRI_V8_THROW_EXCEPTION_MESSAGE(
TRI_ERROR_INTERNAL,
std::string("couldn't access agency feature: ") + e.what());
}
query_t query = std::make_shared<Builder>();
int res = TRI_V8ToVPack(isolate, *query, args[0], false);
if (res != TRI_ERROR_NO_ERROR) {
TRI_V8_THROW_EXCEPTION(res);
}
read_ret_t ret = agent->read(query);
if (ret.accepted) { // Leading
TRI_V8_RETURN(TRI_VPackToV8(isolate, ret.result->slice()));
} else { // Not leading
TRI_V8_RETURN_FALSE();
}
TRI_V8_TRY_CATCH_END
}
static void JS_WriteVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
static void JS_WriteAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
TRI_V8_TRY_CATCH_BEGIN(isolate);
v8::HandleScope scope(isolate);
Agent* agent = nullptr;
try {
AgencyFeature* feature =
ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
agent = feature->agent();
} catch (std::exception const& e) {
TRI_V8_THROW_EXCEPTION_MESSAGE(
TRI_ERROR_INTERNAL,
std::string("couldn't access agency feature: ") + e.what());
}
query_t query = std::make_shared<Builder>();
int res = TRI_V8ToVPack(isolate, *query, args[0], false);
if (res != TRI_ERROR_NO_ERROR) {
TRI_V8_THROW_EXCEPTION(res);
}
write_ret_t ret = agent->write(query);
if (ret.accepted) { // Leading
size_t errors = 0;
Builder body;
body.openObject();
body.add("results", VPackValue(VPackValueType::Array));
for (auto const& index : ret.indices) {
body.add(VPackValue(index));
if (index == 0) {
errors++;
}
}
body.close(); body.close();
// Wait for commit of highest except if it is 0?
arangodb::consensus::index_t max_index = 0;
try {
max_index =
*std::max_element(ret.indices.begin(), ret.indices.end());
} catch (std::exception const& e) {
LOG_TOPIC(WARN, Logger::AGENCY)
<< e.what() << " " << __FILE__ << __LINE__;
}
if (max_index > 0) {
agent->waitFor(max_index);
}
TRI_V8_RETURN(TRI_VPackToV8(isolate, body.slice()));
} else { // Not leading
TRI_V8_RETURN_FALSE();
}
TRI_V8_TRY_CATCH_END
}
@ -88,30 +198,32 @@ void TRI_InitV8Agency(v8::Isolate* isolate, v8::Handle<v8::Context> context) {
// ...........................................................................
ft = v8::FunctionTemplate::New(isolate);
ft->SetClassName(TRI_V8_ASCII_STRING("ArangoVulpes"));
ft->SetClassName(TRI_V8_ASCII_STRING("ArangoAgent"));
rt = ft->InstanceTemplate();
rt->SetInternalFieldCount(2);
TRI_AddMethodVocbase(
isolate, rt, TRI_V8_ASCII_STRING("leading"), JS_LeadingVulpes);
isolate, rt, TRI_V8_ASCII_STRING("enabled"), JS_EnabledAgent);
TRI_AddMethodVocbase(
isolate, rt, TRI_V8_ASCII_STRING("read"), JS_ReadVulpes);
isolate, rt, TRI_V8_ASCII_STRING("leading"), JS_LeadingAgent);
TRI_AddMethodVocbase(
isolate, rt, TRI_V8_ASCII_STRING("write"), JS_WriteVulpes);
isolate, rt, TRI_V8_ASCII_STRING("read"), JS_ReadAgent);
TRI_AddMethodVocbase(
isolate, rt, TRI_V8_ASCII_STRING("write"), JS_WriteAgent);
v8g->VulpesTempl.Reset(isolate, rt);
ft->SetClassName(TRI_V8_ASCII_STRING("ArangoVuplesCtor"));
v8g->AgentTempl.Reset(isolate, rt);
ft->SetClassName(TRI_V8_ASCII_STRING("ArangoAgentCtor"));
TRI_AddGlobalFunctionVocbase(
isolate, context, TRI_V8_ASCII_STRING("ArangoVuplesCtor"),
isolate, context, TRI_V8_ASCII_STRING("ArangoAgentCtor"),
ft->GetFunction(), true);
// register the global object
v8::Handle<v8::Object> aa = rt->NewInstance();
if (!aa.IsEmpty()) {
TRI_AddGlobalVariableVocbase(
isolate, context, TRI_V8_ASCII_STRING("ArangoVuples"), aa);
isolate, context, TRI_V8_ASCII_STRING("ArangoAgent"), aa);
}
}


@ -36,9 +36,9 @@ using namespace arangodb;
using namespace arangodb::basics;
using namespace arangodb::rest;
size_t const HttpCommTask::MaximalHeaderSize = 1 * 1024 * 1024; // 1 MB
size_t const HttpCommTask::MaximalBodySize = 512 * 1024 * 1024; // 512 MB
size_t const HttpCommTask::MaximalPipelineSize = 512 * 1024 * 1024; // 512 MB
size_t const HttpCommTask::MaximalHeaderSize = 2 * 1024 * 1024; // 2 MB
size_t const HttpCommTask::MaximalBodySize = 1024 * 1024 * 1024; // 1024 MB
size_t const HttpCommTask::MaximalPipelineSize = 1024 * 1024 * 1024; // 1024 MB
size_t const HttpCommTask::RunCompactEvery = 500;
HttpCommTask::HttpCommTask(GeneralServer* server, TRI_socket_t sock,


@ -136,7 +136,7 @@ void BootstrapFeature::start() {
auto vocbase = DatabaseFeature::DATABASE->systemDatabase();
auto ss = ServerState::instance();
if (!ss->isRunningInCluster() && !ss->isAgent()) {
if (!ss->isRunningInCluster()) {
LOG_TOPIC(DEBUG, Logger::STARTUP) << "Running server/server.js";
V8DealerFeature::DEALER->loadJavascript(vocbase, "server/server.js");
} else if (ss->isCoordinator()) {


@ -23,8 +23,10 @@
#include "DatabaseFeature.h"
#include "ApplicationFeatures/ApplicationServer.h"
#include "Agency/v8-agency.h"
#include "Aql/QueryCache.h"
#include "Aql/QueryRegistry.h"
#include "Basics/ArangoGlobalContext.h"
#include "Basics/FileUtils.h"
#include "Basics/MutexLocker.h"
#include "Basics/StringUtils.h"
@ -942,6 +944,7 @@ void DatabaseFeature::updateContexts() {
TRI_InitV8VocBridge(isolate, context, queryRegistry, vocbase, i);
TRI_InitV8Queries(isolate, context);
TRI_InitV8Cluster(isolate, context);
TRI_InitV8Agency(isolate, context);
},
vocbase);
}


@ -35,6 +35,7 @@
#include "VocBase/ticks.h"
#include <sstream>
#include <iomanip>
// #define DEBUG_DATAFILE 1
@ -622,7 +623,7 @@ static bool CheckDatafile(TRI_datafile_t* datafile, bool ignoreFailures) {
bool nextMarkerOk = false;
if (size > 0) {
auto next = reinterpret_cast<char const*>(marker) + size;
auto next = reinterpret_cast<char const*>(marker) + DatafileHelper::AlignedSize<size_t>(size);
auto p = next;
if (p < end) {
@ -676,10 +677,69 @@ static bool CheckDatafile(TRI_datafile_t* datafile, bool ignoreFailures) {
datafile->_next = datafile->_data + datafile->_currentSize;
datafile->_state = TRI_DF_STATE_OPEN_ERROR;
LOG(WARN) << "crc mismatch found in datafile '" << datafile->getName() << "' at position " << currentSize << ". expected crc: " << CalculateCrcValue(marker) << ", actual crc: " << marker->getCrc();
LOG(WARN) << "crc mismatch found in datafile '" << datafile->getName()
<< "' at position " << currentSize
<< ", of size " << datafile->_maximalSize;
LOG(WARN) << "crc mismatch found inside marker of type '" << TRI_NameMarkerDatafile(marker)
<< "' and size " << size
<< ". expected crc: " << CalculateCrcValue(marker)
<< ", actual crc: " << marker->getCrc();
{
LOG(INFO) << "raw marker data following:";
char const* p = reinterpret_cast<char const*>(marker);
char const* e = reinterpret_cast<char const*>(marker) + DatafileHelper::AlignedSize<size_t>(size);
std::string line;
std::string raw;
size_t printed = 0;
while (p < e) {
// print offset
line.append("0x");
uintptr_t offset = static_cast<uintptr_t>(p - datafile->_data);
for (size_t i = 0; i < 8; ++i) {
uint8_t c = static_cast<uint8_t>((offset & (0xFFULL << 8 * (7 - i))) >> 8 * (7 - i));
uint8_t n1 = c >> 4;
uint8_t n2 = c & 0x0F;
line.push_back((n1 < 10) ? ('0' + n1) : 'A' + n1 - 10);
line.push_back((n2 < 10) ? ('0' + n2) : 'A' + n2 - 10);
}
// print data
line.append(": ");
for (size_t i = 0; i < 16; ++i) {
if (p >= e) {
line.append(" ");
} else {
uint8_t c = static_cast<uint8_t>(*p++);
uint8_t n1 = c >> 4;
uint8_t n2 = c & 0x0F;
line.push_back((n1 < 10) ? ('0' + n1) : 'A' + n1 - 10);
line.push_back((n2 < 10) ? ('0' + n2) : 'A' + n2 - 10);
line.push_back(' ');
raw.push_back((c < 32 || c >= 127) ? '.' : static_cast<unsigned char>(c));
++printed;
}
}
LOG(INFO) << line << " " << raw;
line.clear();
raw.clear();
if (printed >= 2048) {
LOG(INFO) << "(output truncated due to excessive length)";
break;
}
}
}
if (nextMarkerOk) {
LOG(INFO) << "data directly following this marker looks ok so repairing the marker may recover it";
LOG(INFO) << "please restart the server with the parameter '--wal.ignore-logfile-errors true' to repair the marker";
} else {
LOG(WARN) << "data directly following this marker cannot be analyzed";
}


@ -161,6 +161,16 @@ void ImportFeature::validateOptions(
StringUtils::join(positionals, ", ");
FATAL_ERROR_EXIT();
}
static unsigned const MaxBatchSize = 768 * 1024 * 1024;
if (_chunkSize > MaxBatchSize) {
// it's not sensible to raise the batch size beyond this value
// because the server has a built-in limit for the batch size too
// and will reject bigger HTTP request bodies
LOG(WARN) << "capping --batch-size value to " << MaxBatchSize;
_chunkSize = MaxBatchSize;
}
}
void ImportFeature::start() {
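
To illustrate the new cap with a hypothetical invocation (the file name, collection, and endpoint are made up; the 768 MB limit and the warning text come from the code above):

    # requested batch size: 1 GB, i.e. above MaxBatchSize (768 * 1024 * 1024 = 805306368 bytes)
    arangoimp --server.endpoint tcp://127.0.0.1:8529 \
              --collection example --type json --file data.json \
              --batch-size 1073741824
    # arangoimp logs "capping --batch-size value to 805306368" and continues with
    # 768 MB batches, which fit under the server body limit raised to 1024 MB in
    # HttpCommTask.cpp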


@ -3,32 +3,32 @@
# we can't use RUNTIME DESTINATION here.
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOBENCH}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOBENCH}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_config(arangobench)
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGODUMP}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGODUMP}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_config(arangodump)
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOIMP}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOIMP}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_config(arangoimp)
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGORESTORE}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGORESTORE}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_config(arangorestore)
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOSH}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOSH}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_config(arangosh)
install(
FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOVPACK}${CMAKE_EXECUTABLE_SUFFIX}
PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY_X}/${BIN_ARANGOVPACK}${CMAKE_EXECUTABLE_SUFFIX}
DESTINATION ${CMAKE_INSTALL_BINDIR})
install_command_alias(${BIN_ARANGOSH}


@ -16,8 +16,6 @@ endif ()
set(CMAKE_INSTALL_SYSCONFDIR_ARANGO "${CMAKE_INSTALL_SYSCONFDIR}/arangodb3")
set(CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO "${CMAKE_INSTALL_FULL_SYSCONFDIR}/arangodb3")
file(TO_NATIVE_PATH "${CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO}" ETCDIR_NATIVE)
# database directory
FILE(MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/var/lib/arangodb3")


@ -8,9 +8,23 @@ cmake_minimum_required(VERSION 2.8)
# variables from the main build have to be explicitly forwarded:
################################################################################
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "@PROJECT_BINARY_DIR@/bin/")
set(CMAKE_INSTALL_BINDIR ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_X ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
set(CMAKE_INSTALL_BINDIR @CMAKE_INSTALL_BINDIR@)
set(CMAKE_INSTALL_FULL_BINDIR @CMAKE_INSTALL_FULL_BINDIR@)
set(CMAKE_INSTALL_DATAROOTDIR @CMAKE_INSTALL_DATAROOTDIR@)
set(CMAKE_INSTALL_DATAROOTDIR_ARANGO @CMAKE_INSTALL_DATAROOTDIR_ARANGO@)
set(CMAKE_INSTALL_FULL_DATAROOTDIR_ARANGO @CMAKE_INSTALL_FULL_DATAROOTDIR_ARANGO@)
set(CMAKE_INSTALL_DIR @CMAKE_INSTALL_DIR@)
set(CMAKE_INSTALL_PREFIX @CMAKE_INSTALL_PREFIX@)
set(CMAKE_INSTALL_SYSCONFDIR @CMAKE_INSTALL_SYSCONFDIR@)
set(CMAKE_INSTALL_SYSCONFDIR_ARANGO @CMAKE_INSTALL_SYSCONFDIR_ARANGO@)
set(CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO @CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO@)
################################################################################
# Substitute the install binaries:
################################################################################
@ -29,21 +43,20 @@ set(ARANGODB_SOURCE_DIR @ARANGODB_SOURCE_DIR@)
set(ARANGODB_VERSION @ARANGODB_VERSION@)
set(ARANGODB_PACKAGE_CONTACT @ARANGODB_PACKAGE_CONTACT@)
set(ARANGODB_PACKAGE_REVISION @ARANGODB_PACKAGE_REVISION@)
set(CMAKE_INSTALL_FULL_BINDIR @CMAKE_INSTALL_FULL_BINDIR@)
set(ARANGODB_PACKAGE_VENDOR @ARANGODB_PACKAGE_VENDOR@)
set(CMAKE_TARGET_ARCHITECTURES @CMAKE_TARGET_ARCHITECTURES@)
set(CMAKE_INSTALL_SYSCONFDIR_ARANGO @CMAKE_INSTALL_SYSCONFDIR_ARANGO@)
set(CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO @CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO@)
set(ORIGINAL_SOURCE_DIR @PROJECT_SOURCE_DIR@)
set(PROJECT_SOURCE_DIR @PROJECT_SOURCE_DIR@)
################################################################################
# Get the final values for cpack:
################################################################################
set(CPACK_PACKAGE_VERSION "${ARANGODB_VERSION}")
set(CPACK_PACKAGE_NAME "arangodb3-client")
set(CPACK_DEBIAN_PACKAGE_SECTION "shell")
set(CPACK_PACKAGE_VENDOR ${ARANGODB_PACKAGE_VENDOR})
set(CPACK_PACKAGE_CONTACT ${ARANGODB_PACKAGE_CONTACT})
set(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/LICENSE")
set(CPACK_DEBIAN_PACKAGE_HOMEPAGE ${ARANGODB_URL_INFO_ABOUT})
@ -51,6 +64,10 @@ set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
set(CPACK_DEBIAN_PACKAGE_CONFLICTS "arangodb, arangodb3")
set(CPACK_DEBIAN_COMPRESSION_TYPE "xz")
set(CPACK_COMPONENTS_ALL debian-extras)
set(CPACK_GENERATOR "DEB")
set(CPACK_SET_DESTDIR ON)
set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
file(READ "${PROJECT_SOURCE_DIR}/Installation/debian/client_packagedesc.txt"
CPACK_DEBIAN_PACKAGE_DESCRIPTION)
@ -73,12 +90,7 @@ set(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${AR
################################################################################
# Install the external files into the package directory:
################################################################################
include(${PROJECT_SOURCE_DIR}/cmake/GNUInstallDirs.cmake)
set(CMAKE_INSTALL_SYSCONFDIR_ARANGO "${CMAKE_INSTALL_SYSCONFDIR}/arangodb3" CACHE PATH "read-only single-machine data (etc)")
set(CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO "${CMAKE_INSTALL_FULL_SYSCONFDIR}/arangodb3" CACHE PATH "read-only single-machine data (etc)")
set(CMAKE_INSTALL_DATAROOTDIR_ARANGO "${CMAKE_INSTALL_DATAROOTDIR}/arangodb3" CACHE PATH "read-only data (share)")
set(CMAKE_INSTALL_FULL_DATAROOTDIR_ARANGO "${CMAKE_INSTALL_FULL_DATAROOTDIR}/arangodb3" CACHE PATH "read-only data (share)")
set(INSTALL_MACROS_NO_TARGET_INSTALL TRUE)
include(${ORIGINAL_SOURCE_DIR}/cmake/InstallMacros.cmake)


@ -24,6 +24,7 @@
// //////////////////////////////////////////////////////////////////////////////
const isCluster = require("@arangodb/cluster").isCluster();
const isAgent = global.ArangoAgent.enabled();
var _ = require('lodash');
var flatten = require('internal').flatten;
@ -231,7 +232,7 @@ function asNumber (num) {
}
function updateQueueDelayClusterAware() {
if (isCluster) {
if (isCluster && !isAgent) {
global.ArangoAgency.set('Current/FoxxmasterQueueupdate', true);
}
updateQueueDelay();


@ -284,6 +284,12 @@ void ArangoGlobalContext::getCheckPath(std::string &path, const char *whichPath,
LOG(ERR) << "failed to locate " << whichPath << " directory, its neither available in '" << path << "' nor in '" << directory << "'";
FATAL_ERROR_EXIT();
}
arangodb::basics::FileUtils::normalizePath(directory);
path = directory;
}
else {
if (!TRI_PathIsAbsolute(path)) {
arangodb::basics::FileUtils::makePathAbsolute(path);
}
}
}


@ -616,6 +616,16 @@ std::string dirname(std::string const& name) {
return base;
}
void makePathAbsolute(std::string &path) {
int err = 0;
std::string cwd = FileUtils::currentDirectory(&err);
char * p = TRI_GetAbsolutePath(path.c_str(), cwd.c_str());
path = p;
TRI_FreeString(TRI_CORE_MEM_ZONE, p);
}
}
}
}


@ -53,6 +53,14 @@ std::string removeTrailingSeparator(std::string const& name);
void normalizePath(std::string& name);
////////////////////////////////////////////////////////////////////////////////
/// @brief makes a path absolute
///
/// path will be modified in-place
////////////////////////////////////////////////////////////////////////////////
void makePathAbsolute(std::string &path);
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a filename
////////////////////////////////////////////////////////////////////////////////


@ -1,5 +1,5 @@
#define LOCCAL_STATE_DIR "@CMAKE_INSTALL_FULL_LOCALSTATEDIR@"
#define _SYSCONFDIR_ "@ETCDIR_NATIVE@"
#define _SYSCONFDIR_ "@CMAKE_INSTALL_FULL_SYSCONFDIR_ARANGO@"
#define STARTUP_DIRECTORY "@PKGDATADIR@/js"
#define DESTINATION_DIR "@CMAKE_INSTALL_DATAROOTDIR_ARANGO@/js"
#define ICU_DESTINATION_DIRECTORY "@ICU_DT_DEST@"


@ -25,6 +25,7 @@
#ifdef _WIN32
#include <tchar.h>
#include <Shlwapi.h>
#endif
#include "Basics/directories.h"
@ -1698,25 +1699,25 @@ std::string TRI_LocateBinaryPath(char const* argv0) {
std::string TRI_GetInstallRoot(std::string const& binaryPath,
char const *installBinaryPath) {
// First lets remove trailing (back) slashes from the bill:
long ibpLength = strlen(installBinaryPath);
size_t ibpLength = strlen(installBinaryPath);
if (installBinaryPath[ibpLength - 1] == TRI_DIR_SEPARATOR_CHAR) {
ibpLength --;
}
long bpLength = binaryPath.length();
const char *pbPath = binaryPath.c_str();
size_t bpLength = binaryPath.length();
char const* pbPath = binaryPath.c_str();
if (pbPath[bpLength - 1] == TRI_DIR_SEPARATOR_CHAR) {
bpLength --;
--bpLength;
}
if (ibpLength > bpLength) {
return TRI_DIR_SEPARATOR_STR;
}
for (int i = 1; i < ibpLength; i ++) {
if (pbPath[bpLength -i] != installBinaryPath[ibpLength - i]) {
for (size_t i = 1; i < ibpLength; ++i) {
if (pbPath[bpLength - i] != installBinaryPath[ibpLength - i]) {
return TRI_DIR_SEPARATOR_STR;
}
}
@ -2422,3 +2423,16 @@ void TRI_InitializeFiles() {
////////////////////////////////////////////////////////////////////////////////
void TRI_ShutdownFiles() {}
#if _WIN32
bool TRI_PathIsAbsolute(const std::string &path) {
return !PathIsRelative(path.c_str());
}
#else
bool TRI_PathIsAbsolute(const std::string &path) {
return path.c_str()[0] == '/';
}
#endif


@ -369,4 +369,10 @@ void TRI_InitializeFiles();
void TRI_ShutdownFiles();
////////////////////////////////////////////////////////////////////////////////
/// @brief checks whether path is fully qualified or relative
////////////////////////////////////////////////////////////////////////////////
bool TRI_PathIsAbsolute(const std::string &path);
#endif


@ -325,7 +325,9 @@ void TRI_FixIcuDataEnv() {
putenv(e.c_str());
} else {
#ifdef _SYSCONFDIR_
std::string e = "ICU_DATA=" + std::string(_SYSCONFDIR_) + "..\\..\\bin";
std::string SCDIR(_SYSCONFDIR_);
SCDIR = StringUtils::replace(SCDIR, "/", "\\\\");
std::string e = "ICU_DATA=" + SCDIR + "..\\..\\bin";
e = StringUtils::replace(e, "\\", "\\\\");
putenv(e.c_str());
#else


@ -162,7 +162,7 @@ void SimpleHttpResult::addHeaderField(char const* key, size_t keyLength,
if (_returnCode == 204) {
// HTTP 204 = No content. Assume we will have a content-length of 0.
// note that will value can be overridden later if the response has the content-length
// note that the value can be overridden later if the response has the content-length
// header set to some other value
setContentLength(0);
}


@ -28,6 +28,7 @@ TRI_v8_global_s::TRI_v8_global_s(v8::Isolate* isolate)
JSVPack(),
AgencyTempl(),
AgentTempl(),
ClusterInfoTempl(),
ServerStateTempl(),
ClusterCommTempl(),
@ -35,7 +36,6 @@ TRI_v8_global_s::TRI_v8_global_s(v8::Isolate* isolate)
VPackTempl(),
VocbaseColTempl(),
VocbaseTempl(),
VulpesTempl(),
BufferTempl(),


@ -475,6 +475,12 @@ typedef struct TRI_v8_global_s {
v8::Persistent<v8::ObjectTemplate> AgencyTempl;
//////////////////////////////////////////////////////////////////////////////
/// @brief local agent template
//////////////////////////////////////////////////////////////////////////////
v8::Persistent<v8::ObjectTemplate> AgentTempl;
//////////////////////////////////////////////////////////////////////////////
/// @brief clusterinfo template
//////////////////////////////////////////////////////////////////////////////
@ -517,12 +523,6 @@ typedef struct TRI_v8_global_s {
v8::Persistent<v8::ObjectTemplate> VocbaseTempl;
//////////////////////////////////////////////////////////////////////////////
/// @brief vulpes template
//////////////////////////////////////////////////////////////////////////////
v8::Persistent<v8::ObjectTemplate> VulpesTempl;
//////////////////////////////////////////////////////////////////////////////
/// @brief TRI_vocbase_t template
//////////////////////////////////////////////////////////////////////////////


@ -1,17 +1,20 @@
// Compile with
// g++ perfanalysis.cpp -o perfanalyis -std=c++11 -Wall -O3
#include <iostream>
#include <string>
#include <vector>
#include <cstdlib>
#include <cstring>
#include <unordered_map>
#include <algorithm>
#include <iostream>
#include <regex>
#include <sstream>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>
#include <memory>
using namespace std;
struct Event {
static const regex re;
string threadName;
int tid;
string cpu;
@ -22,45 +25,27 @@ struct Event {
bool isRet;
Event(string& line) : isRet(false) {
char* s = strdup(line.c_str());
char* p = strtok(s, " ");
char* q;
if (p != nullptr) {
threadName = p;
p = strtok(nullptr, " ");
tid = strtol(p, nullptr, 10);
p = strtok(nullptr, " ");
cpu = p;
p = strtok(nullptr, " ");
startTime = strtod(p, nullptr);
p = strtok(nullptr, " ");
q = strtok(nullptr, " ");
if (strcmp(q, "cs:") == 0) {
free(s);
return;
std::smatch match_obj;
if(!std::regex_search(line, match_obj, re)){
throw std::logic_error("could not parse line");
}
name = p;
name.pop_back();
auto l = name.size();
if (l >= 3 && name[l-1] == 't' && name[l-2] == 'e' &&
name[l-3] == 'R') {
threadName = match_obj[1];
tid = std::stoi(match_obj[2]);
cpu = match_obj[3];
startTime = std::stod(match_obj[4]);
duration = 0;
name = match_obj[6];
inbrackets = match_obj[7];
if (match_obj[9].length() > 0) {
isRet = true;
name.pop_back();
name.pop_back();
name.pop_back();
name.erase(name.end() - 3, name.end()); // remove Ret suffix from name
}
inbrackets = q;
}
free(s);
}
bool empty() {
return name.empty();
}
bool empty() { return name.empty(); }
string id() {
return to_string(tid) + name;
}
string id() { return to_string(tid) + name; }
string pretty() {
return to_string(duration) + " " + name + " " + to_string(startTime);
@ -77,31 +62,46 @@ struct Event {
}
};
int main(int argc, char* argv[]) {
unordered_map<string, Event*> table;
vector<Event*> list;
// sample output:
// arangod 32636 [005] 16678249.324973: probe_arangod:insertLocalRet: (14a7f60 <- 14a78d6)
// process tid core timepoint scope:name frame
const regex Event::re(
R"_(\s*(\S+))_" // name 1
R"_(\s+(\d+))_" // tid 2
R"_(\s+\[(\d+)\])_" // cup 3
R"_(\s+(\d+\.\d+):)_" // time 4
R"_(\s+([^: ]+):([^: ]+):)_" // scope:func 5:6
R"_(\s+\(([0-9a-f]+)(\s+<-\s+([0-9a-f]+))?\))_" // (start -> stop) 7 -> 9
,
std::regex_constants::ECMAScript | std::regex_constants::optimize);
int main(int /*argc*/, char* /*argv*/ []) {
unordered_map<string, unique_ptr<Event>> table;
vector<unique_ptr<Event>> list;
string line;
while (getline(cin, line)) {
Event* e = new Event(line);
if (!e->empty()) {
string id = e->id();
if (!e->isRet) {
auto event = std::make_unique<Event>(line);
if (!event->empty()) {
string id = event->id();
// insert to table if it is not a function return
if (!event->isRet) {
auto it = table.find(id);
if (it != table.end()) {
cout << "Alarm, double event found:\n" << line << std::endl;
} else {
table.insert(make_pair(id, e));
table.insert(make_pair(id, std::move(event)));
}
// update duration in table
} else {
auto it = table.find(id);
if (it == table.end()) {
cout << "Return for unknown event found:\n" << line << std::endl;
} else {
Event* s = it->second;
unique_ptr<Event> ev = std::move(it->second);
table.erase(it);
s->duration = e->startTime - s->startTime;
list.push_back(s);
ev->duration = event->startTime - ev->startTime;
list.push_back(std::move(ev));
}
}
}
@ -109,13 +109,11 @@ int main(int argc, char* argv[]) {
cout << "Unreturned events:\n";
for (auto& p : table) {
cout << p.second->pretty() << "\n";
delete p.second;
}
sort(list.begin(), list.end(), [](Event* a, Event* b) -> bool {
return *a < *b;
});
sort(list.begin(), list.end(),
[](unique_ptr<Event>const& a, unique_ptr<Event>const& b) -> bool { return *a < *b; });
cout << "Events sorted by name and time:\n";
for (auto* e : list) {
for (auto& e : list) {
cout << e->pretty() << "\n";
}
return 0;


@ -6,60 +6,78 @@
#
# This script sets up performance monitoring events to measure single
# document operations. Run this script with sudo when the ArangoDB
# process is already running. Then do
# process is already running:
#
# ./setupPerfEvents.sh
#
# Now you are able to record the events with:
#
# sudo perf record -e "probe_arangod:*" -aR sleep 60
# (to sample for 60 seconds). A file "perf.data" is written to the
# current directory.
# Dump the events in this file with
#
# The above command will get sample data for 60 seconds. A file "perf.data" is
# written to the current directory. Dump the events in this file with:
#
# sudo perf script > perf.history
#
# This logs the times when individual threads hit the events.
# Use the program perfanalysis.cpp in this directory in the following way:
#
# sudo ./perfanalyis < perf.history > perf.statistics
# This will group enter and exit events of functions together, compute
# the time spent and sort by function.
# Remove all events with
#
# This will group enter and exit events of functions together, compute the time
# spent and sort by function. When finished, remove all events with:
#
# sudo perf probe -d "probe_arangod:*"
# List events with
#
# List events with:
#
# sudo perf probe -l
#
#
ARANGOD_EXECUTABLE=build/bin/arangod
perf probe -x $ARANGOD_EXECUTABLE -d "probe_arangod:*"
main(){
local ARANGOD_EXECUTABLE=${1-build/bin/arangod}
echo Adding events, this takes a few seconds...
#delete all existing events
perf probe -x $ARANGOD_EXECUTABLE -d "probe_arangod:*"
echo "Adding events, this takes a few seconds..."
echo "Single document operations..."
addEvent insertLocal
addEvent removeLocal
addEvent modifyLocal
addEvent documentLocal
echo "Single document operations on coordinator..."
addEvent insertCoordinator
addEvent removeCoordinator
addEvent updateCoordinator
addEvent replaceCoordinator
addEvent documentCoordinator
echo "work method in HttpServerJob"
addEvent workHttpServerJob work@HttpServerJob.cpp
echo "work method in RestDocumentHandler"
addEvent executeRestReadDocument readDocument@RestDocumentHandler.cpp
addEvent executeRestInsertDocument createDocument@RestDocumentHandler.cpp
addEvent handleRequest handleRequest@HttpServer.cpp
addEvent handleWrite handleWrite@SocketTask.cpp
addEvent tcp_sendmsg
addEvent tcp_recvmsg
echo Done.
}
addEvent() {
x=$1
y=$2
if [ "x$y" == "x" ] ; then
y=$x
fi
echo $x
perf probe -x $ARANGOD_EXECUTABLE -a $x=$y 2> /dev/null
perf probe -x $ARANGOD_EXECUTABLE -a ${x}Ret=$y%return 2> /dev/null
local name="$1"
local func="${2-"${name}"}"
echo "setting up $name for function: $func"
perf probe -x $ARANGOD_EXECUTABLE -a $name=$func 2> /dev/null #enter function
perf probe -x $ARANGOD_EXECUTABLE -a ${name}Ret=$func%return 2> /dev/null #return from function
}
echo Single document operations...
addEvent insertLocal
addEvent removeLocal
addEvent modifyLocal
addEvent documentLocal
echo Single document operations on coordinator...
addEvent insertCoordinator
addEvent removeCoordinator
addEvent updateCoordinator
addEvent replaceCoordinator
addEvent documentCoordinator
echo work method in HttpServerJob
addEvent workHttpServerJob work@HttpServerJob.cpp
echo work method in RestDocumentHandler
addEvent executeRestReadDocument readDocument@RestDocumentHandler.cpp
addEvent executeRestInsertDocument createDocument@RestDocumentHandler.cpp
addEvent handleRequest handleRequest@HttpServer.cpp
addEvent handleWrite handleWrite@SocketTask.cpp
addEvent tcp_sendmsg
addEvent tcp_recvmsg
echo Done.
main "$@"


@ -96,6 +96,7 @@ start() {
PORT=$2
mkdir cluster/data$PORT
echo Starting $TYPE on port $PORT
mkdir -p cluster/apps$PORT
build/bin/arangod -c none \
--database.directory cluster/data$PORT \
--cluster.agency-endpoint tcp://127.0.0.1:$BASE \