commit b24a469974
Merge branch 'devel' of github.com:arangodb/ArangoDB into pipeline
@@ -37,7 +37,7 @@ endif ()
 set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
 
 # where to find CMAKE modules
-set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/cmake)
+set(CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
 
 # be verbose about flags used
 option(VERBOSE OFF)
@@ -708,7 +708,7 @@ if (NOT USE_BOOST_UNITTESTS)
   message(STATUS "BOOST unit-tests are disabled")
 endif ()
 
-include_directories(${Boost_INCLUDE_DIR})
+include_directories(SYSTEM ${Boost_INCLUDE_DIR})
 add_definitions(-DARANGODB_BOOST_VERSION=\"${Boost_VERSION}\")
 
 ################################################################################
@@ -94,8 +94,19 @@ and the hash value is used to determine the target shard.
 **Note**: Values of shard key attributes cannot be changed once set.
 This option is meaningless in a single server setup.
 
+@RESTBODYPARAM{replicationFactor,integer,optional,int64}
+(The default is *1*): in a cluster, this attribute determines how many copies
+of each shard are kept on different DBServers. The value 1 means that only one
+copy (no synchronous replication) is kept. A value of k means that k-1 replicas
+are kept. Any two copies reside on different DBServers. Replication between them is
+synchronous, that is, every write operation to the "leader" copy will be replicated
+to all "follower" replicas, before the write operation is reported successful.
+
+If a server fails, this is detected automatically and one of the servers holding
+copies take over, usually without an error being reported.
+
 @RESTDESCRIPTION
-Creates an new collection with a given name. The request must contain an
+Creates a new collection with a given name. The request must contain an
 object with the following attributes.
 
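As an illustrative aside (not part of the commit): the replicationFactor attribute documented above is passed in the body of the collection-creation request. A minimal arangosh sketch, assuming the usual db._create() options object; the collection name and numbers are made up:

    // Create a sharded collection with synchronous replication:
    // 3 shards, each kept on 2 DBServers (1 leader + 1 follower per shard).
    db._create("orders", {
      numberOfShards: 3,     // documents are hash-distributed over 3 shards
      replicationFactor: 2   // k = 2 keeps k-1 = 1 synchronous follower copy
    });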
@@ -41,39 +41,149 @@ using namespace arangodb::application_features;
 using namespace arangodb::basics;
 using namespace arangodb::consensus;
 
-static void JS_LeadingVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
-  v8::Isolate* isolate = args.GetIsolate();
+static void JS_EnabledAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
+  TRI_V8_TRY_CATCH_BEGIN(isolate);
   v8::HandleScope scope(isolate);
 
+  try {
+    ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
+    TRI_V8_RETURN_TRUE();
+  } catch (std::exception const& e) {
+    TRI_V8_RETURN_FALSE();
+  }
+
+  TRI_V8_TRY_CATCH_END
+}
+
+static void JS_LeadingAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
+  TRI_V8_TRY_CATCH_BEGIN(isolate);
+  v8::HandleScope scope(isolate);
+
   Agent* agent = nullptr;
   try {
     AgencyFeature* feature =
-        ApplicationServer::getEnabledFeature<AgencyFeature>("AgencyFeature");
+        ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
     agent = feature->agent();
+
+  } catch (std::exception const& e) {
+    TRI_V8_THROW_EXCEPTION_MESSAGE(
+        TRI_ERROR_INTERNAL,
+        std::string("couldn't access agency feature: ") + e.what());
+  }
+
+  v8::Handle<v8::Object> r = v8::Object::New(isolate);
+
+  r->Set(TRI_V8_ASCII_STRING("leading"),
+         v8::Boolean::New(isolate, agent->leading()));
+
+  TRI_V8_RETURN(r);
+  TRI_V8_TRY_CATCH_END
+}
+
+static void JS_ReadAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
+  TRI_V8_TRY_CATCH_BEGIN(isolate);
+  v8::HandleScope scope(isolate);
+
+  Agent* agent = nullptr;
+  try {
+    AgencyFeature* feature =
+        ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
+    agent = feature->agent();
+
+  } catch (std::exception const& e) {
+    TRI_V8_THROW_EXCEPTION_MESSAGE(
+        TRI_ERROR_INTERNAL,
+        std::string("couldn't access agency feature: ") + e.what());
+  }
+
+  query_t query = std::make_shared<Builder>();
+  int res = TRI_V8ToVPack(isolate, *query, args[0], false);
+
+  if (res != TRI_ERROR_NO_ERROR) {
+    TRI_V8_THROW_EXCEPTION(res);
+  }
+
+  read_ret_t ret = agent->read(query);
+
+  if (ret.accepted) {  // Leading
+    TRI_V8_RETURN(TRI_VPackToV8(isolate, ret.result->slice()));
+  } else {  // Not leading
+    TRI_V8_RETURN_FALSE();
+  }
+
+  TRI_V8_TRY_CATCH_END
+}
+
+static void JS_WriteAgent(v8::FunctionCallbackInfo<v8::Value> const& args) {
+  TRI_V8_TRY_CATCH_BEGIN(isolate);
+  v8::HandleScope scope(isolate);
+
+  Agent* agent = nullptr;
+  try {
+    AgencyFeature* feature =
+        ApplicationServer::getEnabledFeature<AgencyFeature>("Agency");
+    agent = feature->agent();
+
   } catch (std::exception const& e) {
     TRI_V8_THROW_EXCEPTION_MESSAGE(
         TRI_ERROR_INTERNAL,
         std::string("couldn't access agency feature: ") + e.what());
   }
 
-  v8::Handle<v8::Object> r = v8::Object::New(isolate);
+  query_t query = std::make_shared<Builder>();
+  int res = TRI_V8ToVPack(isolate, *query, args[0], false);
+
+  if (res != TRI_ERROR_NO_ERROR) {
+    TRI_V8_THROW_EXCEPTION(res);
+  }
 
-  r->Set(TRI_V8_ASCII_STRING("leading"),
-         v8::Boolean::New(isolate, agent->leading()));
+  write_ret_t ret = agent->write(query);
 
-  TRI_V8_RETURN(r);
+  if (ret.accepted) {  // Leading
+    size_t errors = 0;
+    Builder body;
+    body.openObject();
+    body.add("results", VPackValue(VPackValueType::Array));
+    for (auto const& index : ret.indices) {
+      body.add(VPackValue(index));
+      if (index == 0) {
+        errors++;
+      }
+    }
+    body.close(); body.close();
+
+    // Wait for commit of highest except if it is 0?
+    arangodb::consensus::index_t max_index = 0;
+    try {
+      max_index =
+          *std::max_element(ret.indices.begin(), ret.indices.end());
+    } catch (std::exception const& e) {
+      LOG_TOPIC(WARN, Logger::AGENCY)
+          << e.what() << " " << __FILE__ << __LINE__;
+    }
+
+    if (max_index > 0) {
+      agent->waitFor(max_index);
+    }
+
+    TRI_V8_RETURN(TRI_VPackToV8(isolate, body.slice()));
+  } else {  // Not leading
+    TRI_V8_RETURN_FALSE();
+  }
 
-}
-
-static void JS_ReadVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
-}
-
-static void JS_WriteVulpes(v8::FunctionCallbackInfo<v8::Value> const& args) {
+  TRI_V8_TRY_CATCH_END
 }
@@ -89,30 +199,32 @@ void TRI_InitV8Agency(v8::Isolate* isolate, v8::Handle<v8::Context> context) {
   // ...........................................................................
 
   ft = v8::FunctionTemplate::New(isolate);
-  ft->SetClassName(TRI_V8_ASCII_STRING("ArangoVulpes"));
+  ft->SetClassName(TRI_V8_ASCII_STRING("ArangoAgent"));
 
   rt = ft->InstanceTemplate();
   rt->SetInternalFieldCount(2);
 
   TRI_AddMethodVocbase(
-      isolate, rt, TRI_V8_ASCII_STRING("leading"), JS_LeadingVulpes);
+      isolate, rt, TRI_V8_ASCII_STRING("enabled"), JS_EnabledAgent);
   TRI_AddMethodVocbase(
-      isolate, rt, TRI_V8_ASCII_STRING("read"), JS_ReadVulpes);
+      isolate, rt, TRI_V8_ASCII_STRING("leading"), JS_LeadingAgent);
   TRI_AddMethodVocbase(
-      isolate, rt, TRI_V8_ASCII_STRING("write"), JS_WriteVulpes);
+      isolate, rt, TRI_V8_ASCII_STRING("read"), JS_ReadAgent);
+  TRI_AddMethodVocbase(
+      isolate, rt, TRI_V8_ASCII_STRING("write"), JS_WriteAgent);
 
-  v8g->VulpesTempl.Reset(isolate, rt);
-  ft->SetClassName(TRI_V8_ASCII_STRING("ArangoVuplesCtor"));
+  v8g->AgentTempl.Reset(isolate, rt);
+  ft->SetClassName(TRI_V8_ASCII_STRING("ArangoAgentCtor"));
 
   TRI_AddGlobalFunctionVocbase(
-      isolate, context, TRI_V8_ASCII_STRING("ArangoVuplesCtor"),
+      isolate, context, TRI_V8_ASCII_STRING("ArangoAgentCtor"),
       ft->GetFunction(), true);
 
   // register the global object
   v8::Handle<v8::Object> aa = rt->NewInstance();
   if (!aa.IsEmpty()) {
     TRI_AddGlobalVariableVocbase(
-        isolate, context, TRI_V8_ASCII_STRING("ArangoVuples"), aa);
+        isolate, context, TRI_V8_ASCII_STRING("ArangoAgent"), aa);
   }
 }
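For orientation (again an aside, not part of the diff): the methods registered above surface as a global ArangoAgent object in the server's JavaScript context, replacing the old ArangoVuples global. A rough sketch of how they could be called from the JavaScript console of an agency node; the exact transaction format accepted by read() and write() is an assumption here:

    // Only meaningful on a server running the Agency feature.
    if (ArangoAgent.enabled() && ArangoAgent.leading().leading) {
      // read() and write() take agency transactions (converted to VPack via
      // TRI_V8ToVPack) and return false when this node is not the leader.
      ArangoAgent.write([[{ "/arango/example": { "op": "set", "new": 42 } }]]);
      var result = ArangoAgent.read([["/arango/example"]]);
      print(result);
    }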
@@ -47,9 +47,9 @@ class HttpResponse;
 namespace rest {
 class GeneralServer;
 
-size_t const HttpCommTask::MaximalHeaderSize = 1 * 1024 * 1024; // 1 MB
-size_t const HttpCommTask::MaximalBodySize = 512 * 1024 * 1024; // 512 MB
-size_t const HttpCommTask::MaximalPipelineSize = 512 * 1024 * 1024; // 512 MB
+size_t const HttpCommTask::MaximalHeaderSize = 2 * 1024 * 1024; // 2 MB
+size_t const HttpCommTask::MaximalBodySize = 1024 * 1024 * 1024; // 1024 MB
+size_t const HttpCommTask::MaximalPipelineSize = 1024 * 1024 * 1024; // 1024 MB
 size_t const HttpCommTask::RunCompactEvery = 500;
 
 HttpCommTask::HttpCommTask(GeneralServer* server, TRI_socket_t sock,
@@ -24,6 +24,7 @@
 
 #include "Basics/StringUtils.h"
 #include "Basics/ArangoGlobalContext.h"
+#include "Agency/v8-agency.h"
 #include "Cluster/ServerState.h"
 #include "Cluster/v8-cluster.h"
 #include "GeneralServer/GeneralServerFeature.h"
@@ -219,6 +220,7 @@ void DatabaseFeature::updateContexts() {
             i);
         TRI_InitV8Queries(isolate, context);
         TRI_InitV8Cluster(isolate, context);
+        TRI_InitV8Agency(isolate, context);
       },
       vocbase);
 }
@@ -34,6 +34,7 @@
 #include "VocBase/server.h"
 
 #include <sstream>
+#include <iomanip>
 
 // #define DEBUG_DATAFILE 1
 
@@ -756,7 +757,7 @@ static bool CheckDatafile(TRI_datafile_t* datafile, bool ignoreFailures) {
       bool nextMarkerOk = false;
 
       if (size > 0) {
-        auto next = reinterpret_cast<char const*>(marker) + size;
+        auto next = reinterpret_cast<char const*>(marker) + DatafileHelper::AlignedSize<size_t>(size);
         auto p = next;
 
         if (p < end) {
@@ -783,7 +784,7 @@ static bool CheckDatafile(TRI_datafile_t* datafile, bool ignoreFailures) {
           // there is a next marker
           auto nextMarker =
               reinterpret_cast<TRI_df_marker_t const*>(next);
 
           if (nextMarker->getType() != 0 &&
               nextMarker->getSize() >= sizeof(TRI_df_marker_t) &&
               next + nextMarker->getSize() <= end &&
@@ -810,10 +811,67 @@ static bool CheckDatafile(TRI_datafile_t* datafile, bool ignoreFailures) {
       datafile->_next = datafile->_data + datafile->_currentSize;
       datafile->_state = TRI_DF_STATE_OPEN_ERROR;
 
-      LOG(WARN) << "crc mismatch found in datafile '" << datafile->getName(datafile) << "' at position " << currentSize << ". expected crc: " << CalculateCrcValue(marker) << ", actual crc: " << marker->getCrc();
+      LOG(WARN) << "crc mismatch found in datafile '" << datafile->getName(datafile) << "' of size "
+                << datafile->_maximalSize << ", at position " << currentSize;
+
+      LOG(WARN) << "crc mismatch found inside marker of type '" << TRI_NameMarkerDatafile(marker)
+                << "' and size " << size
+                << ". expected crc: " << CalculateCrcValue(marker) << ", actual crc: " << marker->getCrc();
+
+      {
+        LOG(INFO) << "raw marker data following:";
+        char const* p = reinterpret_cast<char const*>(marker);
+        char const* e = reinterpret_cast<char const*>(marker) + DatafileHelper::AlignedSize<size_t>(size);
+        std::string line;
+        std::string raw;
+        size_t printed = 0;
+        while (p < e) {
+          // print offset
+          line.append("0x");
+          uintptr_t offset = static_cast<uintptr_t>(p - datafile->_data);
+          for (size_t i = 0; i < 8; ++i) {
+            uint8_t c = static_cast<uint8_t>((offset & (0xFFULL << 8 * (7 - i))) >> 8 * (7 - i));
+            uint8_t n1 = c >> 4;
+            uint8_t n2 = c & 0x0F;
+
+            line.push_back((n1 < 10) ? ('0' + n1) : 'A' + n1 - 10);
+            line.push_back((n2 < 10) ? ('0' + n2) : 'A' + n2 - 10);
+          }
+
+          // print data
+          line.append(": ");
+          for (size_t i = 0; i < 16; ++i) {
+            if (p >= e) {
+              line.append(" ");
+            } else {
+              uint8_t c = static_cast<uint8_t>(*p++);
+              uint8_t n1 = c >> 4;
+              uint8_t n2 = c & 0x0F;
+
+              line.push_back((n1 < 10) ? ('0' + n1) : 'A' + n1 - 10);
+              line.push_back((n2 < 10) ? ('0' + n2) : 'A' + n2 - 10);
+              line.push_back(' ');
+
+              raw.push_back((c < 32 || c >= 127) ? '.' : static_cast<unsigned char>(c));
+
+              ++printed;
+            }
+          }
+
+          LOG(INFO) << line << " " << raw;
+          line.clear();
+          raw.clear();
+
+          if (printed >= 2048) {
+            LOG(INFO) << "(output truncated due to excessive length)";
+            break;
+          }
+        }
+      }
 
       if (nextMarkerOk) {
         LOG(INFO) << "data directly following this marker looks ok so repairing the marker may recover it";
+        LOG(INFO) << "please restart the server with the parameter '--wal.ignore-logfile-errors true' to repair the marker";
       } else {
         LOG(WARN) << "data directly following this marker cannot be analyzed";
       }
@@ -161,6 +161,16 @@ void ImportFeature::validateOptions(
                  StringUtils::join(positionals, ", ");
     FATAL_ERROR_EXIT();
   }
+
+  static unsigned const MaxBatchSize = 768 * 1024 * 1024;
+
+  if (_chunkSize > MaxBatchSize) {
+    // it's not sensible to raise the batch size beyond this value
+    // because the server has a built-in limit for the batch size too
+    // and will reject bigger HTTP request bodies
+    LOG(WARN) << "capping --batch-size value to " << MaxBatchSize;
+    _chunkSize = MaxBatchSize;
+  }
 }
 
 void ImportFeature::start() {
@@ -0,0 +1,165 @@
+#.rst:
+# FindPythonInterp
+# ----------------
+#
+# Find python interpreter
+#
+# This module finds if Python interpreter is installed and determines
+# where the executables are. This code sets the following variables:
+#
+# ::
+#
+#   PYTHONINTERP_FOUND - Was the Python executable found
+#   PYTHON_EXECUTABLE  - path to the Python interpreter
+#
+#
+#
+# ::
+#
+#   PYTHON_VERSION_STRING - Python version found e.g. 2.5.2
+#   PYTHON_VERSION_MAJOR  - Python major version found e.g. 2
+#   PYTHON_VERSION_MINOR  - Python minor version found e.g. 5
+#   PYTHON_VERSION_PATCH  - Python patch version found e.g. 2
+#
+#
+#
+# The Python_ADDITIONAL_VERSIONS variable can be used to specify a list
+# of version numbers that should be taken into account when searching
+# for Python. You need to set this variable before calling
+# find_package(PythonInterp).
+#
+# If calling both ``find_package(PythonInterp)`` and
+# ``find_package(PythonLibs)``, call ``find_package(PythonInterp)`` first to
+# get the currently active Python version by default with a consistent version
+# of PYTHON_LIBRARIES.
+
+#=============================================================================
+# Copyright 2005-2010 Kitware, Inc.
+# Copyright 2011 Bjoern Ricks <bjoern.ricks@gmail.com>
+# Copyright 2012 Rolf Eike Beer <eike@sf-mail.de>
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+#  License text for the above reference.)
+
+unset(_Python_NAMES)
+
+set(_PYTHON1_VERSIONS 1.6 1.5)
+set(_PYTHON2_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0)
+set(_PYTHON3_VERSIONS 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
+
+if(PythonInterp_FIND_VERSION)
+  if(PythonInterp_FIND_VERSION_COUNT GREATER 1)
+    set(_PYTHON_FIND_MAJ_MIN "${PythonInterp_FIND_VERSION_MAJOR}.${PythonInterp_FIND_VERSION_MINOR}")
+    list(APPEND _Python_NAMES
+      python${_PYTHON_FIND_MAJ_MIN}
+      python${PythonInterp_FIND_VERSION_MAJOR})
+    unset(_PYTHON_FIND_OTHER_VERSIONS)
+    if(NOT PythonInterp_FIND_VERSION_EXACT)
+      foreach(_PYTHON_V ${_PYTHON${PythonInterp_FIND_VERSION_MAJOR}_VERSIONS})
+        if(NOT _PYTHON_V VERSION_LESS _PYTHON_FIND_MAJ_MIN)
+          list(APPEND _PYTHON_FIND_OTHER_VERSIONS ${_PYTHON_V})
+        endif()
+      endforeach()
+    endif()
+    unset(_PYTHON_FIND_MAJ_MIN)
+  else()
+    list(APPEND _Python_NAMES python${PythonInterp_FIND_VERSION_MAJOR})
+    set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON${PythonInterp_FIND_VERSION_MAJOR}_VERSIONS})
+  endif()
+else()
+  set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON3_VERSIONS} ${_PYTHON2_VERSIONS} ${_PYTHON1_VERSIONS})
+endif()
+find_program(PYTHON_EXECUTABLE NAMES ${_Python_NAMES})
+
+# Set up the versions we know about, in the order we will search. Always add
+# the user supplied additional versions to the front.
+set(_Python_VERSIONS ${Python_ADDITIONAL_VERSIONS})
+# If FindPythonInterp has already found the major and minor version,
+# insert that version next to get consistent versions of the interpreter and
+# library.
+if(DEFINED PYTHONLIBS_VERSION_STRING)
+  string(REPLACE "." ";" _PYTHONLIBS_VERSION "${PYTHONLIBS_VERSION_STRING}")
+  list(GET _PYTHONLIBS_VERSION 0 _PYTHONLIBS_VERSION_MAJOR)
+  list(GET _PYTHONLIBS_VERSION 1 _PYTHONLIBS_VERSION_MINOR)
+  list(APPEND _Python_VERSIONS ${_PYTHONLIBS_VERSION_MAJOR}.${_PYTHONLIBS_VERSION_MINOR})
+endif()
+# Search for the current active python version first
+list(APPEND _Python_VERSIONS ";")
+list(APPEND _Python_VERSIONS ${_PYTHON_FIND_OTHER_VERSIONS})
+
+unset(_PYTHON_FIND_OTHER_VERSIONS)
+unset(_PYTHON1_VERSIONS)
+unset(_PYTHON2_VERSIONS)
+unset(_PYTHON3_VERSIONS)
+
+# Search for newest python version if python executable isn't found
+if(NOT PYTHON_EXECUTABLE)
+  foreach(_CURRENT_VERSION IN LISTS _Python_VERSIONS)
+    set(_Python_NAMES python${_CURRENT_VERSION})
+    if(WIN32)
+      list(APPEND _Python_NAMES python)
+    endif()
+    find_program(PYTHON_EXECUTABLE
+      NAMES ${_Python_NAMES}
+      PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]
+      )
+  endforeach()
+endif()
+
+# determine python version string
+if(PYTHON_EXECUTABLE)
+  execute_process(COMMAND "${PYTHON_EXECUTABLE}" -c
+                          "import sys; sys.stdout.write(';'.join([str(x) for x in sys.version_info[:3]]))"
+                  OUTPUT_VARIABLE _VERSION
+                  RESULT_VARIABLE _PYTHON_VERSION_RESULT
+                  ERROR_QUIET)
+  if(NOT _PYTHON_VERSION_RESULT)
+    string(REPLACE ";" "." PYTHON_VERSION_STRING "${_VERSION}")
+    list(GET _VERSION 0 PYTHON_VERSION_MAJOR)
+    list(GET _VERSION 1 PYTHON_VERSION_MINOR)
+    list(GET _VERSION 2 PYTHON_VERSION_PATCH)
+    if(PYTHON_VERSION_PATCH EQUAL 0)
+      # it's called "Python 2.7", not "2.7.0"
+      string(REGEX REPLACE "\\.0$" "" PYTHON_VERSION_STRING "${PYTHON_VERSION_STRING}")
+    endif()
+  else()
+    # sys.version predates sys.version_info, so use that
+    execute_process(COMMAND "${PYTHON_EXECUTABLE}" -c "import sys; sys.stdout.write(sys.version)"
+                    OUTPUT_VARIABLE _VERSION
+                    RESULT_VARIABLE _PYTHON_VERSION_RESULT
+                    ERROR_QUIET)
+    if(NOT _PYTHON_VERSION_RESULT)
+      string(REGEX REPLACE " .*" "" PYTHON_VERSION_STRING "${_VERSION}")
+      string(REGEX REPLACE "^([0-9]+)\\.[0-9]+.*" "\\1" PYTHON_VERSION_MAJOR "${PYTHON_VERSION_STRING}")
+      string(REGEX REPLACE "^[0-9]+\\.([0-9])+.*" "\\1" PYTHON_VERSION_MINOR "${PYTHON_VERSION_STRING}")
+      if(PYTHON_VERSION_STRING MATCHES "^[0-9]+\\.[0-9]+\\.([0-9]+)")
+        set(PYTHON_VERSION_PATCH "${CMAKE_MATCH_1}")
+      else()
+        set(PYTHON_VERSION_PATCH "0")
+      endif()
+    else()
+      # sys.version was first documented for Python 1.5, so assume
+      # this is older.
+      set(PYTHON_VERSION_STRING "1.4")
+      set(PYTHON_VERSION_MAJOR "1")
+      set(PYTHON_VERSION_MINOR "4")
+      set(PYTHON_VERSION_PATCH "0")
+    endif()
+  endif()
+  unset(_PYTHON_VERSION_RESULT)
+  unset(_VERSION)
+endif()
+
+# handle the QUIETLY and REQUIRED arguments and set PYTHONINTERP_FOUND to TRUE if
+# all listed variables are TRUE
+include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(PythonInterp REQUIRED_VARS PYTHON_EXECUTABLE VERSION_VAR PYTHON_VERSION_STRING)
+
+mark_as_advanced(PYTHON_EXECUTABLE)
@@ -1708,18 +1708,18 @@ std::string TRI_GetInstallRoot(std::string const& binaryPath,
   }
 
   size_t bpLength = binaryPath.length();
-  const char *pbPath = binaryPath.c_str();
+  char const* pbPath = binaryPath.c_str();
 
   if (pbPath[bpLength - 1] == TRI_DIR_SEPARATOR_CHAR) {
-    bpLength --;
+    --bpLength;
   }
 
   if (ibpLength > bpLength) {
     return TRI_DIR_SEPARATOR_STR;
   }
 
-  for (int i = 1; i < ibpLength; i ++) {
-    if (pbPath[bpLength -i] != installBinaryPath[ibpLength - i]) {
+  for (size_t i = 1; i < ibpLength; ++i) {
+    if (pbPath[bpLength - i] != installBinaryPath[ibpLength - i]) {
       return TRI_DIR_SEPARATOR_STR;
     }
   }
@@ -162,7 +162,7 @@ void SimpleHttpResult::addHeaderField(char const* key, size_t keyLength,
 
   if (_returnCode == 204) {
     // HTTP 204 = No content. Assume we will have a content-length of 0.
-    // note that will value can be overridden later if the response has the content-length
+    // note that the value can be overridden later if the response has the content-length
     // header set to some other value
     setContentLength(0);
   }
@@ -28,6 +28,7 @@ TRI_v8_global_s::TRI_v8_global_s(v8::Isolate* isolate)
       JSVPack(),
 
       AgencyTempl(),
+      AgentTempl(),
       ClusterInfoTempl(),
       ServerStateTempl(),
       ClusterCommTempl(),
@@ -35,7 +36,6 @@ TRI_v8_global_s::TRI_v8_global_s(v8::Isolate* isolate)
       VPackTempl(),
       VocbaseColTempl(),
       VocbaseTempl(),
-      VulpesTempl(),
 
       BufferTempl(),
 
@@ -475,6 +475,12 @@ typedef struct TRI_v8_global_s {
 
   v8::Persistent<v8::ObjectTemplate> AgencyTempl;
 
+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief local agent template
+  //////////////////////////////////////////////////////////////////////////////
+
+  v8::Persistent<v8::ObjectTemplate> AgentTempl;
+
   //////////////////////////////////////////////////////////////////////////////
   /// @brief clusterinfo template
   //////////////////////////////////////////////////////////////////////////////
@@ -517,12 +523,6 @@ typedef struct TRI_v8_global_s {
 
   v8::Persistent<v8::ObjectTemplate> VocbaseTempl;
 
-  //////////////////////////////////////////////////////////////////////////////
-  /// @brief vulpes template
-  //////////////////////////////////////////////////////////////////////////////
-
-  v8::Persistent<v8::ObjectTemplate> VulpesTempl;
-
   //////////////////////////////////////////////////////////////////////////////
   /// @brief TRI_vocbase_t template
   //////////////////////////////////////////////////////////////////////////////
@@ -1,17 +1,20 @@
 // Compile with
 // g++ perfanalysis.cpp -o perfanalyis -std=c++11 -Wall -O3
 
-#include <iostream>
-#include <string>
-#include <vector>
-#include <cstdlib>
-#include <cstring>
-#include <unordered_map>
 #include <algorithm>
+#include <iostream>
+#include <regex>
+#include <sstream>
+#include <stdexcept>
+#include <string>
+#include <unordered_map>
+#include <vector>
+#include <memory>
 
 using namespace std;
 
 struct Event {
+  static const regex re;
   string threadName;
   int tid;
   string cpu;
@@ -22,45 +25,27 @@ struct Event {
   bool isRet;
 
   Event(string& line) : isRet(false) {
-    char* s = strdup(line.c_str());
-    char* p = strtok(s, " ");
-    char* q;
-    if (p != nullptr) {
-      threadName = p;
-      p = strtok(nullptr, " ");
-      tid = strtol(p, nullptr, 10);
-      p = strtok(nullptr, " ");
-      cpu = p;
-      p = strtok(nullptr, " ");
-      startTime = strtod(p, nullptr);
-      p = strtok(nullptr, " ");
-      q = strtok(nullptr, " ");
-      if (strcmp(q, "cs:") == 0) {
-        free(s);
-        return;
-      }
-      name = p;
-      name.pop_back();
-      auto l = name.size();
-      if (l >= 3 && name[l-1] == 't' && name[l-2] == 'e' &&
-          name[l-3] == 'R') {
-        isRet = true;
-        name.pop_back();
-        name.pop_back();
-        name.pop_back();
-      }
-      inbrackets = q;
+    std::smatch match_obj;
+    if(!std::regex_search(line, match_obj, re)){
+      throw std::logic_error("could not parse line");
+    }
+
+    threadName = match_obj[1];
+    tid = std::stoi(match_obj[2]);
+    cpu = match_obj[3];
+    startTime = std::stod(match_obj[4]);
+    duration = 0;
+    name = match_obj[6];
+    inbrackets = match_obj[7];
+    if (match_obj[9].length() > 0) {
+      isRet = true;
+      name.erase(name.end() - 3, name.end()); // remove Ret suffix form name
     }
-    free(s);
   }
 
-  bool empty() {
-    return name.empty();
-  }
+  bool empty() { return name.empty(); }
 
-  string id() {
-    return to_string(tid) + name;
-  }
+  string id() { return to_string(tid) + name; }
 
   string pretty() {
     return to_string(duration) + " " + name + " " + to_string(startTime);
@@ -77,31 +62,46 @@ struct Event {
   }
 };
 
-int main(int argc, char* argv[]) {
-  unordered_map<string, Event*> table;
-  vector<Event*> list;
+// sample output:
+// arangod 32636 [005] 16678249.324973: probe_arangod:insertLocalRet: (14a7f60 <- 14a78d6)
+// process tid core timepoint scope:name frame
+const regex Event::re(
+    R"_(\s*(\S+))_"  // name 1
+    R"_(\s+(\d+))_"  // tid 2
+    R"_(\s+\[(\d+)\])_"  // cup 3
+    R"_(\s+(\d+\.\d+):)_"  // time 4
+    R"_(\s+([^: ]+):([^: ]+):)_"  // scope:func 5:6
+    R"_(\s+\(([0-9a-f]+)(\s+<-\s+([0-9a-f]+))?\))_"  // (start -> stop) 7 -> 9
+    ,
+    std::regex_constants::ECMAScript | std::regex_constants::optimize);
+
+int main(int /*argc*/, char* /*argv*/ []) {
+  unordered_map<string, unique_ptr<Event>> table;
+  vector<unique_ptr<Event>> list;
 
   string line;
   while (getline(cin, line)) {
-    Event* e = new Event(line);
-    if (!e->empty()) {
-      string id = e->id();
-      if (!e->isRet) {
+    auto event = std::make_unique<Event>(line);
+    if (!event->empty()) {
+      string id = event->id();
+      // insert to table if it is not a function return
+      if (!event->isRet) {
         auto it = table.find(id);
         if (it != table.end()) {
           cout << "Alarm, double event found:\n" << line << std::endl;
         } else {
-          table.insert(make_pair(id, e));
+          table.insert(make_pair(id, std::move(event)));
         }
+      // update duration in table
       } else {
         auto it = table.find(id);
         if (it == table.end()) {
           cout << "Return for unknown event found:\n" << line << std::endl;
         } else {
-          Event* s = it->second;
+          unique_ptr<Event> ev = std::move(it->second);
           table.erase(it);
-          s->duration = e->startTime - s->startTime;
-          list.push_back(s);
+          ev->duration = event->startTime - ev->startTime;
+          list.push_back(std::move(ev));
         }
       }
     }
   }
@@ -109,13 +109,11 @@ int main(int argc, char* argv[]) {
   cout << "Unreturned events:\n";
   for (auto& p : table) {
     cout << p.second->pretty() << "\n";
-    delete p.second;
   }
-  sort(list.begin(), list.end(), [](Event* a, Event* b) -> bool {
-    return *a < *b;
-  });
+  sort(list.begin(), list.end(),
+       [](unique_ptr<Event>const& a, unique_ptr<Event>const& b) -> bool { return *a < *b; });
   cout << "Events sorted by name and time:\n";
-  for (auto* e : list) {
+  for (auto& e : list) {
     cout << e->pretty() << "\n";
   }
   return 0;
@@ -6,60 +6,78 @@
 #
 # This script sets up performance monitoring events to measure single
 # document operations. Run this script with sudo when the ArangoDB
-# process is already running. Then do
+# process is already running:
+#
+#   ./setupPerfEvents.sh
+#
+# Now you are able to recrod the event with:
+#
 #   sudo perf record -e "probe_arangod:*" -aR sleep 60
-# (to sample for 60 seconds). A file "perf.data" is written to the
-# current directory.
-# Dump the events in this file with
+#
+# The above command will get sample data for 60 seconds. A file "perf.data" is
+# written to the current directory. Dump the events in this file with:
+#
 #   sudo perf script > perf.history
+#
 # This logs the times when individual threads hit the events.
 # Use the program perfanalyis.cpp in this directory in the following way:
+#
 #   sudo ./perfanalyis < perf.history > perf.statistics
-# This will group enter and exit events of functions together, compute
-# the time spent and sort by function.
-# Remove all events with
+#
+# This will group enter and exit events of functions together, compute the time
+# spent and sort by function. When finised remove all events with:
+#
 #   sudo perf probe -d "probe_arangod:*"
-# List events with
+#
+# List events with:
+#
 #   sudo perf probe -l
+#
+#
 
-ARANGOD_EXECUTABLE=build/bin/arangod
-perf probe -x $ARANGOD_EXECUTABLE -d "probe_arangod:*"
+main(){
+  local ARANGOD_EXECUTABLE=${1-build/bin/arangod}
 
-echo Adding events, this takes a few seconds...
+  #delete all existing events
+  perf probe -x $ARANGOD_EXECUTABLE -d "probe_arangod:*"
+
+  echo "Adding events, this takes a few seconds..."
+
+  echo "Single document operations..."
+  addEvent insertLocal
+  addEvent removeLocal
+  addEvent modifyLocal
+  addEvent documentLocal
+
+  echo "Single document operations on coordinator..."
+  addEvent insertCoordinator
+  addEvent removeCoordinator
+  addEvent updateCoordinator
+  addEvent replaceCoordinator
+  addEvent documentCoordinator
+
+  echo "work method in HttpServerJob"
+  addEvent workHttpServerJob work@HttpServerJob.cpp
+
+  echo "work method in RestDocumentHandler"
+  addEvent executeRestReadDocument readDocument@RestDocumentHandler.cpp
+  addEvent executeRestInsertDocument createDocument@RestDocumentHandler.cpp
+  addEvent handleRequest handleRequest@HttpServer.cpp
+  addEvent handleWrite handleWrite@SocketTask.cpp
+
+  addEvent tcp_sendmsg
+  addEvent tcp_recvmsg
+
+  echo Done.
+}
 
 addEvent() {
-  x=$1
-  y=$2
-  if [ "x$y" == "x" ] ; then
-    y=$x
-  fi
-  echo $x
-  perf probe -x $ARANGOD_EXECUTABLE -a $x=$y 2> /dev/null
-  perf probe -x $ARANGOD_EXECUTABLE -a ${x}Ret=$y%return 2> /dev/null
+  local name="$1"
+  local func="${2-"${name}"}"
+
+  echo "setting up $name for function: $func"
+  perf probe -x $ARANGOD_EXECUTABLE -a $name=$func 2> /dev/null #enter function
+  perf probe -x $ARANGOD_EXECUTABLE -a ${name}Ret=$func%return 2> /dev/null #return form function
 }
-echo Single document operations...
-addEvent insertLocal
-addEvent removeLocal
-addEvent modifyLocal
-addEvent documentLocal
-
-echo Single document operations on coordinator...
-addEvent insertCoordinator
-addEvent removeCoordinator
-addEvent updateCoordinator
-addEvent replaceCoordinator
-addEvent documentCoordinator
-
-echo work method in HttpServerJob
-addEvent workHttpServerJob work@HttpServerJob.cpp
-
-echo work method in RestDocumentHandler
-addEvent executeRestReadDocument readDocument@RestDocumentHandler.cpp
-addEvent executeRestInsertDocument createDocument@RestDocumentHandler.cpp
-addEvent handleRequest handleRequest@HttpServer.cpp
-addEvent handleWrite handleWrite@SocketTask.cpp
-
-addEvent tcp_sendmsg
-addEvent tcp_recvmsg
-
-echo Done.
+
+main "$@"
@@ -96,6 +96,7 @@ start() {
     PORT=$2
     mkdir cluster/data$PORT
     echo Starting $TYPE on port $PORT
+    mkdir -p cluster/apps$PORT
     build/bin/arangod -c none \
         --database.directory cluster/data$PORT \
         --cluster.agency-endpoint tcp://127.0.0.1:$BASE \