mirror of https://gitee.com/bigwinds/arangodb

commit 14fe556031
Merge branch 'devel' of github.com:arangodb/ArangoDB into dox-cox-pix-ex-rox-box
@@ -360,9 +360,9 @@ else ()
option(USE_DEBUG_V8 "compile V8 in DEBUG mode" OFF)

set(V8_CFLAGS "")
set(V8_CXXFLAGS "")
set(V8_LDFLAGS "")
set(V8_CFLAGS "$ENV{V8_CFLAGS}")
set(V8_CXXFLAGS "$ENV{V8_CXXFLAGS}")
set(V8_LDFLAGS "$ENV{V8_LDFLAGS}")

if (USE_DEBUG_V8)
set(V8_TARGET_ARCH "${V8_PROC_ARCH}.debug")
@@ -2242,7 +2242,7 @@ int InputBuffer::incrementalHistorySearch(PromptBase& pi, int startChar) {
historyLinePosition); // draw user's text with our prompt

// loop until we get an exit character
int c;
int c = 0;
bool keepLooping = true;
bool useSearchedLine = true;
bool searchAgain = false;
@@ -44,14 +44,14 @@ AqlValue::AqlValue(TRI_doc_mptr_t const* mptr) {
}

/// @brief hashes the value
uint64_t AqlValue::hash(arangodb::AqlTransaction* trx) const {
uint64_t AqlValue::hash(arangodb::AqlTransaction* trx, uint64_t seed) const {
switch (type()) {
case VPACK_SLICE_POINTER:
case VPACK_INLINE:
case VPACK_MANAGED: {
// we must use the slow hash function here, because a value may have
// different representations in case its an array/object/number
return slice().normalizedHash();
return slice().normalizedHash(seed);
}
case DOCVEC:
case RANGE: {
@@ -59,7 +59,7 @@ uint64_t AqlValue::hash(arangodb::AqlTransaction* trx) const {
toVelocyPack(trx, builder, false);
// we must use the slow hash function here, because a value may have
// different representations in case its an array/object/number
return builder.slice().normalizedHash();
return builder.slice().normalizedHash(seed);
}
}
@@ -87,7 +87,7 @@ struct AqlValue final {
public:
// construct an empty AqlValue
// note: this is the default constructor and should be as cheap as possible
AqlValue() {
AqlValue() noexcept {
// construct a slice of type None
_data.internal[0] = '\x00';
setType(AqlValueType::VPACK_INLINE);
@@ -180,7 +180,7 @@ struct AqlValue final {
}

/// @brief hashes the value
uint64_t hash(arangodb::AqlTransaction*) const;
uint64_t hash(arangodb::AqlTransaction*, uint64_t seed = 0xdeadbeef) const;

/// @brief whether or not the value contains a none value
bool isNone() const;
@@ -280,7 +280,8 @@ struct AqlValue final {

/// @brief invalidates/resets a value to None, not freeing any memory
void erase() {
initFromSlice(arangodb::velocypack::Slice());
_data.internal[0] = '\x00';
setType(AqlValueType::VPACK_INLINE);
}

/// @brief destroy, explicit destruction, only when needed
@@ -330,7 +331,7 @@ struct AqlValue final {
}

/// @brief sets the value type
inline void setType(AqlValueType type) {
inline void setType(AqlValueType type) noexcept {
_data.internal[sizeof(_data.internal) - 1] = type;
}
};
@@ -357,6 +358,51 @@ struct AqlValueMaterializer {
explicit AqlValueMaterializer(arangodb::AqlTransaction* trx)
: trx(trx), materialized(), hasCopied(false) {}

AqlValueMaterializer(AqlValueMaterializer const& other)
: trx(other.trx), materialized(other.materialized), hasCopied(other.hasCopied) {
if (other.hasCopied) {
// copy other's slice
materialized = other.materialized.clone();
}
}

AqlValueMaterializer& operator=(AqlValueMaterializer const& other) {
if (this != &other) {
TRI_ASSERT(trx == other.trx); // must be from same transaction
if (hasCopied) {
// destroy our own slice
materialized.destroy();
hasCopied = false;
}
// copy other's slice
materialized = other.materialized.clone();
hasCopied = other.hasCopied;
}
return *this;
}

AqlValueMaterializer(AqlValueMaterializer&& other) noexcept
: trx(other.trx), materialized(other.materialized), hasCopied(other.hasCopied) {
// reset other
other.hasCopied = false;
other.materialized = AqlValue();
}

AqlValueMaterializer& operator=(AqlValueMaterializer&& other) noexcept {
if (this != &other) {
TRI_ASSERT(trx == other.trx); // must be from same transaction
if (hasCopied) {
// destroy our own slice
materialized.destroy();
}
// reset other
materialized = other.materialized;
hasCopied = other.hasCopied;
other.materialized = AqlValue();
}
return *this;
}

~AqlValueMaterializer() {
if (hasCopied) {
materialized.destroy();
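The copy and move members added to AqlValueMaterializer above follow the usual rule-of-five shape for a wrapper that only sometimes owns its value: copies clone, moves steal and reset the source, and only the instance whose hasCopied flag is set destroys the value. A minimal standalone sketch of that ownership pattern, using a toy Value type whose clone()/destroy() names are stand-ins rather than the AqlValue API:

// Standalone sketch of the copy/move pattern used by AqlValueMaterializer:
// an owner flag decides whether the destructor releases the held value.
// Value, clone() and destroy() are illustrative stand-ins.
#include <cstdio>
#include <string>
#include <utility>

struct Value {
  std::string data;
  Value clone() const { std::puts("clone"); return Value{data}; }
  void destroy() { std::puts("destroy"); data.clear(); }
};

class Materializer {
 public:
  Materializer() = default;

  Materializer(Materializer const& other)
      : value(other.value), hasCopied(other.hasCopied) {
    if (other.hasCopied) {
      value = other.value.clone();  // take our own copy
    }
  }

  Materializer& operator=(Materializer const& other) {
    if (this != &other) {
      if (hasCopied) {
        value.destroy();  // release what we owned so far
        hasCopied = false;
      }
      value = other.value.clone();
      hasCopied = other.hasCopied;
    }
    return *this;
  }

  Materializer(Materializer&& other) noexcept
      : value(std::move(other.value)), hasCopied(other.hasCopied) {
    other.hasCopied = false;  // source no longer owns anything
  }

  Materializer& operator=(Materializer&& other) noexcept {
    if (this != &other) {
      if (hasCopied) {
        value.destroy();
      }
      value = std::move(other.value);
      hasCopied = other.hasCopied;
      other.hasCopied = false;
    }
    return *this;
  }

  ~Materializer() {
    if (hasCopied) {
      value.destroy();  // only the owning instance cleans up
    }
  }

  void materialize(std::string data) {
    value = Value{std::move(data)};
    hasCopied = true;
  }

 private:
  Value value;
  bool hasCopied = false;
};

int main() {
  Materializer a;
  a.materialize("payload");
  Materializer b = a;             // clones
  Materializer c = std::move(a);  // steals; a no longer owns the value
}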
@@ -1413,7 +1413,9 @@ AstNode* Ast::replaceVariableReference(AstNode* node, Variable const* variable,
void Ast::validateAndOptimize() {
struct TraversalContext {
std::unordered_set<std::string> writeCollectionsSeen;
std::unordered_map<std::string, int64_t> collectionsFirstSeen;
int64_t stopOptimizationRequests = 0;
int64_t nestingLevel = 0;
bool isInFilter = false;
bool hasSeenAnyWriteNode = false;
bool hasSeenWriteNodeInCurrentScope = false;
@@ -1430,8 +1432,14 @@ void Ast::validateAndOptimize() {
// NOOPT will turn all function optimizations off
++(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
}
} else if (node->type == NODE_TYPE_COLLECTION) {
// note the level on which we first saw a collection
auto c = static_cast<TraversalContext*>(data);
c->collectionsFirstSeen.emplace(node->getString(), c->nestingLevel);
} else if (node->type == NODE_TYPE_AGGREGATIONS) {
++(static_cast<TraversalContext*>(data)->stopOptimizationRequests);
} else if (node->type == NODE_TYPE_SUBQUERY) {
++static_cast<TraversalContext*>(data)->nestingLevel;
} else if (node->hasFlag(FLAG_BIND_PARAMETER)) {
return false;
} else if (node->type == NODE_TYPE_REMOVE ||
@@ -1454,6 +1462,8 @@ void Ast::validateAndOptimize() {
auto postVisitor = [&](AstNode const* node, void* data) -> void {
if (node->type == NODE_TYPE_FILTER) {
static_cast<TraversalContext*>(data)->isInFilter = false;
} else if (node->type == NODE_TYPE_SUBQUERY) {
--static_cast<TraversalContext*>(data)->nestingLevel;
} else if (node->type == NODE_TYPE_REMOVE ||
node->type == NODE_TYPE_INSERT ||
node->type == NODE_TYPE_UPDATE ||
@@ -1468,6 +1478,16 @@ void Ast::validateAndOptimize() {
auto collection = node->getMember(1);
std::string name = collection->getString();
c->writeCollectionsSeen.emplace(name);

auto it = c->collectionsFirstSeen.find(name);

if (it != c->collectionsFirstSeen.end()) {
if ((*it).second < c->nestingLevel) {
name = "collection '" + name;
name.push_back('\'');
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION, name.c_str());
}
}
} else if (node->type == NODE_TYPE_FCALL) {
auto func = static_cast<Function*>(node->getData());
TRI_ASSERT(func != nullptr);
@@ -1547,7 +1567,9 @@ void Ast::validateAndOptimize() {
!func->canRunOnDBServer) {
// if canRunOnDBServer is true, then this is an indicator for a
// document-accessing function
THROW_ARANGO_EXCEPTION(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION);
std::string name("function ");
name.append(func->externalName);
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION, name.c_str());
}

if (static_cast<TraversalContext*>(data)->stopOptimizationRequests == 0) {
@@ -1587,9 +1609,12 @@ void Ast::validateAndOptimize() {
// collection
if (node->type == NODE_TYPE_COLLECTION) {
auto c = static_cast<TraversalContext*>(data);

if (c->writeCollectionsSeen.find(node->getString()) != c->writeCollectionsSeen.end()) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION);
std::string name("collection '");
name.append(node->getString());
name.push_back('\'');
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION, name.c_str());
}

return node;
@@ -1599,7 +1624,7 @@ void Ast::validateAndOptimize() {
if (node->type == NODE_TYPE_TRAVERSAL) {
// traversals must not be used after a modification operation
if (static_cast<TraversalContext*>(data)->hasSeenAnyWriteNode) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION);
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_ACCESS_AFTER_MODIFICATION, "traversal");
}

return node;
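The pre- and post-visitors above now maintain a nesting level for subqueries and remember on which level each collection was first seen, so that reading a collection after it was modified can be reported with the collection (or function) name instead of a bare error code. A much-simplified standalone sketch of that bookkeeping, with hypothetical Node/NodeType types that are not the ArangoDB AST API:

// Minimal sketch of nesting-level bookkeeping for detecting
// "access after modification"; the tree/visitor types are stand-ins.
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

enum class NodeType { Subquery, Collection, Remove };

struct Node {
  NodeType type;
  std::string name;           // collection name, if any
  std::vector<Node> children;
};

struct TraversalContext {
  std::unordered_set<std::string> writeCollectionsSeen;
  std::unordered_map<std::string, int64_t> collectionsFirstSeen;
  int64_t nestingLevel = 0;
};

void visit(Node const& node, TraversalContext& ctx) {
  // pre-visit: enter scopes and remember where a collection was first read
  if (node.type == NodeType::Subquery) {
    ++ctx.nestingLevel;
  } else if (node.type == NodeType::Collection) {
    ctx.collectionsFirstSeen.emplace(node.name, ctx.nestingLevel);
    if (ctx.writeCollectionsSeen.count(node.name) != 0) {
      throw std::runtime_error("access after modification: collection '" +
                               node.name + "'");
    }
  } else if (node.type == NodeType::Remove) {
    ctx.writeCollectionsSeen.insert(node.name);
    auto it = ctx.collectionsFirstSeen.find(node.name);
    if (it != ctx.collectionsFirstSeen.end() && it->second < ctx.nestingLevel) {
      // collection was already used on an outer (shallower) level
      throw std::runtime_error("access after modification: collection '" +
                               node.name + "'");
    }
  }
  for (auto const& child : node.children) {
    visit(child, ctx);
  }
  // post-visit: leave the subquery scope again
  if (node.type == NodeType::Subquery) {
    --ctx.nestingLevel;
  }
}

int main() {
  // a REMOVE inside a subquery touching a collection read on the outer level
  Node root{NodeType::Collection, "users",
            {Node{NodeType::Subquery, "",
                  {Node{NodeType::Remove, "users", {}}}}}};
  TraversalContext ctx;
  try {
    visit(root, ctx);
  } catch (std::exception const& ex) {
    std::cout << ex.what() << "\n";
  }
}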
@@ -832,7 +832,8 @@ size_t HashedCollectBlock::GroupKeyHash::operator()(
for (auto const& it : value) {
// we must use the slow hash function here, because a value may have
// different representations in case its an array/object/number
hash ^= it.hash(_trx);
// (calls normalizedHash() internally)
hash = it.hash(_trx, hash);
}

return static_cast<size_t>(hash);
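Passing the running hash back in as the seed of the next hash, as the group-key hash above now does, makes the combined value depend on every element and on their order. A standalone sketch of that chaining pattern; the mixing step and the start value are illustrative choices, not velocypack's normalizedHash():

// Standalone sketch of seed-chained hashing (simplified stand-in for
// AqlValue::hash(trx, seed) / VPackSlice::normalizedHash(seed)).
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// hash a single value, mixing in the seed so chained calls depend on
// all previous values and their order
uint64_t hashWithSeed(std::string const& value, uint64_t seed) {
  uint64_t h = std::hash<std::string>{}(value);
  // boost::hash_combine-style mixing step (illustrative)
  return seed ^ (h + 0x9e3779b97f4a7c15ULL + (seed << 6) + (seed >> 2));
}

uint64_t hashGroupKey(std::vector<std::string> const& values) {
  uint64_t hash = 0x12345678;  // arbitrary start value for the sketch
  for (auto const& it : values) {
    // the previous result becomes the seed of the next hash
    hash = hashWithSeed(it, hash);
  }
  return hash;
}

int main() {
  std::cout << hashGroupKey({"a", "b"}) << "\n";
  std::cout << hashGroupKey({"b", "a"}) << "\n";  // order-sensitive
}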
@@ -368,6 +368,7 @@ static void AppendAsString(arangodb::AqlTransaction* trx,

/// @brief Checks if the given list contains the element
static bool ListContainsElement(arangodb::AqlTransaction* trx,
VPackOptions const* options,
AqlValue const& list,
AqlValue const& testee, size_t& index) {
TRI_ASSERT(list.isArray());
@@ -379,7 +380,7 @@ static bool ListContainsElement(arangodb::AqlTransaction* trx,

VPackArrayIterator it(slice);
while (it.valid()) {
if (arangodb::basics::VelocyPackHelper::compare(testeeSlice, it.value(), false) == 0) {
if (arangodb::basics::VelocyPackHelper::compare(testeeSlice, it.value(), false, options) == 0) {
index = it.index();
return true;
}
@@ -390,12 +391,13 @@ static bool ListContainsElement(arangodb::AqlTransaction* trx,

/// @brief Checks if the given list contains the element
/// DEPRECATED
static bool ListContainsElement(VPackSlice const& list,
static bool ListContainsElement(VPackOptions const* options,
VPackSlice const& list,
VPackSlice const& testee, size_t& index) {
TRI_ASSERT(list.isArray());
for (size_t i = 0; i < static_cast<size_t>(list.length()); ++i) {
if (arangodb::basics::VelocyPackHelper::compare(testee, list.at(i),
false) == 0) {
false, options) == 0) {
index = i;
return true;
}
@@ -403,9 +405,11 @@ static bool ListContainsElement(VPackSlice const& list,
return false;
}

static bool ListContainsElement(VPackSlice const& list, VPackSlice const& testee) {
static bool ListContainsElement(VPackOptions const* options,
VPackSlice const& list,
VPackSlice const& testee) {
size_t unused;
return ListContainsElement(list, testee, unused);
return ListContainsElement(options, list, testee, unused);
}

/// @brief Computes the Variance of the given list.
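Each ListContainsElement overload above gains a VPackOptions pointer so that the comparison options (here the custom type handler) reach VelocyPackHelper::compare instead of a default being used. A standalone sketch of threading such an options object through contains-helpers; the CompareOptions struct and compare() function are assumptions for illustration, not the velocypack API:

// Sketch: threading a comparison-options object through "contains" helpers,
// mirroring the shape of the ListContainsElement(options, ...) overloads.
#include <cctype>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

struct CompareOptions {
  bool caseInsensitive = false;  // stand-in for a custom type handler
};

int compare(std::string a, std::string b, CompareOptions const* options) {
  if (options != nullptr && options->caseInsensitive) {
    for (auto& c : a) c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
    for (auto& c : b) c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
  }
  return a.compare(b);
}

static bool listContainsElement(CompareOptions const* options,
                                std::vector<std::string> const& list,
                                std::string const& testee, size_t& index) {
  for (size_t i = 0; i < list.size(); ++i) {
    if (compare(testee, list[i], options) == 0) {
      index = i;
      return true;
    }
  }
  return false;
}

static bool listContainsElement(CompareOptions const* options,
                                std::vector<std::string> const& list,
                                std::string const& testee) {
  size_t unused;
  return listContainsElement(options, list, testee, unused);  // forward options
}

int main() {
  CompareOptions options;
  options.caseInsensitive = true;
  std::cout << listContainsElement(&options, {"Foo", "Bar"}, "bar") << "\n";  // 1
}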
@@ -743,8 +747,6 @@ static AqlValue VertexIdsToAqlValueVPack(arangodb::aql::Query* query,
return AqlValue(builder.get());
}

/// @brief Load geoindex for collection name
static arangodb::Index* getGeoIndex(arangodb::AqlTransaction* trx,
TRI_voc_cid_t const& cid,
@@ -1688,11 +1690,13 @@ AqlValue Functions::Unique(arangodb::aql::Query* query,
AqlValueMaterializer materializer(trx);
VPackSlice slice = materializer.slice(value, false);

std::unordered_set<VPackSlice,
arangodb::basics::VelocyPackHelper::VPackHash,
VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
std::unordered_set<VPackSlice, arangodb::basics::VelocyPackHelper::VPackHash,
arangodb::basics::VelocyPackHelper::VPackEqual>
values(512, arangodb::basics::VelocyPackHelper::VPackHash(),
arangodb::basics::VelocyPackHelper::VPackEqual());
arangodb::basics::VelocyPackHelper::VPackEqual(&options));

for (auto const& s : VPackArrayIterator(slice)) {
if (!s.isNone()) {
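UNIQUE now constructs its std::unordered_set with an equality functor that carries a pointer to the options; bucket count, hasher and comparator go through the constructor because a stateful functor cannot simply be default-constructed by the container, and the hasher must agree with the comparator on what counts as equal. A standalone sketch with illustrative NormalizedHash/NormalizedEqual functors (not the VelocyPackHelper types):

// Sketch: an unordered_set whose equality functor carries external state,
// injected through the (bucket_count, hash, equal) constructor as above.
#include <cctype>
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_set>

struct Options {
  bool caseInsensitive = true;  // stand-in for customTypeHandler
};

struct NormalizedHash {
  size_t operator()(std::string s) const {
    // normalize before hashing so equal-under-options values hash equally
    for (auto& c : s) c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
    return std::hash<std::string>{}(s);
  }
};

struct NormalizedEqual {
  Options const* options;  // state injected at construction time
  explicit NormalizedEqual(Options const* opts) : options(opts) {}
  bool operator()(std::string a, std::string b) const {
    if (options != nullptr && options->caseInsensitive) {
      for (auto& c : a) c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
      for (auto& c : b) c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
    }
    return a == b;
  }
};

int main() {
  Options options;
  std::unordered_set<std::string, NormalizedHash, NormalizedEqual> values(
      512, NormalizedHash(), NormalizedEqual(&options));
  values.insert("Foo");
  values.insert("foo");                // duplicate under these options
  std::cout << values.size() << "\n";  // 1
}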
@@ -1806,13 +1810,16 @@ AqlValue Functions::UnionDistinct(arangodb::aql::Query* query,
ValidateParameters(parameters, "UNION_DISTINCT", 2);
size_t const n = parameters.size();

std::unordered_set<VPackSlice,
arangodb::basics::VelocyPackHelper::VPackHash,
VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
std::unordered_set<VPackSlice, arangodb::basics::VelocyPackHelper::VPackHash,
arangodb::basics::VelocyPackHelper::VPackEqual>
values(512, arangodb::basics::VelocyPackHelper::VPackHash(),
arangodb::basics::VelocyPackHelper::VPackEqual());
arangodb::basics::VelocyPackHelper::VPackEqual(&options));

std::vector<AqlValueMaterializer> materializers;
materializers.reserve(n);
for (size_t i = 0; i < n; ++i) {
AqlValue value = ExtractFunctionParameterValue(trx, parameters, i);
@@ -1822,8 +1829,8 @@ AqlValue Functions::UnionDistinct(arangodb::aql::Query* query,
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}

AqlValueMaterializer materializer(trx);
VPackSlice slice = materializer.slice(value, false);
materializers.emplace_back(trx);
VPackSlice slice = materializers.back().slice(value, false);

for (auto const& v : VPackArrayIterator(slice)) {
if (values.find(v) == values.end()) {
@@ -1865,14 +1872,20 @@ AqlValue Functions::Intersection(arangodb::aql::Query* query,
arangodb::AqlTransaction* trx,
VPackFunctionParameters const& parameters) {
ValidateParameters(parameters, "INTERSECTION", 2);

VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();

std::unordered_map<VPackSlice, size_t,
arangodb::basics::VelocyPackHelper::VPackHash,
arangodb::basics::VelocyPackHelper::VPackEqual>
values(512, arangodb::basics::VelocyPackHelper::VPackHash(),
arangodb::basics::VelocyPackHelper::VPackEqual());
arangodb::basics::VelocyPackHelper::VPackEqual(&options));

size_t const n = parameters.size();
std::vector<AqlValueMaterializer> materializers;
materializers.reserve(n);
for (size_t i = 0; i < n; ++i) {
AqlValue value = ExtractFunctionParameterValue(trx, parameters, i);
@@ -1882,8 +1895,8 @@ AqlValue Functions::Intersection(arangodb::aql::Query* query,
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}

AqlValueMaterializer materializer(trx);
VPackSlice slice = materializer.slice(value, false);
materializers.emplace_back(trx);
VPackSlice slice = materializers.back().slice(value, false);

for (auto const& it : VPackArrayIterator(slice)) {
if (i == 0) {
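UNION_DISTINCT and INTERSECTION now keep one materializer per parameter in a vector: the slices stored in the set are only views, so whatever owns the underlying buffers has to stay alive, and must not be moved (hence the reserve), for as long as the set is used. A standalone sketch of that keep-the-owner-alive pattern, with std::string owners and std::string_view views as stand-ins for materializers and VPackSlices:

// Sketch: views stored in a container stay valid only while their owning
// objects live, so the owners are collected in a vector of matching scope.
#include <iostream>
#include <string>
#include <string_view>
#include <unordered_set>
#include <vector>

int main() {
  std::vector<std::string> owners;  // plays the role of the materializers vector
  // reserve up front: growing the vector would move the owners and could
  // invalidate views into them (the diff similarly reserves n materializers)
  owners.reserve(3);
  std::unordered_set<std::string_view> values;

  for (int i = 0; i < 3; ++i) {
    // keep the owning object alive as long as the set is used;
    // a string local to this loop body would dangle after the iteration
    owners.emplace_back("value-" + std::to_string(i % 2));
    values.insert(std::string_view(owners.back()));
  }
  std::cout << values.size() << "\n";  // 2 distinct views
}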
@@ -2350,12 +2363,14 @@ AqlValue Functions::Minus(arangodb::aql::Query* query,
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}

std::unordered_map<VPackSlice,
size_t,
VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
std::unordered_map<VPackSlice, size_t,
arangodb::basics::VelocyPackHelper::VPackHash,
arangodb::basics::VelocyPackHelper::VPackEqual>
contains(512, arangodb::basics::VelocyPackHelper::VPackHash(),
arangodb::basics::VelocyPackHelper::VPackEqual());
arangodb::basics::VelocyPackHelper::VPackEqual(&options));

// Fill the original map
AqlValueMaterializer materializer(trx);
@@ -2545,6 +2560,7 @@ AqlValue Functions::Edges(arangodb::aql::Query* query,
// We might have examples
AqlValue exampleValue = ExtractFunctionParameterValue(trx, parameters, 3);
if ((exampleValue.isArray() && exampleValue.length() != 0) || exampleValue.isObject()) {
// TODO CHECK SURVIVAL
AqlValueMaterializer materializer(trx);
VPackSlice exampleSlice = materializer.slice(exampleValue, false);
@@ -2754,9 +2770,12 @@ AqlValue Functions::Push(arangodb::aql::Query* query,
for (auto const& it : VPackArrayIterator(l)) {
builder->add(it);
}
VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
if (parameters.size() == 3) {
AqlValue unique = ExtractFunctionParameterValue(trx, parameters, 2);
if (!unique.toBoolean() || !ListContainsElement(l, p)) {
if (!unique.toBoolean() || !ListContainsElement(&options, l, p)) {
builder->add(p);
}
} else {
@@ -2813,36 +2832,54 @@ AqlValue Functions::Append(arangodb::aql::Query* query,
return list.clone();
}

AqlValueMaterializer toAppendMaterializer(trx);
VPackSlice t = toAppendMaterializer.slice(toAppend, false);

if (t.isArray() && t.length() == 0) {
return list.clone();
}

bool unique = false;
if (parameters.size() == 3) {
AqlValue a = ExtractFunctionParameterValue(trx, parameters, 2);
unique = a.toBoolean();
}

AqlValueMaterializer toAppendMaterializer(trx);
VPackSlice t = toAppendMaterializer.slice(toAppend, false);

AqlValueMaterializer materializer(trx);
VPackSlice l = materializer.slice(list, false);

TransactionBuilderLeaser builder(trx);
builder->openArray();

VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
if (!list.isNull(true)) {
TRI_ASSERT(list.isArray());
for (auto const& it : VPackArrayIterator(l)) {
builder->add(it);
if (list.isArray()) {
for (auto const& it : VPackArrayIterator(l)) {
builder->add(it);
}
}
}
if (!toAppend.isArray()) {
if (!unique || !ListContainsElement(l, t)) {
if (!unique || !ListContainsElement(&options, l, t)) {
builder->add(t);
}
} else {
AqlValueMaterializer materializer(trx);
VPackSlice slice = materializer.slice(toAppend, false);
for (auto const& it : VPackArrayIterator(slice)) {
if (!unique || !ListContainsElement(l, it)) {
if (unique) {
std::unordered_set<VPackSlice> added;
added.reserve(slice.length());
for (auto const& it : VPackArrayIterator(slice)) {
if (added.find(it) == added.end() &&
!ListContainsElement(&options, l, it)) {
builder->add(it);
added.emplace(it);
}
}
} else {
for (auto const& it : VPackArrayIterator(slice)) {
builder->add(it);
}
}
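With the unique flag, APPEND now deduplicates against both the existing list and the values appended earlier in the same call, via a local set of already-added elements next to the containment check. A standalone sketch of the same logic over std::string elements; appendUnique and its internals are illustrative names, not the Functions.cpp API:

// Sketch: unique append that checks both the existing list and the values
// already appended in this call, mirroring the Append() change above.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

std::vector<std::string> appendUnique(std::vector<std::string> list,
                                      std::vector<std::string> const& toAppend) {
  auto const originalSize = static_cast<std::ptrdiff_t>(list.size());
  std::unordered_set<std::string> added;   // values added by this call
  added.reserve(toAppend.size());
  for (auto const& it : toAppend) {
    bool alreadyAdded = added.find(it) != added.end();
    // only compare against the original list contents, as the diff does
    bool inList = std::find(list.begin(), list.begin() + originalSize, it) !=
                  list.begin() + originalSize;
    if (!alreadyAdded && !inList) {
      list.push_back(it);
      added.insert(it);
    }
  }
  return list;
}

int main() {
  auto result = appendUnique({"a", "b"}, {"b", "c", "c"});
  for (auto const& v : result) std::cout << v << " ";  // a b c
  std::cout << "\n";
}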
@@ -2870,8 +2907,12 @@ AqlValue Functions::Unshift(arangodb::aql::Query* query,
unique = a.toBoolean();
}

VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
size_t unused;
if (unique && list.isArray() && ListContainsElement(trx, list, toAppend, unused)) {
if (unique && list.isArray() &&
ListContainsElement(trx, &options, list, toAppend, unused)) {
// Short circuit, nothing to do return list
return list.clone();
}
@@ -3007,6 +3048,9 @@ AqlValue Functions::RemoveValues(arangodb::aql::Query* query,
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}

VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();
try {
AqlValueMaterializer valuesMaterializer(trx);
VPackSlice v = valuesMaterializer.slice(values, false);
@@ -3017,13 +3061,13 @@ AqlValue Functions::RemoveValues(arangodb::aql::Query* query,
TransactionBuilderLeaser builder(trx);
builder->openArray();
for (auto const& it : VPackArrayIterator(l)) {
if (!ListContainsElement(v, it)) {
if (!ListContainsElement(&options, v, it)) {
builder->add(it);
}
}
builder->close();
return AqlValue(builder.get());
} catch (...) {
} catch (std::bad_alloc const&) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
}
@@ -3448,9 +3492,12 @@ AqlValue Functions::Position(arangodb::aql::Query* query,

if (list.length() > 0) {
AqlValue searchValue = ExtractFunctionParameterValue(trx, parameters, 1);
VPackOptions options;
options.customTypeHandler =
trx->transactionContext()->orderCustomTypeHandler().get();

size_t index;
if (ListContainsElement(trx, list, searchValue, index)) {
if (ListContainsElement(trx, &options, list, searchValue, index)) {
if (!returnIndex) {
// return true
return AqlValue(arangodb::basics::VelocyPackHelper::TrueValue());
@@ -403,7 +403,8 @@ AqlItemBlock* InsertBlock::work(std::vector<AqlItemBlock*>& blocks) {
size_t const n = res->size();

throwIfKilled(); // check if we were aborted
bool isMultiple = (n > 1);
bool const isMultiple = (n > 1);

if (!isMultiple) {
// loop over the complete block. Well it is one element only
for (size_t i = 0; i < n; ++i) {
@@ -453,7 +454,7 @@ AqlItemBlock* InsertBlock::work(std::vector<AqlItemBlock*>& blocks) {
dstRow -= n;
VPackSlice resultList = opRes.slice();
TRI_ASSERT(resultList.isArray());
for (auto const& elm: VPackArrayIterator(resultList)) {
for (auto const& elm: VPackArrayIterator(resultList, false)) {
bool wasError = arangodb::basics::VelocyPackHelper::getBooleanValue(
elm, "error", false);
if (!wasError) {
@@ -715,13 +716,14 @@ AqlItemBlock* UpsertBlock::work(std::vector<AqlItemBlock*>& blocks) {
auto* res = *it;

throwIfKilled(); // check if we were aborted

insertBuilder.clear();
updateBuilder.clear();

size_t const n = res->size();

bool isMultiple = (n > 1);

bool const isMultiple = (n > 1);
if (isMultiple) {
insertBuilder.clear();
updateBuilder.clear();
insertBuilder.openArray();
updateBuilder.openArray();
}
@@ -110,7 +110,7 @@ struct OperationCursor {
//////////////////////////////////////////////////////////////////////////////

std::shared_ptr<OperationResult> getMore(uint64_t batchSize = UINT64_MAX,
bool useExternals = false);
bool useExternals = true);

//////////////////////////////////////////////////////////////////////////////
/// @brief Get next batchSize many elements.
@@ -120,7 +120,7 @@ struct OperationCursor {
//////////////////////////////////////////////////////////////////////////////

void getMore(std::shared_ptr<OperationResult>&, uint64_t batchSize = UINT64_MAX,
bool useExternals = false);
bool useExternals = true);

//////////////////////////////////////////////////////////////////////////////
/// @brief Get next batchSize many elements. mptr variant
@@ -1,161 +0,0 @@
#.rst:
# CMakeParseArguments
# -------------------
#
#
#
# CMAKE_PARSE_ARGUMENTS(<prefix> <options> <one_value_keywords>
# <multi_value_keywords> args...)
#
# CMAKE_PARSE_ARGUMENTS() is intended to be used in macros or functions
# for parsing the arguments given to that macro or function. It
# processes the arguments and defines a set of variables which hold the
# values of the respective options.
#
# The <options> argument contains all options for the respective macro,
# i.e. keywords which can be used when calling the macro without any
# value following, like e.g. the OPTIONAL keyword of the install()
# command.
#
# The <one_value_keywords> argument contains all keywords for this macro
# which are followed by one value, like e.g. DESTINATION keyword of the
# install() command.
#
# The <multi_value_keywords> argument contains all keywords for this
# macro which can be followed by more than one value, like e.g. the
# TARGETS or FILES keywords of the install() command.
#
# When done, CMAKE_PARSE_ARGUMENTS() will have defined for each of the
# keywords listed in <options>, <one_value_keywords> and
# <multi_value_keywords> a variable composed of the given <prefix>
# followed by "_" and the name of the respective keyword. These
# variables will then hold the respective value from the argument list.
# For the <options> keywords this will be TRUE or FALSE.
#
# All remaining arguments are collected in a variable
# <prefix>_UNPARSED_ARGUMENTS, this can be checked afterwards to see
# whether your macro was called with unrecognized parameters.
#
# As an example here a my_install() macro, which takes similar arguments
# as the real install() command:
#
# ::
#
# function(MY_INSTALL)
# set(options OPTIONAL FAST)
# set(oneValueArgs DESTINATION RENAME)
# set(multiValueArgs TARGETS CONFIGURATIONS)
# cmake_parse_arguments(MY_INSTALL "${options}" "${oneValueArgs}"
# "${multiValueArgs}" ${ARGN} )
# ...
#
#
#
# Assume my_install() has been called like this:
#
# ::
#
# my_install(TARGETS foo bar DESTINATION bin OPTIONAL blub)
#
#
#
# After the cmake_parse_arguments() call the macro will have set the
# following variables:
#
# ::
#
# MY_INSTALL_OPTIONAL = TRUE
# MY_INSTALL_FAST = FALSE (this option was not used when calling my_install()
# MY_INSTALL_DESTINATION = "bin"
# MY_INSTALL_RENAME = "" (was not used)
# MY_INSTALL_TARGETS = "foo;bar"
# MY_INSTALL_CONFIGURATIONS = "" (was not used)
# MY_INSTALL_UNPARSED_ARGUMENTS = "blub" (no value expected after "OPTIONAL"
#
#
#
# You can then continue and process these variables.
#
# Keywords terminate lists of values, e.g. if directly after a
# one_value_keyword another recognized keyword follows, this is
# interpreted as the beginning of the new option. E.g.
# my_install(TARGETS foo DESTINATION OPTIONAL) would result in
# MY_INSTALL_DESTINATION set to "OPTIONAL", but MY_INSTALL_DESTINATION
# would be empty and MY_INSTALL_OPTIONAL would be set to TRUE therefor.

#=============================================================================
# Copyright 2010 Alexander Neundorf <neundorf@kde.org>
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)

if(__CMAKE_PARSE_ARGUMENTS_INCLUDED)
return()
endif()
set(__CMAKE_PARSE_ARGUMENTS_INCLUDED TRUE)

function(CMAKE_PARSE_ARGUMENTS prefix _optionNames _singleArgNames _multiArgNames)
# first set all result variables to empty/FALSE
foreach(arg_name ${_singleArgNames} ${_multiArgNames})
set(${prefix}_${arg_name})
endforeach()

foreach(option ${_optionNames})
set(${prefix}_${option} FALSE)
endforeach()

set(${prefix}_UNPARSED_ARGUMENTS)

set(insideValues FALSE)
set(currentArgName)

# now iterate over all arguments and fill the result variables
foreach(currentArg ${ARGN})
list(FIND _optionNames "${currentArg}" optionIndex) # ... then this marks the end of the arguments belonging to this keyword
list(FIND _singleArgNames "${currentArg}" singleArgIndex) # ... then this marks the end of the arguments belonging to this keyword
list(FIND _multiArgNames "${currentArg}" multiArgIndex) # ... then this marks the end of the arguments belonging to this keyword

if(${optionIndex} EQUAL -1 AND ${singleArgIndex} EQUAL -1 AND ${multiArgIndex} EQUAL -1)
if(insideValues)
if("${insideValues}" STREQUAL "SINGLE")
set(${prefix}_${currentArgName} ${currentArg})
set(insideValues FALSE)
elseif("${insideValues}" STREQUAL "MULTI")
list(APPEND ${prefix}_${currentArgName} ${currentArg})
endif()
else()
list(APPEND ${prefix}_UNPARSED_ARGUMENTS ${currentArg})
endif()
else()
if(NOT ${optionIndex} EQUAL -1)
set(${prefix}_${currentArg} TRUE)
set(insideValues FALSE)
elseif(NOT ${singleArgIndex} EQUAL -1)
set(currentArgName ${currentArg})
set(${prefix}_${currentArgName})
set(insideValues "SINGLE")
elseif(NOT ${multiArgIndex} EQUAL -1)
set(currentArgName ${currentArg})
set(${prefix}_${currentArgName})
set(insideValues "MULTI")
endif()
endif()

endforeach()

# propagate the result variables to the caller:
foreach(arg_name ${_singleArgNames} ${_multiArgNames} ${_optionNames})
set(${prefix}_${arg_name} ${${prefix}_${arg_name}} PARENT_SCOPE)
endforeach()
set(${prefix}_UNPARSED_ARGUMENTS ${${prefix}_UNPARSED_ARGUMENTS} PARENT_SCOPE)

endfunction()
@@ -1,538 +0,0 @@
#.rst:
# FindOpenSSL
# -----------
#
# Find the OpenSSL encryption library.
#
# Imported Targets
# ^^^^^^^^^^^^^^^^
#
# This module defines the following :prop_tgt:`IMPORTED` targets:
#
# ``OpenSSL::SSL``
# The OpenSSL ``ssl`` library, if found.
# ``OpenSSL::Crypto``
# The OpenSSL ``crypto`` library, if found.
#
# Result Variables
# ^^^^^^^^^^^^^^^^
#
# This module will set the following variables in your project:
#
# ``OPENSSL_FOUND``
# System has the OpenSSL library.
# ``OPENSSL_INCLUDE_DIR``
# The OpenSSL include directory.
# ``OPENSSL_CRYPTO_LIBRARY``
# The OpenSSL crypto library.
# ``OPENSSL_SSL_LIBRARY``
# The OpenSSL SSL library.
# ``OPENSSL_LIBRARIES``
# All OpenSSL libraries.
# ``OPENSSL_VERSION``
# This is set to ``$major.$minor.$revision$patch`` (e.g. ``0.9.8s``).
#
# Hints
# ^^^^^
#
# Set ``OPENSSL_ROOT_DIR`` to the root directory of an OpenSSL installation.
# Set ``OPENSSL_USE_STATIC_LIBS`` to ``TRUE`` to look for static libraries.
# Set ``OPENSSL_MSVC_STATIC_RT`` set ``TRUE`` to choose the MT version of the lib.

#=============================================================================
# Copyright 2006-2009 Kitware, Inc.
# Copyright 2006 Alexander Neundorf <neundorf@kde.org>
# Copyright 2009-2011 Mathieu Malaterre <mathieu.malaterre@gmail.com>
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)

if (UNIX)
find_package(PkgConfig QUIET)
pkg_check_modules(_OPENSSL QUIET openssl)
endif ()

# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
if(OPENSSL_USE_STATIC_LIBS)
set(_openssl_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a )
endif()
endif()

if (WIN32)
# 32-bit not officially supported anymore anyway
set(SSL_BITS "x64")
# v140 corresponds to VS 2015
set(SSL_NUGET_DIR "lib/native/v140/windesktop/msvcstl/dyn/rt-dyn")
set(SSL_NUGET_ROOT "$ENV{USERPROFILE}/.nuget/packages/openssl.v140.windesktop.msvcstl.dyn.rt-dyn.${SSL_BITS}")
if (NOT OPENSSL_ROOT_DIR AND IS_DIRECTORY ${SSL_NUGET_ROOT})
# find latest version based on folder name and assign to OPENSSL_ROOT_DIR
FILE(GLOB dirlist RELATIVE ${SSL_NUGET_ROOT} ${SSL_NUGET_ROOT}/*)
list(SORT dirlist)
list(LENGTH dirlist listlength)
math(EXPR lastindex "${listlength}-1")
list(GET dirlist ${lastindex} latestversion)
set(OPENSSL_ROOT_DIR
"${SSL_NUGET_ROOT}/${latestversion}"
)
endif()
if (IS_DIRECTORY "${OPENSSL_ROOT_DIR}/build/native/")
set(SSL_NUGET TRUE)
else()
set(SSL_NUGET FALSE)
endif()
if (OPENSSL_ROOT_DIR AND SSL_NUGET)
message("Found nuGET installation of OpenSSL!")
# its an openssl downloaded via nuget!
set(OPENSSL_INCLUDE "${OPENSSL_ROOT_DIR}/build/native/include")
set(_OPENSSL_ROOT_HINTS "${OPENSSL_ROOT_DIR}/build/native/include")

set(OPENSSL_LIB_DIR "${OPENSSL_ROOT_DIR}/${SSL_NUGET_DIR}/${SSL_BITS}")
set(_OPENSSL_ROOT_HINTS "${OPENSSL_ROOT_DIR}/build/native/include")

set(_OPENSSL_ROOT_PATHS
"${OPENSSL_ROOT_DIR}/build/native/include"
"${OPENSSL_ROOT_DIR}/${SSL_NUGET_DIR}/${SSL_BITS}/")
else()
# http://www.slproweb.com/products/Win32OpenSSL.html
set(_OPENSSL_ROOT_HINTS
${OPENSSL_ROOT_DIR}
"[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\OpenSSL (32-bit)_is1;Inno Setup: App Path]"
"[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\OpenSSL (64-bit)_is1;Inno Setup: App Path]"
$ENV{OPENSSL_ROOT_DIR}
)
set(_OPENSSL_ROOT_PATHS
$ENV{PATH}
)
file(TO_CMAKE_PATH "$ENV{PROGRAMFILES}" _programfiles)
set(_OPENSSL_ROOT_PATHS
"${_programfiles}/OpenSSL"
"${_programfiles}/OpenSSL-Win32"
"${_programfiles}/OpenSSL-Win64"
"C:/OpenSSL/"
"C:/OpenSSL-Win32/"
"C:/OpenSSL-Win64/"
)
unset(_programfiles)
# none of the above will actually set OPENSSL_ROOT_DIR, but one could
# simply use the parent dir of the include dir (defined further below!)
#get_filename_component(OPENSSL_ROOT_DIR ${OPENSSL_INCLUDE_DIR} DIRECTORY)
endif()
else ()
set(_OPENSSL_ROOT_HINTS
${OPENSSL_ROOT_DIR}
ENV OPENSSL_ROOT_DIR
)
endif ()

set(_OPENSSL_ROOT_HINTS_AND_PATHS
HINTS ${_OPENSSL_ROOT_HINTS}
PATHS ${_OPENSSL_ROOT_PATHS}
)

find_path(OPENSSL_INCLUDE_DIR
NAMES
openssl/ssl.h
${_OPENSSL_ROOT_HINTS_AND_PATHS}
HINTS
${_OPENSSL_INCLUDEDIR}
PATH_SUFFIXES
include
)

if(WIN32 AND NOT CYGWIN)
if (SSL_NUGET)
# /MD and /MDd are the standard values - if someone wants to use
# others, the libnames have to change here too
# use also ssl and ssleay32 in debug as fallback for openssl < 0.9.8b
# enable OPENSSL_MSVC_STATIC_RT to get the libs build /MT (Multithreaded no-DLL)

# Implementation details:
# We are using the libraries located in the VC subdir instead of the parent directory eventhough :
# libeay32MD.lib is identical to ../libeay32.lib, and
# ssleay32MD.lib is identical to ../ssleay32.lib
# enable OPENSSL_USE_STATIC_LIBS to use the static libs located in lib/VC/static

#if (OPENSSL_MSVC_STATIC_RT)
# set(_OPENSSL_MSVC_RT_MODE "MT")
#else ()
# set(_OPENSSL_MSVC_RT_MODE "MD")
#endif ()

set(LIB_EAY_DEBUG LIB_EAY_DEBUG-NOTFOUND)
if (EXISTS "${OPENSSL_LIB_DIR}/debug/libeay32.lib")
set(LIB_EAY_DEBUG "${OPENSSL_LIB_DIR}/debug/libeay32.lib")
endif()

set(LIB_EAY_RELEASE LIB_EAY_RELEASE-NOTFOUND)
if (EXISTS "${OPENSSL_LIB_DIR}/release/libeay32.lib")
set(LIB_EAY_RELEASE "${OPENSSL_LIB_DIR}/release/libeay32.lib")
endif()

set(SSL_EAY_DEBUG SSL_EAY_DEBUG-NOTFOUND)
if (EXISTS "${OPENSSL_LIB_DIR}/debug/ssleay32.lib")
set(SSL_EAY_DEBUG "${OPENSSL_LIB_DIR}/debug/ssleay32.lib")
endif()

set(SSL_EAY_RELEASE SSL_EAY_RELEASE-NOTFOUND)
if (EXISTS "${OPENSSL_LIB_DIR}/release/ssleay32.lib")
set(SSL_EAY_RELEASE "${OPENSSL_LIB_DIR}/release/ssleay32.lib")
endif()

set(LIB_EAY_LIBRARY_DEBUG "${LIB_EAY_DEBUG}")
set(LIB_EAY_LIBRARY_RELEASE "${LIB_EAY_RELEASE}")
set(SSL_EAY_LIBRARY_DEBUG "${SSL_EAY_DEBUG}")
set(SSL_EAY_LIBRARY_RELEASE "${SSL_EAY_RELEASE}")

include(${CMAKE_CURRENT_LIST_DIR}/SelectLibraryConfigurations.cmake)
select_library_configurations(LIB_EAY)
select_library_configurations(SSL_EAY)

mark_as_advanced(LIB_EAY_LIBRARY_DEBUG LIB_EAY_LIBRARY_RELEASE
SSL_EAY_LIBRARY_DEBUG SSL_EAY_LIBRARY_RELEASE)
set(OPENSSL_SSL_LIBRARY ${SSL_EAY_LIBRARY} )
set(OPENSSL_CRYPTO_LIBRARY ${LIB_EAY_LIBRARY} )
set(OPENSSL_LIBRARIES ${SSL_EAY_LIBRARY} ${LIB_EAY_LIBRARY} )

elseif(MSVC)
# /MD and /MDd are the standard values - if someone wants to use
# others, the libnames have to change here too
# use also ssl and ssleay32 in debug as fallback for openssl < 0.9.8b
# enable OPENSSL_MSVC_STATIC_RT to get the libs build /MT (Multithreaded no-DLL)
# In Visual C++ naming convention each of these four kinds of Windows libraries has it's standard suffix:
# * MD for dynamic-release
# * MDd for dynamic-debug
# * MT for static-release
# * MTd for static-debug

# Implementation details:
# We are using the libraries located in the VC subdir instead of the parent directory eventhough :
# libeay32MD.lib is identical to ../libeay32.lib, and
# ssleay32MD.lib is identical to ../ssleay32.lib
# enable OPENSSL_USE_STATIC_LIBS to use the static libs located in lib/VC/static

if (OPENSSL_MSVC_STATIC_RT)
set(_OPENSSL_MSVC_RT_MODE "MT")
else ()
set(_OPENSSL_MSVC_RT_MODE "MD")
endif ()

if(OPENSSL_USE_STATIC_LIBS)
set(_OPENSSL_PATH_SUFFIXES
"lib"
"VC/static"
"lib/VC/static"
)
else()
set(_OPENSSL_PATH_SUFFIXES
"lib"
"VC"
"lib/VC"
)
endif ()

find_library(LIB_EAY_DEBUG
NAMES
libeay32${_OPENSSL_MSVC_RT_MODE}d
libeay32d
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
${_OPENSSL_PATH_SUFFIXES}
)

find_library(LIB_EAY_RELEASE
NAMES
libeay32${_OPENSSL_MSVC_RT_MODE}
libeay32
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
${_OPENSSL_PATH_SUFFIXES}
)

find_library(SSL_EAY_DEBUG
NAMES
ssleay32${_OPENSSL_MSVC_RT_MODE}d
ssleay32d
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
${_OPENSSL_PATH_SUFFIXES}
)

find_library(SSL_EAY_RELEASE
NAMES
ssleay32${_OPENSSL_MSVC_RT_MODE}
ssleay32
ssl
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
${_OPENSSL_PATH_SUFFIXES}
)

set(LIB_EAY_LIBRARY_DEBUG "${LIB_EAY_DEBUG}")
set(LIB_EAY_LIBRARY_RELEASE "${LIB_EAY_RELEASE}")
set(SSL_EAY_LIBRARY_DEBUG "${SSL_EAY_DEBUG}")
set(SSL_EAY_LIBRARY_RELEASE "${SSL_EAY_RELEASE}")

include(${CMAKE_CURRENT_LIST_DIR}/SelectLibraryConfigurations.cmake)
select_library_configurations(LIB_EAY)
select_library_configurations(SSL_EAY)

mark_as_advanced(LIB_EAY_LIBRARY_DEBUG LIB_EAY_LIBRARY_RELEASE
SSL_EAY_LIBRARY_DEBUG SSL_EAY_LIBRARY_RELEASE)
set(OPENSSL_SSL_LIBRARY ${SSL_EAY_LIBRARY} )
set(OPENSSL_CRYPTO_LIBRARY ${LIB_EAY_LIBRARY} )
set(OPENSSL_LIBRARIES ${SSL_EAY_LIBRARY} ${LIB_EAY_LIBRARY} )
elseif(MINGW)
# same player, for MinGW
set(LIB_EAY_NAMES crypto libeay32)
set(SSL_EAY_NAMES ssl ssleay32)
find_library(LIB_EAY
NAMES
${LIB_EAY_NAMES}
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
"lib"
"lib/MinGW"
)

find_library(SSL_EAY
NAMES
${SSL_EAY_NAMES}
${_OPENSSL_ROOT_HINTS_AND_PATHS}
PATH_SUFFIXES
"lib"
"lib/MinGW"
)

mark_as_advanced(SSL_EAY LIB_EAY)
set(OPENSSL_SSL_LIBRARY ${SSL_EAY} )
set(OPENSSL_CRYPTO_LIBRARY ${LIB_EAY} )
set(OPENSSL_LIBRARIES ${SSL_EAY} ${LIB_EAY} )
unset(LIB_EAY_NAMES)
unset(SSL_EAY_NAMES)
else()
# Not sure what to pick for -say- intel, let's use the toplevel ones and hope someone report issues:
find_library(LIB_EAY
NAMES
libeay32
${_OPENSSL_ROOT_HINTS_AND_PATHS}
HINTS
${_OPENSSL_LIBDIR}
PATH_SUFFIXES
lib
)

find_library(SSL_EAY
NAMES
ssleay32
${_OPENSSL_ROOT_HINTS_AND_PATHS}
HINTS
${_OPENSSL_LIBDIR}
PATH_SUFFIXES
lib
)

mark_as_advanced(SSL_EAY LIB_EAY)
set(OPENSSL_SSL_LIBRARY ${SSL_EAY} )
set(OPENSSL_CRYPTO_LIBRARY ${LIB_EAY} )
set(OPENSSL_LIBRARIES ${SSL_EAY} ${LIB_EAY} )
endif()
else()

find_library(OPENSSL_SSL_LIBRARY
NAMES
ssl
ssleay32
ssleay32MD
${_OPENSSL_ROOT_HINTS_AND_PATHS}
HINTS
${_OPENSSL_LIBDIR}
PATH_SUFFIXES
lib
)

find_library(OPENSSL_CRYPTO_LIBRARY
NAMES
crypto
${_OPENSSL_ROOT_HINTS_AND_PATHS}
HINTS
${_OPENSSL_LIBDIR}
PATH_SUFFIXES
lib
)

mark_as_advanced(OPENSSL_CRYPTO_LIBRARY OPENSSL_SSL_LIBRARY)

# compat defines
set(OPENSSL_SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY})
set(OPENSSL_CRYPTO_LIBRARIES ${OPENSSL_CRYPTO_LIBRARY})

set(OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})

endif()

function(from_hex HEX DEC)
string(TOUPPER "${HEX}" HEX)
set(_res 0)
string(LENGTH "${HEX}" _strlen)

while (_strlen GREATER 0)
math(EXPR _res "${_res} * 16")
string(SUBSTRING "${HEX}" 0 1 NIBBLE)
string(SUBSTRING "${HEX}" 1 -1 HEX)
if (NIBBLE STREQUAL "A")
math(EXPR _res "${_res} + 10")
elseif (NIBBLE STREQUAL "B")
math(EXPR _res "${_res} + 11")
elseif (NIBBLE STREQUAL "C")
math(EXPR _res "${_res} + 12")
elseif (NIBBLE STREQUAL "D")
math(EXPR _res "${_res} + 13")
elseif (NIBBLE STREQUAL "E")
math(EXPR _res "${_res} + 14")
elseif (NIBBLE STREQUAL "F")
math(EXPR _res "${_res} + 15")
else()
math(EXPR _res "${_res} + ${NIBBLE}")
endif()

string(LENGTH "${HEX}" _strlen)
endwhile()

set(${DEC} ${_res} PARENT_SCOPE)
endfunction()

if (OPENSSL_INCLUDE_DIR)
if(OPENSSL_INCLUDE_DIR AND EXISTS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h")
file(STRINGS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h" openssl_version_str
REGEX "^#[\t ]*define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")

# The version number is encoded as 0xMNNFFPPS: major minor fix patch status
# The status gives if this is a developer or prerelease and is ignored here.
# Major, minor, and fix directly translate into the version numbers shown in
# the string. The patch field translates to the single character suffix that
# indicates the bug fix state, which 00 -> nothing, 01 -> a, 02 -> b and so
# on.

string(REGEX REPLACE "^.*OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F]).*$"
"\\1;\\2;\\3;\\4;\\5" OPENSSL_VERSION_LIST "${openssl_version_str}")
list(GET OPENSSL_VERSION_LIST 0 OPENSSL_VERSION_MAJOR)
list(GET OPENSSL_VERSION_LIST 1 OPENSSL_VERSION_MINOR)
from_hex("${OPENSSL_VERSION_MINOR}" OPENSSL_VERSION_MINOR)
list(GET OPENSSL_VERSION_LIST 2 OPENSSL_VERSION_FIX)
from_hex("${OPENSSL_VERSION_FIX}" OPENSSL_VERSION_FIX)
list(GET OPENSSL_VERSION_LIST 3 OPENSSL_VERSION_PATCH)

if (NOT OPENSSL_VERSION_PATCH STREQUAL "00")
from_hex("${OPENSSL_VERSION_PATCH}" _tmp)
# 96 is the ASCII code of 'a' minus 1
math(EXPR OPENSSL_VERSION_PATCH_ASCII "${_tmp} + 96")
unset(_tmp)
# Once anyone knows how OpenSSL would call the patch versions beyond 'z'
# this should be updated to handle that, too. This has not happened yet
# so it is simply ignored here for now.
string(ASCII "${OPENSSL_VERSION_PATCH_ASCII}" OPENSSL_VERSION_PATCH_STRING)
endif ()

set(OPENSSL_VERSION "${OPENSSL_VERSION_MAJOR}.${OPENSSL_VERSION_MINOR}.${OPENSSL_VERSION_FIX}${OPENSSL_VERSION_PATCH_STRING}")
endif ()
endif ()

include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)

if (OPENSSL_VERSION)
find_package_handle_standard_args(OpenSSL
REQUIRED_VARS
OPENSSL_LIBRARIES
OPENSSL_INCLUDE_DIR
VERSION_VAR
OPENSSL_VERSION
FAIL_MESSAGE
"Could NOT find OpenSSL, try to set the path to OpenSSL root folder in the system variable OPENSSL_ROOT_DIR"
)
else ()
find_package_handle_standard_args(OpenSSL "Could NOT find OpenSSL, try to set the path to OpenSSL root folder in the system variable OPENSSL_ROOT_DIR"
OPENSSL_LIBRARIES
OPENSSL_INCLUDE_DIR
)
endif ()

mark_as_advanced(OPENSSL_INCLUDE_DIR OPENSSL_LIBRARIES)
if(OPENSSL_FOUND)
if(NOT TARGET OpenSSL::Crypto AND
(EXISTS "${OPENSSL_CRYPTO_LIBRARY}" OR
EXISTS "${LIB_EAY_LIBRARY_DEBUG}" OR
EXISTS "${LIB_EAY_LIBRARY_RELEASE}")
)
add_library(OpenSSL::Crypto UNKNOWN IMPORTED)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
if(EXISTS "${OPENSSL_CRYPTO_LIBRARY}")
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${OPENSSL_CRYPTO_LIBRARY}")
endif()
if(EXISTS "${LIB_EAY_LIBRARY_DEBUG}")
set_property(TARGET OpenSSL::Crypto APPEND PROPERTY
IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_LOCATION_DEBUG "${LIB_EAY_LIBRARY_DEBUG}")
endif()
if(EXISTS "${LIB_EAY_LIBRARY_RELEASE}")
set_property(TARGET OpenSSL::Crypto APPEND PROPERTY
IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_LOCATION_RELEASE "${LIB_EAY_LIBRARY_RELEASE}")
endif()
endif()
if(NOT TARGET OpenSSL::SSL AND
(EXISTS "${OPENSSL_SSL_LIBRARY}" OR
EXISTS "${SSL_EAY_LIBRARY_DEBUG}" OR
EXISTS "${SSL_EAY_LIBRARY_RELEASE}")
)
add_library(OpenSSL::SSL UNKNOWN IMPORTED)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
if(EXISTS "${OPENSSL_SSL_LIBRARY}")
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${OPENSSL_SSL_LIBRARY}")
endif()
if(EXISTS "${SSL_EAY_LIBRARY_DEBUG}")
set_property(TARGET OpenSSL::SSL APPEND PROPERTY
IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_LOCATION_DEBUG "${SSL_EAY_LIBRARY_DEBUG}")
endif()
if(EXISTS "${SSL_EAY_LIBRARY_RELEASE}")
set_property(TARGET OpenSSL::SSL APPEND PROPERTY
IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_LOCATION_RELEASE "${SSL_EAY_LIBRARY_RELEASE}")
endif()
if(TARGET OpenSSL::Crypto)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_LINK_LIBRARIES OpenSSL::Crypto)
endif()
endif()
endif()

# Restore the original find library ordering
if(OPENSSL_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_openssl_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()
@@ -1,269 +0,0 @@

#=============================================================================
# Copyright 2007-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)

include(${CMAKE_CURRENT_LIST_DIR}/FindPackageMessage.cmake)
include(${CMAKE_CURRENT_LIST_DIR}/CMakeParseArguments.cmake)

# internal helper macro
macro(_FPHSA_FAILURE_MESSAGE _msg)
if (${_NAME}_FIND_REQUIRED)
message(FATAL_ERROR "${_msg}")
else ()
if (NOT ${_NAME}_FIND_QUIETLY)
message(STATUS "${_msg}")
endif ()
endif ()
endmacro()

# internal helper macro to generate the failure message when used in CONFIG_MODE:
macro(_FPHSA_HANDLE_FAILURE_CONFIG_MODE)
# <name>_CONFIG is set, but FOUND is false, this means that some other of the REQUIRED_VARS was not found:
if(${_NAME}_CONFIG)
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: missing: ${MISSING_VARS} (found ${${_NAME}_CONFIG} ${VERSION_MSG})")
else()
# If _CONSIDERED_CONFIGS is set, the config-file has been found, but no suitable version.
# List them all in the error message:
if(${_NAME}_CONSIDERED_CONFIGS)
set(configsText "")
list(LENGTH ${_NAME}_CONSIDERED_CONFIGS configsCount)
math(EXPR configsCount "${configsCount} - 1")
foreach(currentConfigIndex RANGE ${configsCount})
list(GET ${_NAME}_CONSIDERED_CONFIGS ${currentConfigIndex} filename)
list(GET ${_NAME}_CONSIDERED_VERSIONS ${currentConfigIndex} version)
set(configsText "${configsText} ${filename} (version ${version})\n")
endforeach()
if (${_NAME}_NOT_FOUND_MESSAGE)
set(configsText "${configsText} Reason given by package: ${${_NAME}_NOT_FOUND_MESSAGE}\n")
endif()
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} ${VERSION_MSG}, checked the following files:\n${configsText}")

else()
# Simple case: No Config-file was found at all:
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: found neither ${_NAME}Config.cmake nor ${_NAME_LOWER}-config.cmake ${VERSION_MSG}")
endif()
endif()
endmacro()

function(FIND_PACKAGE_HANDLE_STANDARD_ARGS _NAME _FIRST_ARG)

# set up the arguments for CMAKE_PARSE_ARGUMENTS and check whether we are in
# new extended or in the "old" mode:
set(options CONFIG_MODE HANDLE_COMPONENTS)
set(oneValueArgs FAIL_MESSAGE VERSION_VAR FOUND_VAR)
set(multiValueArgs REQUIRED_VARS)
set(_KEYWORDS_FOR_EXTENDED_MODE ${options} ${oneValueArgs} ${multiValueArgs} )
list(FIND _KEYWORDS_FOR_EXTENDED_MODE "${_FIRST_ARG}" INDEX)

if(${INDEX} EQUAL -1)
set(FPHSA_FAIL_MESSAGE ${_FIRST_ARG})
set(FPHSA_REQUIRED_VARS ${ARGN})
set(FPHSA_VERSION_VAR)
else()

CMAKE_PARSE_ARGUMENTS(FPHSA "${options}" "${oneValueArgs}" "${multiValueArgs}" ${_FIRST_ARG} ${ARGN})

if(FPHSA_UNPARSED_ARGUMENTS)
message(FATAL_ERROR "Unknown keywords given to FIND_PACKAGE_HANDLE_STANDARD_ARGS(): \"${FPHSA_UNPARSED_ARGUMENTS}\"")
endif()

if(NOT FPHSA_FAIL_MESSAGE)
set(FPHSA_FAIL_MESSAGE "DEFAULT_MSG")
endif()
endif()

# now that we collected all arguments, process them

if("x${FPHSA_FAIL_MESSAGE}" STREQUAL "xDEFAULT_MSG")
set(FPHSA_FAIL_MESSAGE "Could NOT find ${_NAME}")
endif()

# In config-mode, we rely on the variable <package>_CONFIG, which is set by find_package()
# when it successfully found the config-file, including version checking:
if(FPHSA_CONFIG_MODE)
list(INSERT FPHSA_REQUIRED_VARS 0 ${_NAME}_CONFIG)
list(REMOVE_DUPLICATES FPHSA_REQUIRED_VARS)
set(FPHSA_VERSION_VAR ${_NAME}_VERSION)
endif()

if(NOT FPHSA_REQUIRED_VARS)
message(FATAL_ERROR "No REQUIRED_VARS specified for FIND_PACKAGE_HANDLE_STANDARD_ARGS()")
endif()

list(GET FPHSA_REQUIRED_VARS 0 _FIRST_REQUIRED_VAR)

string(TOUPPER ${_NAME} _NAME_UPPER)
string(TOLOWER ${_NAME} _NAME_LOWER)

if(FPHSA_FOUND_VAR)
if(FPHSA_FOUND_VAR MATCHES "^${_NAME}_FOUND$" OR FPHSA_FOUND_VAR MATCHES "^${_NAME_UPPER}_FOUND$")
set(_FOUND_VAR ${FPHSA_FOUND_VAR})
else()
message(FATAL_ERROR "The argument for FOUND_VAR is \"${FPHSA_FOUND_VAR}\", but only \"${_NAME}_FOUND\" and \"${_NAME_UPPER}_FOUND\" are valid names.")
endif()
else()
set(_FOUND_VAR ${_NAME_UPPER}_FOUND)
endif()

# collect all variables which were not found, so they can be printed, so the
# user knows better what went wrong (#6375)
set(MISSING_VARS "")
set(DETAILS "")
# check if all passed variables are valid
set(FPHSA_FOUND_${_NAME} TRUE)
foreach(_CURRENT_VAR ${FPHSA_REQUIRED_VARS})
if(NOT ${_CURRENT_VAR})
set(FPHSA_FOUND_${_NAME} FALSE)
set(MISSING_VARS "${MISSING_VARS} ${_CURRENT_VAR}")
else()
set(DETAILS "${DETAILS}[${${_CURRENT_VAR}}]")
endif()
endforeach()
if(FPHSA_FOUND_${_NAME})
set(${_NAME}_FOUND TRUE)
set(${_NAME_UPPER}_FOUND TRUE)
else()
set(${_NAME}_FOUND FALSE)
set(${_NAME_UPPER}_FOUND FALSE)
endif()

# component handling
unset(FOUND_COMPONENTS_MSG)
unset(MISSING_COMPONENTS_MSG)

if(FPHSA_HANDLE_COMPONENTS)
foreach(comp ${${_NAME}_FIND_COMPONENTS})
if(${_NAME}_${comp}_FOUND)

if(NOT DEFINED FOUND_COMPONENTS_MSG)
set(FOUND_COMPONENTS_MSG "found components: ")
endif()
set(FOUND_COMPONENTS_MSG "${FOUND_COMPONENTS_MSG} ${comp}")

else()

if(NOT DEFINED MISSING_COMPONENTS_MSG)
set(MISSING_COMPONENTS_MSG "missing components: ")
endif()
set(MISSING_COMPONENTS_MSG "${MISSING_COMPONENTS_MSG} ${comp}")

if(${_NAME}_FIND_REQUIRED_${comp})
set(${_NAME}_FOUND FALSE)
set(MISSING_VARS "${MISSING_VARS} ${comp}")
endif()

endif()
endforeach()
set(COMPONENT_MSG "${FOUND_COMPONENTS_MSG} ${MISSING_COMPONENTS_MSG}")
set(DETAILS "${DETAILS}[c${COMPONENT_MSG}]")
endif()

# version handling:
set(VERSION_MSG "")
set(VERSION_OK TRUE)
set(VERSION ${${FPHSA_VERSION_VAR}})

# check with DEFINED here as the requested or found version may be "0"
if (DEFINED ${_NAME}_FIND_VERSION)
if(DEFINED ${FPHSA_VERSION_VAR})

if(${_NAME}_FIND_VERSION_EXACT) # exact version required
# count the dots in the version string
string(REGEX REPLACE "[^.]" "" _VERSION_DOTS "${VERSION}")
# add one dot because there is one dot more than there are components
string(LENGTH "${_VERSION_DOTS}." _VERSION_DOTS)
if (_VERSION_DOTS GREATER ${_NAME}_FIND_VERSION_COUNT)
# Because of the C++ implementation of find_package() ${_NAME}_FIND_VERSION_COUNT
# is at most 4 here. Therefore a simple lookup table is used.
if (${_NAME}_FIND_VERSION_COUNT EQUAL 1)
set(_VERSION_REGEX "[^.]*")
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 2)
set(_VERSION_REGEX "[^.]*\\.[^.]*")
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 3)
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*")
else ()
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*\\.[^.]*")
endif ()
string(REGEX REPLACE "^(${_VERSION_REGEX})\\..*" "\\1" _VERSION_HEAD "${VERSION}")
unset(_VERSION_REGEX)
if (NOT ${_NAME}_FIND_VERSION VERSION_EQUAL _VERSION_HEAD)
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
endif ()
unset(_VERSION_HEAD)
else ()
if (NOT ${_NAME}_FIND_VERSION VERSION_EQUAL VERSION)
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
endif ()
endif ()
unset(_VERSION_DOTS)

else() # minimum version specified:
if (${_NAME}_FIND_VERSION VERSION_GREATER VERSION)
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is at least \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable version \"${VERSION}\", minimum required is \"${${_NAME}_FIND_VERSION}\")")
endif ()
endif()

else()

# if the package was not found, but a version was given, add that to the output:
|
||||
if(${_NAME}_FIND_VERSION_EXACT)
|
||||
set(VERSION_MSG "(Required is exact version \"${${_NAME}_FIND_VERSION}\")")
|
||||
else()
|
||||
set(VERSION_MSG "(Required is at least version \"${${_NAME}_FIND_VERSION}\")")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
else ()
|
||||
if(VERSION)
|
||||
set(VERSION_MSG "(found version \"${VERSION}\")")
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
if(VERSION_OK)
|
||||
set(DETAILS "${DETAILS}[v${VERSION}(${${_NAME}_FIND_VERSION})]")
|
||||
else()
|
||||
set(${_NAME}_FOUND FALSE)
|
||||
endif()
|
||||
|
||||
|
||||
# print the result:
|
||||
if (${_NAME}_FOUND)
|
||||
FIND_PACKAGE_MESSAGE(${_NAME} "Found ${_NAME}: ${${_FIRST_REQUIRED_VAR}} ${VERSION_MSG} ${COMPONENT_MSG}" "${DETAILS}")
|
||||
else ()
|
||||
|
||||
if(FPHSA_CONFIG_MODE)
|
||||
_FPHSA_HANDLE_FAILURE_CONFIG_MODE()
|
||||
else()
|
||||
if(NOT VERSION_OK)
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: ${VERSION_MSG} (found ${${_FIRST_REQUIRED_VAR}})")
|
||||
else()
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} (missing: ${MISSING_VARS}) ${VERSION_MSG}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif ()
|
||||
|
||||
set(${_NAME}_FOUND ${${_NAME}_FOUND} PARENT_SCOPE)
|
||||
set(${_NAME_UPPER}_FOUND ${${_NAME}_FOUND} PARENT_SCOPE)
|
||||
endfunction()
|
|
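For orientation, a minimal sketch (not part of this commit) of how a hypothetical FindFoo.cmake module might call the function above; the FOO_* variables and the hard-coded version are placeholder assumptions, not code from this repository:

    # hypothetical FindFoo.cmake
    find_path(FOO_INCLUDE_DIR NAMES foo.h)
    find_library(FOO_LIBRARY NAMES foo)
    set(FOO_VERSION "1.2.3")  # placeholder; normally parsed from a header or pkg-config

    include(FindPackageHandleStandardArgs)
    # sets FOO_FOUND and prints a single status line via FIND_PACKAGE_MESSAGE()
    find_package_handle_standard_args(Foo
      REQUIRED_VARS FOO_LIBRARY FOO_INCLUDE_DIR
      VERSION_VAR FOO_VERSION)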
@@ -1,57 +0,0 @@
#.rst:
# FindPackageMessage
# ------------------
#
#
#
# FIND_PACKAGE_MESSAGE(<name> "message for user" "find result details")
#
# This macro is intended to be used in FindXXX.cmake modules files. It
# will print a message once for each unique find result. This is useful
# for telling the user where a package was found. The first argument
# specifies the name (XXX) of the package. The second argument
# specifies the message to display. The third argument lists details
# about the find result so that if they change the message will be
# displayed again. The macro also obeys the QUIET argument to the
# find_package command.
#
# Example:
#
# ::
#
#   if(X11_FOUND)
#     FIND_PACKAGE_MESSAGE(X11 "Found X11: ${X11_X11_LIB}"
#       "[${X11_X11_LIB}][${X11_INCLUDE_DIR}]")
#   else()
#    ...
#   endif()

#=============================================================================
# Copyright 2008-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
#  License text for the above reference.)

function(FIND_PACKAGE_MESSAGE pkg msg details)
  # Avoid printing a message repeatedly for the same find result.
  if(NOT ${pkg}_FIND_QUIETLY)
    string(REPLACE "\n" "" details "${details}")
    set(DETAILS_VAR FIND_PACKAGE_MESSAGE_DETAILS_${pkg})
    if(NOT "${details}" STREQUAL "${${DETAILS_VAR}}")
      # The message has not yet been printed.
      message(STATUS "${msg}")

      # Save the find details in the cache to avoid printing the same
      # message again.
      set("${DETAILS_VAR}" "${details}"
        CACHE INTERNAL "Details about finding ${pkg}")
    endif()
  endif()
endfunction()
@@ -518,7 +518,6 @@ function checkArangoAlive(arangod, options) {
      arangod.exitStatus = res;
      analyzeServerCrash(arangod, options, "health Check");
    }

  }

  return ret;

@@ -3968,7 +3967,7 @@ function unitTest(cases, options) {
    UNITTESTS_DIR = fs.join(UNITTESTS_DIR, options.buildType);
  }

  CONFIG_DIR = fs.join(TOP_DIR, builddir, "etc", "arangodb");
  CONFIG_DIR = fs.join(TOP_DIR, builddir, "etc", "arangodb3");
  ARANGOBENCH_BIN = fs.join(BIN_DIR, "arangobench");
  ARANGODUMP_BIN = fs.join(BIN_DIR, "arangodump");
  ARANGOD_BIN = fs.join(BIN_DIR, "arangod");
@@ -182,7 +182,7 @@
  "ERROR_QUERY_EXCEPTION_OPTIONS" : { "code" : 1576, "message" : "query options expected" },
  "ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION" : { "code" : 1577, "message" : "collection '%s' used as expression operand" },
  "ERROR_QUERY_DISALLOWED_DYNAMIC_CALL" : { "code" : 1578, "message" : "disallowed dynamic call to '%s'" },
  "ERROR_QUERY_ACCESS_AFTER_MODIFICATION" : { "code" : 1579, "message" : "access after data-modification" },
  "ERROR_QUERY_ACCESS_AFTER_MODIFICATION" : { "code" : 1579, "message" : "access after data-modification by %s" },
  "ERROR_QUERY_FUNCTION_INVALID_NAME" : { "code" : 1580, "message" : "invalid user function name" },
  "ERROR_QUERY_FUNCTION_INVALID_CODE" : { "code" : 1581, "message" : "invalid user function code" },
  "ERROR_QUERY_FUNCTION_NOT_FOUND" : { "code" : 1582, "message" : "user function '%s()' not found" },
@@ -7965,7 +7965,11 @@ function AQL_GRAPH_CLOSENESS (graphName, options) {
function AQL_GRAPH_ABSOLUTE_BETWEENNESS (graphName, options) {
  'use strict';

  options = CLONE(options) || {};
  if (typeof options !== "object" || Array.isArray(options)) {
    options = {};
  } else {
    options = CLONE(options);
  }
  if (! options.direction) {
    options.direction = 'any';
  }
@@ -0,0 +1,461 @@
/*jshint globalstrict:false, strict:false, maxlen: 500 */
/*global AQL_EXECUTE */
////////////////////////////////////////////////////////////////////////////////
/// @brief tests for query language, functions
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

var internal = require("internal");
var errors = internal.errors;
var jsunity = require("jsunity");
var db = internal.db;

////////////////////////////////////////////////////////////////////////////////
/// @brief test suite
////////////////////////////////////////////////////////////////////////////////

function ahuacatlFunctionsBruteTestSuite () {
  var c = null;

  var all = [
    "IS_NULL",
    "IS_BOOL",
    "IS_NUMBER",
    "IS_STRING",
    "IS_ARRAY",
    "IS_LIST",
    "IS_OBJECT",
    "IS_DOCUMENT",
    "IS_DATESTRING",
    "TO_NUMBER",
    "TO_STRING",
    "TO_BOOL",
    "TO_ARRAY",
    "TO_LIST",
    "CONCAT",
    "CONCAT_SEPARATOR",
    "CHAR_LENGTH",
    "LOWER",
    "UPPER",
    "SUBSTRING",
    "CONTAINS",
    "LIKE",
    "LEFT",
    "RIGHT",
    "TRIM",
    "LTRIM",
    "RTRIM",
    "FIND_FIRST",
    "FIND_LAST",
    "SPLIT",
    "SUBSTITUTE",
    "MD5",
    "SHA1",
    "RANDOM_TOKEN",
    "FLOOR",
    "CEIL",
    "ROUND",
    "ABS",
    "RAND",
    "SQRT",
    "POW",
    "RANGE",
    "UNION",
    "UNION_DISTINCT",
    "MINUS",
    "INTERSECTION",
    "FLATTEN",
    "LENGTH",
    "COUNT",
    "MIN",
    "MAX",
    "SUM",
    "MEDIAN",
    "PERCENTILE",
    "AVERAGE",
    "AVG",
    "VARIANCE_SAMPLE",
    "VARIANCE_POPULATION",
    "STDDEV_SAMPLE",
    "STDDEV_POPULATION",
    "STDDEV",
    "UNIQUE",
    "SLICE",
    "REVERSE",
    "FIRST",
    "LAST",
    "NTH",
    "POSITION",
    "CALL",
    "APPLY",
    "PUSH",
    "APPEND",
    "POP",
    "SHIFT",
    "UNSHIFT",
    "REMOVE_VALUE",
    "REMOVE_VALUES",
    "REMOVE_NTH",
    "HAS",
    "ATTRIBUTES",
    "VALUES",
    "MERGE",
    "MERGE_RECURSIVE",
    "DOCUMENT",
    "MATCHES",
    "UNSET",
    "UNSET_RECURSIVE",
    "KEEP",
    "TRANSLATE",
    "ZIP",
    "NEAR",
    "WITHIN",
    "WITHIN_RECTANGLE",
    "IS_IN_POLYGON",
    "FULLTEXT",
    "PATHS",
    "GRAPH_PATHS",
    "SHORTEST_PATH",
    "GRAPH_SHORTEST_PATH",
    "GRAPH_DISTANCE_TO",
    "TRAVERSAL",
    "GRAPH_TRAVERSAL",
    "TRAVERSAL_TREE",
    "GRAPH_TRAVERSAL_TREE",
    "EDGES",
    "GRAPH_EDGES",
    "GRAPH_VERTICES",
    "NEIGHBORS",
    "GRAPH_NEIGHBORS",
    "GRAPH_COMMON_NEIGHBORS",
    "GRAPH_COMMON_PROPERTIES",
    "GRAPH_ECCENTRICITY",
    "GRAPH_BETWEENNESS",
    "GRAPH_CLOSENESS",
    "GRAPH_ABSOLUTE_ECCENTRICITY",
    "GRAPH_ABSOLUTE_BETWEENNESS",
    "GRAPH_ABSOLUTE_CLOSENESS",
    "GRAPH_DIAMETER",
    "GRAPH_RADIUS",
    "DATE_NOW",
    "DATE_TIMESTAMP",
    "DATE_ISO8601",
    "DATE_DAYOFWEEK",
    "DATE_YEAR",
    "DATE_MONTH",
    "DATE_DAY",
    "DATE_HOUR",
    "DATE_MINUTE",
    "DATE_SECOND",
    "DATE_MILLISECOND",
    "DATE_DAYOFYEAR",
    "DATE_ISOWEEK",
    "DATE_LEAPYEAR",
    "DATE_QUARTER",
    "DATE_DAYS_IN_MONTH",
    "DATE_ADD",
    "DATE_SUBTRACT",
    "DATE_DIFF",
    "DATE_COMPARE",
    "DATE_FORMAT",
    "NOT_NULL",
    "FIRST_LIST",
    "FIRST_DOCUMENT",
    "PARSE_IDENTIFIER",
    "IS_SAME_COLLECTION"
  ];

  // find all functions that have parameters
  var funcs = [];
  all.forEach(function(func) {
    var query = "RETURN " + func + "()";
    try {
      AQL_EXECUTE(query);
    } catch (err) {
      var s = String(err);
      var re = s.match(/minimum: (\d+), maximum: (\d+)/);
      if (re) {
        var min = Number(re[1]);
        var max = Number(re[2]);

        if (max >= 1000) {
          max = min + 1;
        }
        if (max > 0) {
          funcs.push({ name: func, min: min, max: max });
        }
      }
    }
  });

  var oneArgument = function(func) {
    return func.min === 1;
  };

  var twoArguments = function(func) {
    return func.min === 2;
  };

  var skip = function(err) {
    if (!err.hasOwnProperty('errorNum')) {
      return false;
    }

    return [
      errors.ERROR_ARANGO_CROSS_COLLECTION_REQUEST.code,
      errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code,
      errors.ERROR_QUERY_FUNCTION_NOT_FOUND.code,
      errors.ERROR_GRAPH_NOT_FOUND.code,
      errors.ERROR_GRAPH_INVALID_GRAPH.code,
    ].indexOf(err.errorNum) !== -1;
  };

  return {

////////////////////////////////////////////////////////////////////////////////
/// @brief set up
////////////////////////////////////////////////////////////////////////////////

    setUp : function () {
      db._drop("UnitTestsFunctions");
      c = db._create("UnitTestsFunctions");
      c.insert({ _key: "test", value: "test" });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief tear down
////////////////////////////////////////////////////////////////////////////////

    tearDown : function () {
      db._drop("UnitTestsFunctions");
    },

    testFunctionsOneWithDoc : function() {
      funcs.filter(oneArgument).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsOneWithDocArray : function() {
      funcs.filter(oneArgument).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsOneWithKey : function() {
      funcs.filter(oneArgument).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc._key)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsOneWithId : function() {
      funcs.filter(oneArgument).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc._id)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsOneWithIdArray : function() {
      funcs.filter(oneArgument).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc._id ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithDoc : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc, doc)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithDocArray : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc ], [ doc ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            require("internal").print(query);
            throw err;
          }
        }
      });
    },

    testFunctionTwoWithKey : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc._key, doc._key)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithId : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc._id, doc._id)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithIdArray : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc._id ], [ doc._id ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithDocId : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc, doc._id)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithDocIdArray : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc, [ doc._id ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithIdDoc : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "(doc._id, doc)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithIdDocArray : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc._id ], doc)";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

    testFunctionsTwoWithIdArrayDocArray : function() {
      funcs.filter(twoArguments).forEach(function(func) {
        var query = "FOR doc IN @@collection RETURN " + func.name + "([ doc._id ], [ doc ])";
        try {
          AQL_EXECUTE(query, { "@collection" : c.name() });
        } catch (err) {
          if (!skip(err)) {
            throw err;
          }
        }
      });
    },

  };
}

////////////////////////////////////////////////////////////////////////////////
/// @brief executes the test suite
////////////////////////////////////////////////////////////////////////////////

jsunity.run(ahuacatlFunctionsBruteTestSuite);

return jsunity.done();
@@ -245,10 +245,7 @@ function ahuacatlMultiModifySuite () {
    testMultiInsertLoopSubquerySameCollection : function () {
      AQL_EXECUTE("FOR i IN 1..10 INSERT { value: i } INTO @@cn", { "@cn" : cn1 });
      var q = "FOR i IN @@cn LET sub = (FOR j IN 1..2 INSERT { value: j } INTO @@cn) RETURN 1";
      var actual = AQL_EXECUTE(q, { "@cn": cn1 });
      assertEqual(10, actual.json.length);
      assertEqual(20, actual.stats.writesExecuted);
      assertEqual(30, c1.count());
      assertQueryError(errors.ERROR_QUERY_ACCESS_AFTER_MODIFICATION.code, q, { "@cn": cn1 });
    },

    testMultiInsertLoopSubqueryOtherCollection : function () {

@@ -333,10 +330,7 @@ function ahuacatlMultiModifySuite () {
    testMultiRemoveLoopSubquerySameCollection : function () {
      AQL_EXECUTE("FOR i IN 1..2010 INSERT { _key: CONCAT('test', i) } INTO @@cn", { "@cn" : cn1 });
      var q = "FOR i IN @@cn LET sub = (REMOVE { _key: i._key } INTO @@cn) RETURN 1";
      var actual = AQL_EXECUTE(q, { "@cn": cn1 });
      assertEqual(2010, actual.json.length);
      assertEqual(2010, actual.stats.writesExecuted);
      assertEqual(0, c1.count());
      assertQueryError(errors.ERROR_QUERY_ACCESS_AFTER_MODIFICATION.code, q, { "@cn": cn1 });
    },

    testMultiRemoveLoopSubqueryOtherCollection : function () {

@@ -358,35 +352,20 @@ function ahuacatlMultiModifySuite () {

    testRemoveInSubqueryNoResult : function () {
      AQL_EXECUTE("FOR i IN 1..2010 INSERT { value: i } INTO @@cn", { "@cn" : cn1 });
      var actual = AQL_EXECUTE("FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn) RETURN f", { "@cn" : cn1 }).json;
      var expected = [ ];
      for (var i = 1; i <= 2010; ++i) {
        expected.push([ ]);
      }
      assertEqual(expected, actual);
      assertEqual(0, c1.count());
      var q = "FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn) RETURN f";
      assertQueryError(errors.ERROR_QUERY_ACCESS_AFTER_MODIFICATION.code, q, {"@cn": cn1 });
    },

    testRemoveInSubqueryReturnKeys : function () {
      AQL_EXECUTE("FOR i IN 1..2010 INSERT { value: i } INTO @@cn", { "@cn" : cn1 });
      var actual = AQL_EXECUTE("FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn RETURN OLD.value) RETURN f", { "@cn" : cn1 }).json;
      var expected = [ ];
      for (var i = 1; i <= 2010; ++i) {
        expected.push([ i ]);
      }
      assertEqual(expected, actual);
      assertEqual(0, c1.count());
      var q = "FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn RETURN OLD.value) RETURN f";
      assertQueryError(errors.ERROR_QUERY_ACCESS_AFTER_MODIFICATION.code, q, {"@cn": cn1 });
    },

    testRemoveInSubqueryReturnKeysDoc : function () {
      AQL_EXECUTE("FOR i IN 1..2010 INSERT { value: i } INTO @@cn", { "@cn" : cn1 });
      var actual = AQL_EXECUTE("FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn RETURN OLD) RETURN f[0].value", { "@cn" : cn1 }).json;
      var expected = [ ];
      for (var i = 1; i <= 2010; ++i) {
        expected.push(i);
      }
      assertEqual(expected, actual);
      assertEqual(0, c1.count());
      var q = "FOR doc IN @@cn SORT doc.value LET f = (REMOVE doc IN @@cn RETURN OLD) RETURN f[0].value";
      assertQueryError(errors.ERROR_QUERY_ACCESS_AFTER_MODIFICATION.code, q, {"@cn": cn1 });
    },

    testInsertRemove : function () {
@@ -168,7 +168,7 @@ size_t VelocyPackHelper::VPackStringHash::operator()(VPackSlice const& slice) co
};

bool VelocyPackHelper::VPackEqual::operator()(VPackSlice const& lhs, VPackSlice const& rhs) const {
  return VelocyPackHelper::compare(lhs, rhs, false) == 0;
  return VelocyPackHelper::compare(lhs, rhs, false, _options) == 0;
};

bool VelocyPackHelper::VPackStringEqual::operator()(VPackSlice const& lhs, VPackSlice const& rhs) const noexcept {
@@ -71,8 +71,15 @@ class VelocyPackHelper {
  ////////////////////////////////////////////////////////////////////////////////

  struct VPackEqual {
    bool operator()(arangodb::velocypack::Slice const&,
                    arangodb::velocypack::Slice const&) const;
   private:
    arangodb::velocypack::Options const* _options;
   public:
    VPackEqual() : _options(nullptr) {}
    explicit VPackEqual(arangodb::velocypack::Options const* opts)
        : _options(opts) {}

    bool operator()(arangodb::velocypack::Slice const&,
                    arangodb::velocypack::Slice const&) const;
  };

  struct VPackStringEqual {
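A minimal sketch (not part of this diff) of how the options-aware comparator might be exercised; the include paths and the arangodb::basics namespace are assumptions on my part, as is the behaviour of the default comparison options:

    #include <velocypack/Builder.h>
    #include <velocypack/Value.h>
    #include <velocypack/Options.h>
    #include "Basics/VelocyPackHelper.h"   // assumed header location

    bool equalUnderOptions() {
      arangodb::velocypack::Builder a;
      a.add(arangodb::velocypack::Value(1));     // integer 1
      arangodb::velocypack::Builder b;
      b.add(arangodb::velocypack::Value(1.0));   // double 1.0

      arangodb::velocypack::Options opts;        // custom comparison options, if any
      arangodb::basics::VelocyPackHelper::VPackEqual eq(&opts);
      // normalized comparison: numeric 1 and 1.0 are expected to compare equal
      return eq(a.slice(), b.slice());
    }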
@@ -223,7 +223,7 @@ ERROR_QUERY_COMPILE_TIME_OPTIONS,1575,"query options must be readable at query c
ERROR_QUERY_EXCEPTION_OPTIONS,1576,"query options expected", "Will be raised when an AQL data-modification query contains an invalid options specification."
ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION,1577,"collection '%s' used as expression operand", "Will be raised when a collection is used as an operand in an AQL expression."
ERROR_QUERY_DISALLOWED_DYNAMIC_CALL,1578,"disallowed dynamic call to '%s'", "Will be raised when a dynamic function call is made to a function that cannot be called dynamically."
ERROR_QUERY_ACCESS_AFTER_MODIFICATION,1579,"access after data-modification", "Will be raised when collection data are accessed after a data-modification operation."
ERROR_QUERY_ACCESS_AFTER_MODIFICATION,1579,"access after data-modification by %s", "Will be raised when collection data are accessed after a data-modification operation."

################################################################################
## AQL user functions
@@ -178,7 +178,7 @@ void TRI_InitializeErrorMessages () {
  REG_ERROR(ERROR_QUERY_EXCEPTION_OPTIONS, "query options expected");
  REG_ERROR(ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION, "collection '%s' used as expression operand");
  REG_ERROR(ERROR_QUERY_DISALLOWED_DYNAMIC_CALL, "disallowed dynamic call to '%s'");
  REG_ERROR(ERROR_QUERY_ACCESS_AFTER_MODIFICATION, "access after data-modification");
  REG_ERROR(ERROR_QUERY_ACCESS_AFTER_MODIFICATION, "access after data-modification by %s");
  REG_ERROR(ERROR_QUERY_FUNCTION_INVALID_NAME, "invalid user function name");
  REG_ERROR(ERROR_QUERY_FUNCTION_INVALID_CODE, "invalid user function code");
  REG_ERROR(ERROR_QUERY_FUNCTION_NOT_FOUND, "user function '%s()' not found");
@@ -441,7 +441,7 @@
/// - 1578: @LIT{disallowed dynamic call to '\%s'}
/// "Will be raised when a dynamic function call is made to a function that
/// cannot be called dynamically."
/// - 1579: @LIT{access after data-modification}
/// - 1579: @LIT{access after data-modification by \%s}
/// "Will be raised when collection data are accessed after a
/// data-modification operation."
/// - 1580: @LIT{invalid user function name}

@@ -573,7 +573,7 @@
/// - 1934: @LIT{Invalid example type. Has to be Array or Object}
/// Invalid example type. Has to be Array or Object.
/// - 1935: @LIT{Invalid number of arguments. Expected: }
/// Invalid number of arguments. Expected:
/// Invalid number of arguments. Expected:
/// - 1936: @LIT{Invalid parameter type.}
/// Invalid parameter type.
/// - 1937: @LIT{Invalid id}

@@ -2443,7 +2443,7 @@ void TRI_InitializeErrorMessages ();
////////////////////////////////////////////////////////////////////////////////
/// @brief 1579: ERROR_QUERY_ACCESS_AFTER_MODIFICATION
///
/// access after data-modification
/// access after data-modification by %s
///
/// "Will be raised when collection data are accessed after a
/// data-modification operation."

@@ -3043,9 +3043,9 @@ void TRI_InitializeErrorMessages ();
////////////////////////////////////////////////////////////////////////////////
/// @brief 1935: ERROR_GRAPH_INVALID_NUMBER_OF_ARGUMENTS
///
/// Invalid number of arguments. Expected:
/// Invalid number of arguments. Expected:
///
/// Invalid number of arguments. Expected:
/// Invalid number of arguments. Expected:
////////////////////////////////////////////////////////////////////////////////

#define TRI_ERROR_GRAPH_INVALID_NUMBER_OF_ARGUMENTS (1935)