mirror of https://gitee.com/bigwinds/arangodb

commit aa041ddfb9 (parent 28e645af2a): locally tested only (#4476)
@@ -138,10 +138,18 @@ if (USE_IRESEARCH)
   set(ICU_FOUND TRUE) # ICU built from source in 3rdParty directory
 endif()

+if(NOT PERL_FOUND)
+  set(PERL_FOUND TRUE) # suppress error for Perl not-found
+
+  # MSVC will execute ADD_CUSTOM_COMMAND even though OUTPUT is already present
+  if(MSVC)
+    set(PERL_EXECUTABLE echo)
+  endif()
+endif()
+
 set(LZ4_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/lz4")
 set(SNOWBALL_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/snowball")
 set(UNWIND_ROOT "invalid")
+set(USE_IQL OFF CACHE BOOL "Use IQL" FORCE) # skip IQL

 set(IRESEARCH_EXCLUDE_STATIC_THIRD_PARTY_LIBS TRUE) # disable linking in of 3rd party libraries automatically
 find_package(IResearch REQUIRED) # set IRESEARCH_BUILD_DIR
@@ -151,6 +159,8 @@ if (USE_IRESEARCH)
   "${CMAKE_CURRENT_SOURCE_DIR}/cmake" # cmake overrides (must be first)
   "${IRESEARCH_ROOT}/cmake" # to find iResearch dependencies
 )

+file(COPY "${IRESEARCH_ROOT}.build/" DESTINATION "${IRESEARCH_BUILD_DIR}" FILES_MATCHING PATTERN "*")
 add_subdirectory("${IRESEARCH_ROOT}" "${IRESEARCH_BUILD_DIR}" EXCLUDE_FROM_ALL) # do not build unused targets
 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH_ORIGINAL}) # restore CMAKE_MODULE_PATH
 unset(BUILD_SHARED_LIBS) # otherwise ZLib (below) does not build on win32
@@ -0,0 +1,168 @@
// A Bison parser, made by GNU Bison 3.0.4.

// Locations for Bison parsers in C++

// Copyright (C) 2002-2015 Free Software Foundation, Inc.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

// As a special exception, you may create a larger work that contains
// part or all of the Bison parser skeleton and distribute that work
// under terms of your choice, so long as that work isn't itself a
// parser generator using the skeleton or a modified version thereof
// as a parser skeleton. Alternatively, if you modify or redistribute
// the parser skeleton itself, you may (at your option) remove this
// special exception, which will cause the skeleton and the resulting
// Bison output files to be licensed under the GNU General Public
// License without this special exception.

// This special exception was added by the Free Software Foundation in
// version 2.2 of Bison.

/**
 ** \file location.hh
 ** Define the iresearch::iql::location class.
 */

#ifndef YY_YY_LOCATION_HH_INCLUDED
# define YY_YY_LOCATION_HH_INCLUDED

# include "position.hh"

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // location.cc:296
namespace iresearch { namespace iql {
#line 46 "location.hh" // location.cc:296
  /// Abstract a location.
  class location
  {
  public:

    /// Initialization.
    void initialize (std::string* f = YY_NULLPTR,
                     unsigned int l = 1u,
                     unsigned int c = 1u)
    {
      begin.initialize (f, l, c);
      end = begin;
    }

    /** \name Line and Column related manipulators
     ** \{ */
  public:
    /// Reset initial location to final location.
    void step ()
    {
      begin = end;
    }

    /// Extend the current location to the COUNT next columns.
    void columns (int count = 1)
    {
      end += count;
    }

    /// Extend the current location to the COUNT next lines.
    void lines (int count = 1)
    {
      end.lines (count);
    }
    /** \} */


  public:
    /// Beginning of the located region.
    position begin;
    /// End of the located region.
    position end;
  };

  /// Join two locations, in place.
  inline location& operator+= (location& res, const location& end)
  {
    res.end = end.end;
    return res;
  }

  /// Join two locations.
  inline location operator+ (location res, const location& end)
  {
    return res += end;
  }

  /// Add \a width columns to the end position, in place.
  inline location& operator+= (location& res, int width)
  {
    res.columns (width);
    return res;
  }

  /// Add \a width columns to the end position.
  inline location operator+ (location res, int width)
  {
    return res += width;
  }

  /// Subtract \a width columns from the end position, in place.
  inline location& operator-= (location& res, int width)
  {
    return res += -width;
  }

  /// Subtract \a width columns from the end position.
  inline location operator- (location res, int width)
  {
    return res -= width;
  }

  /// Compare two location objects.
  inline bool
  operator== (const location& loc1, const location& loc2)
  {
    return loc1.begin == loc2.begin && loc1.end == loc2.end;
  }

  /// Compare two location objects.
  inline bool
  operator!= (const location& loc1, const location& loc2)
  {
    return !(loc1 == loc2);
  }

  /** \brief Intercept output stream redirection.
   ** \param ostr the destination output stream
   ** \param loc a reference to the location to redirect
   **
   ** Avoid duplicate information.
   */
  template <typename YYChar>
  inline std::basic_ostream<YYChar>&
  operator<< (std::basic_ostream<YYChar>& ostr, const location& loc)
  {
    unsigned int end_col = 0 < loc.end.column ? loc.end.column - 1 : 0;
    ostr << loc.begin;
    if (loc.end.filename
        && (!loc.begin.filename
            || *loc.begin.filename != *loc.end.filename))
      ostr << '-' << loc.end.filename << ':' << loc.end.line << '.' << end_col;
    else if (loc.begin.line < loc.end.line)
      ostr << '-' << loc.end.line << '.' << end_col;
    else if (loc.begin.column < end_col)
      ostr << '-' << end_col;
    return ostr;
  }

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // location.cc:296
} } // iresearch::iql
#line 168 "location.hh" // location.cc:296
#endif // !YY_YY_LOCATION_HH_INCLUDED
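Side note: this generated header (together with position.hh, added further below) is self-contained, so its behaviour is easy to sanity-check; a tiny usage sketch (not part of the commit, assuming both headers are on the include path):

#include <iostream>
#include <string>
#include "location.hh" // pulls in position.hh

int main() {
  std::string file = "query.iql"; // hypothetical input name
  iresearch::iql::location loc;
  loc.initialize(&file);    // begin = end = query.iql:1.1
  loc.columns(5);           // consume a 5-character token: end -> 1.6
  std::cout << loc << '\n'; // prints "query.iql:1.1-5"
  loc.step();               // begin = end, ready for the next token
  loc.lines(1);             // newline: end -> 2.1
  std::cout << loc << '\n'; // prints "query.iql:1.6-2.0"
  return 0;
}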
[File diff suppressed because it is too large]
@@ -0,0 +1,379 @@
// A Bison parser, made by GNU Bison 3.0.4.

// Skeleton interface for Bison GLR parsers in C++

// Copyright (C) 2002-2015 Free Software Foundation, Inc.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

// As a special exception, you may create a larger work that contains
// part or all of the Bison parser skeleton and distribute that work
// under terms of your choice, so long as that work isn't itself a
// parser generator using the skeleton or a modified version thereof
// as a parser skeleton. Alternatively, if you modify or redistribute
// the parser skeleton itself, you may (at your option) remove this
// special exception, which will cause the skeleton and the resulting
// Bison output files to be licensed under the GNU General Public
// License without this special exception.

// This special exception was added by the Free Software Foundation in
// version 2.2 of Bison.

// C++ GLR parser skeleton written by Akim Demaille.

#ifndef YY_YY_PARSER_HH_INCLUDED
# define YY_YY_PARSER_HH_INCLUDED
// // "%code requires" blocks.
#line 39 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // glr.cc:329

#define YYSTYPE size_t

#if YYDEBUG
#define YYERROR_VERBOSE 1

// this is required only for %skeleton "glr.cc" since it lacks default init
// not needed for %skeleton "lalr1.cc" or without %skeleton
#define YYLTYPE iresearch::iql::location_type1
#endif

namespace iresearch {
  namespace iql {
    class context;
  }
}

// Suppress warnings due to Bison generated code (push section, pop just below)
#if defined(_MSC_VER)
  #pragma warning(disable : 4512)
#endif

// ALWAYS!!! define YYDEBUG 1 for the length of the header so as to avoid
// alignment issues when linking without YYDEBUG against a library that was
// built with YYDEBUG or vice versa, reverse at end of header
#undef YYDEBUG_REQUESTED
#if YYDEBUG
  #define YYDEBUG_REQUESTED
#else
  #undef YYDEBUG
  #define YYDEBUG 1
#endif

#line 73 "parser.hh" // glr.cc:329


#include <stdexcept>
#include <string>
#include <iostream>
#include "location.hh"

/* Debug traces. */
#ifndef YYDEBUG
# define YYDEBUG 0
#endif

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // glr.cc:329
namespace iresearch { namespace iql {
#line 88 "parser.hh" // glr.cc:329


  /// A Bison parser.
  class parser
  {
  public:
#ifndef YYSTYPE
    /// Symbol semantic values.
    typedef int semantic_type;
#else
    typedef YYSTYPE semantic_type;
#endif
    /// Symbol locations.
    typedef location location_type;

    /// Syntax errors thrown from user actions.
    struct syntax_error : std::runtime_error
    {
      syntax_error (const location_type& l, const std::string& m);
      location_type location;
    };

    /// Tokens.
    struct token
    {
      enum yytokentype
      {
        IQL_EOF = 0,
        IQL_UNKNOWN = 258,
        IQL_SEP = 259,
        IQL_SEQUENCE = 260,
        IQL_NOT = 261,
        IQL_EXCLAIM = 262,
        IQL_AND = 263,
        IQL_AMPAMP = 264,
        IQL_OR = 265,
        IQL_PIPEPIPE = 266,
        IQL_NE = 267,
        IQL_LE = 268,
        IQL_EQ = 269,
        IQL_GE = 270,
        IQL_LIKE = 271,
        IQL_ASTERISK = 272,
        IQL_ASC = 273,
        IQL_DESC = 274,
        IQL_COMMA = 275,
        IQL_SQUOTE = 276,
        IQL_DQUOTE = 277,
        IQL_LCHEVRON = 278,
        IQL_RCHEVRON = 279,
        IQL_LPAREN = 280,
        IQL_RPAREN = 281,
        IQL_LSBRACKET = 282,
        IQL_RSBRACKET = 283,
        IQL_LIMIT = 284,
        IQL_ORDER = 285
      };
    };

    /// (External) token type, as returned by yylex.
    typedef token::yytokentype token_type;

    /// Symbol type: an internal symbol number.
    typedef int symbol_number_type;

    /// The symbol type number to denote an empty symbol.
    enum { empty_symbol = -2 };

    /// Internal symbol number for tokens (subsumed by symbol_number_type).
    typedef unsigned char token_number_type;

    /// A complete symbol.
    ///
    /// Expects its Base type to provide access to the symbol type
    /// via type_get().
    ///
    /// Provide access to semantic value and location.
    template <typename Base>
    struct basic_symbol : Base
    {
      /// Alias to Base.
      typedef Base super_type;

      /// Default constructor.
      basic_symbol ();

      /// Copy constructor.
      basic_symbol (const basic_symbol& other);

      /// Constructor for valueless symbols.
      basic_symbol (typename Base::kind_type t,
                    const location_type& l);

      /// Constructor for symbols with semantic value.
      basic_symbol (typename Base::kind_type t,
                    const semantic_type& v,
                    const location_type& l);

      /// Destroy the symbol.
      ~basic_symbol ();

      /// Destroy contents, and record that it is empty.
      void clear ();

      /// Whether empty.
      bool empty () const;

      /// Destructive move, \a s is emptied into this.
      void move (basic_symbol& s);

      /// The semantic value.
      semantic_type value;

      /// The location.
      location_type location;

    private:
      /// Assignment operator.
      basic_symbol& operator= (const basic_symbol& other);
    };

    /// Type access provider for token (enum) based symbols.
    struct by_type
    {
      /// Default constructor.
      by_type ();

      /// Copy constructor.
      by_type (const by_type& other);

      /// The symbol type as needed by the constructor.
      typedef token_type kind_type;

      /// Constructor from (external) token numbers.
      by_type (kind_type t);

      /// Record that this symbol is empty.
      void clear ();

      /// Steal the symbol type from \a that.
      void move (by_type& that);

      /// The (internal) type number (corresponding to \a type).
      /// \a empty when empty.
      symbol_number_type type_get () const;

      /// The token.
      token_type token () const;

      /// The symbol type.
      /// \a empty_symbol when empty.
      /// An int, not token_number_type, to be able to store empty_symbol.
      int type;
    };

    /// "External" symbols: returned by the scanner.
    typedef basic_symbol<by_type> symbol_type;


    /// Build a parser object.
    parser (iresearch::iql::context& ctx_yyarg);
    virtual ~parser ();

    /// Parse.
    /// \returns 0 iff parsing succeeded.
    virtual int parse ();

    /// The current debugging stream.
    std::ostream& debug_stream () const;
    /// Set the current debugging stream.
    void set_debug_stream (std::ostream &);

    /// Type for debugging levels.
    typedef int debug_level_type;
    /// The current debugging level.
    debug_level_type debug_level () const;
    /// Set the current debugging level.
    void set_debug_level (debug_level_type l);

  public:
    /// Report a syntax error.
    /// \param loc where the syntax error is found.
    /// \param msg a description of the syntax error.
    virtual void error (const location_type& loc, const std::string& msg);

# if YYDEBUG
  public:
    /// \brief Report a symbol value on the debug stream.
    /// \param yytype The token type.
    /// \param yyvaluep Its semantic value.
    /// \param yylocationp Its location.
    virtual void yy_symbol_value_print_ (int yytype,
                                         const semantic_type* yyvaluep,
                                         const location_type* yylocationp);
    /// \brief Report a symbol on the debug stream.
    /// \param yytype The token type.
    /// \param yyvaluep Its semantic value.
    /// \param yylocationp Its location.
    virtual void yy_symbol_print_ (int yytype,
                                   const semantic_type* yyvaluep,
                                   const location_type* yylocationp);
  private:
    // Debugging.
    std::ostream* yycdebug_;
#endif


    // User arguments.
    iresearch::iql::context& ctx;
  };



#ifndef YYSTYPE
# define YYSTYPE iresearch::iql::parser::semantic_type
#endif
#ifndef YYLTYPE
# define YYLTYPE iresearch::iql::parser::location_type
#endif

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // glr.cc:329
} } // iresearch::iql
#line 312 "parser.hh" // glr.cc:329
// // "%code provides" blocks.
#line 76 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // glr.cc:329

// ALWAYS!!! define YYDEBUG 1 for the length of the header so as to avoid
// alignment issues when linking without YYDEBUG against a library that was
// built with YYDEBUG or vice versa, reverse at end of header
#ifdef YYDEBUG_REQUESTED
  #undef YYDEBUG_REQUESTED
#else
  #undef YYDEBUG
  #define YYDEBUG 0
#endif

// end of suppress of warnings due to Bison generated code (pop section, push just above)
#if defined(_MSC_VER)
  #pragma warning(default : 4512)
#endif

namespace iresearch {
  namespace iql {
    class context {
    public:
      // destructor
      virtual ~context() = default;

      // parser operations
      virtual void yyerror(parser::location_type const& location, std::string const& sError) = 0;
      virtual parser::token_type yylex(parser::semantic_type& value, parser::location_type& location) = 0;

      // value operations
      virtual parser::semantic_type sequence(parser::location_type const& location) = 0;

      // node operations
      virtual parser::semantic_type append(parser::semantic_type const& value, parser::location_type const& location) = 0;
      virtual parser::semantic_type boost(parser::semantic_type const& value, parser::location_type const& location) = 0;
      virtual parser::semantic_type function(parser::semantic_type const& name, parser::semantic_type const& args) = 0;
      virtual parser::semantic_type list(parser::semantic_type const& value1, parser::semantic_type const& value2) = 0;
      virtual parser::semantic_type negation(parser::semantic_type const& value) = 0;
      virtual parser::semantic_type range(parser::semantic_type const& value1, bool bInclusive1, parser::semantic_type const& value2, bool bInclusive2) = 0;

      // comparison operations
      virtual parser::semantic_type op_eq(parser::semantic_type const& value1, parser::semantic_type const& value2) = 0;
      virtual parser::semantic_type op_like(parser::semantic_type const& value1, parser::semantic_type const& value2) = 0;

      // filter operations
      virtual parser::semantic_type op_and(parser::semantic_type const& value1, parser::semantic_type const& value2) = 0;
      virtual parser::semantic_type op_or(parser::semantic_type const& value1, parser::semantic_type const& value2) = 0;

      // query operations
      virtual bool addOrder(parser::semantic_type const& value, bool bAscending = true) = 0;
      virtual bool setLimit(parser::semantic_type const& value) = 0;
      virtual bool setQuery(parser::semantic_type const& value) = 0;
    };

    void debug(parser& parser, bool bEnable);

    // this is required only for %skeleton "glr.cc" since it lacks default init
    // not needed for %skeleton "lalr1.cc" or without %skeleton
#if YYDEBUG
    class location_type1: public location {};
#endif
  }
}

#line 377 "parser.hh" // glr.cc:329


#endif // !YY_YY_PARSER_HH_INCLUDED
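The context class declared in the "%code provides" block above is the entire surface a caller must implement to drive this parser; a do-nothing sketch (hypothetical NullContext, not part of the commit) shows the shape:

// Hypothetical NullContext: the smallest possible implementation of the
// context interface above; every callback is a stub. Since YYSTYPE is
// size_t here, returning 0 produces a valid (if meaningless) node handle.
#include <string>

namespace iresearch { namespace iql {

class NullContext: public context {
 public:
  typedef parser::semantic_type val_t;

  // parser operations: swallow errors, report end-of-input immediately
  virtual void yyerror(parser::location_type const&, std::string const&) {}
  virtual parser::token_type yylex(val_t&, parser::location_type&) {
    return parser::token::IQL_EOF;
  }

  // value operations
  virtual val_t sequence(parser::location_type const&) { return 0; }

  // node operations
  virtual val_t append(val_t const&, parser::location_type const&) { return 0; }
  virtual val_t boost(val_t const&, parser::location_type const&) { return 0; }
  virtual val_t function(val_t const&, val_t const&) { return 0; }
  virtual val_t list(val_t const&, val_t const&) { return 0; }
  virtual val_t negation(val_t const&) { return 0; }
  virtual val_t range(val_t const&, bool, val_t const&, bool) { return 0; }

  // comparison operations
  virtual val_t op_eq(val_t const&, val_t const&) { return 0; }
  virtual val_t op_like(val_t const&, val_t const&) { return 0; }

  // filter operations
  virtual val_t op_and(val_t const&, val_t const&) { return 0; }
  virtual val_t op_or(val_t const&, val_t const&) { return 0; }

  // query operations
  virtual bool addOrder(val_t const&, bool = true) { return true; }
  virtual bool setLimit(val_t const&) { return true; }
  virtual bool setQuery(val_t const&) { return true; }
};

} } // iresearch::iql

// usage sketch: NullContext ctx; iresearch::iql::parser p(ctx); p.parse();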
@@ -0,0 +1,169 @@
// A Bison parser, made by GNU Bison 3.0.4.

// Positions for Bison parsers in C++

// Copyright (C) 2002-2015 Free Software Foundation, Inc.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

// As a special exception, you may create a larger work that contains
// part or all of the Bison parser skeleton and distribute that work
// under terms of your choice, so long as that work isn't itself a
// parser generator using the skeleton or a modified version thereof
// as a parser skeleton. Alternatively, if you modify or redistribute
// the parser skeleton itself, you may (at your option) remove this
// special exception, which will cause the skeleton and the resulting
// Bison output files to be licensed under the GNU General Public
// License without this special exception.

// This special exception was added by the Free Software Foundation in
// version 2.2 of Bison.

/**
 ** \file position.hh
 ** Define the iresearch::iql::position class.
 */

#ifndef YY_YY_POSITION_HH_INCLUDED
# define YY_YY_POSITION_HH_INCLUDED

# include <algorithm> // std::max
# include <iostream>
# include <string>

# ifndef YY_NULLPTR
#  if defined __cplusplus && 201103L <= __cplusplus
#   define YY_NULLPTR nullptr
#  else
#   define YY_NULLPTR 0
#  endif
# endif

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // location.cc:296
namespace iresearch { namespace iql {
#line 56 "position.hh" // location.cc:296
  /// Abstract a position.
  class position
  {
  public:
    /// Initialization.
    void initialize (std::string* fn = YY_NULLPTR,
                     unsigned int l = 1u,
                     unsigned int c = 1u)
    {
      filename = fn;
      line = l;
      column = c;
    }

    /** \name Line and Column related manipulators
     ** \{ */
    /// (line related) Advance to the COUNT next lines.
    void lines (int count = 1)
    {
      if (count)
        {
          column = 1u;
          line = add_ (line, count, 1);
        }
    }

    /// (column related) Advance to the COUNT next columns.
    void columns (int count = 1)
    {
      column = add_ (column, count, 1);
    }
    /** \} */

    /// File name to which this position refers.
    std::string* filename;
    /// Current line number.
    unsigned int line;
    /// Current column number.
    unsigned int column;

  private:
    /// Compute max(min, lhs+rhs) (provided min <= lhs).
    static unsigned int add_ (unsigned int lhs, int rhs, unsigned int min)
    {
      return (0 < rhs || -static_cast<unsigned int>(rhs) < lhs
              ? rhs + lhs
              : min);
    }
  };

  /// Add \a width columns, in place.
  inline position&
  operator+= (position& res, int width)
  {
    res.columns (width);
    return res;
  }

  /// Add \a width columns.
  inline position
  operator+ (position res, int width)
  {
    return res += width;
  }

  /// Subtract \a width columns, in place.
  inline position&
  operator-= (position& res, int width)
  {
    return res += -width;
  }

  /// Subtract \a width columns.
  inline position
  operator- (position res, int width)
  {
    return res -= width;
  }

  /// Compare two position objects.
  inline bool
  operator== (const position& pos1, const position& pos2)
  {
    return (pos1.line == pos2.line
            && pos1.column == pos2.column
            && (pos1.filename == pos2.filename
                || (pos1.filename && pos2.filename
                    && *pos1.filename == *pos2.filename)));
  }

  /// Compare two position objects.
  inline bool
  operator!= (const position& pos1, const position& pos2)
  {
    return !(pos1 == pos2);
  }

  /** \brief Intercept output stream redirection.
   ** \param ostr the destination output stream
   ** \param pos a reference to the position to redirect
   */
  template <typename YYChar>
  inline std::basic_ostream<YYChar>&
  operator<< (std::basic_ostream<YYChar>& ostr, const position& pos)
  {
    if (pos.filename)
      ostr << *pos.filename << ':';
    return ostr << pos.line << '.' << pos.column;
  }

#line 31 "/home/user/git-root/arangodb-iresearch/3rdParty/iresearch/core/iql/parser.yy" // location.cc:296
} } // iresearch::iql
#line 169 "position.hh" // location.cc:296
#endif // !YY_YY_POSITION_HH_INCLUDED
@@ -0,0 +1,209 @@
/* /home/user/git-root/arangodb-iresearch/build/qtcreator/Debug/3rdParty/iresearch/external/snowball/libstemmer/modules.h: List of stemming modules.
 *
 * This file is generated by mkmodules.pl from a list of module names.
 * Do not edit manually.
 *
 * Modules included by this file are: arabic, danish, dutch, english, finnish,
 * french, german, hungarian, irish, italian, norwegian, porter, portuguese,
 * romanian, russian, spanish, swedish, tamil, turkish
 */

#include "../libstemmer/stem_UTF_8_arabic.h"
#include "../libstemmer/stem_ISO_8859_1_danish.h"
#include "../libstemmer/stem_UTF_8_danish.h"
#include "../libstemmer/stem_ISO_8859_1_dutch.h"
#include "../libstemmer/stem_UTF_8_dutch.h"
#include "../libstemmer/stem_ISO_8859_1_english.h"
#include "../libstemmer/stem_UTF_8_english.h"
#include "../libstemmer/stem_ISO_8859_1_finnish.h"
#include "../libstemmer/stem_UTF_8_finnish.h"
#include "../libstemmer/stem_ISO_8859_1_french.h"
#include "../libstemmer/stem_UTF_8_french.h"
#include "../libstemmer/stem_ISO_8859_1_german.h"
#include "../libstemmer/stem_UTF_8_german.h"
#include "../libstemmer/stem_ISO_8859_2_hungarian.h"
#include "../libstemmer/stem_UTF_8_hungarian.h"
#include "../libstemmer/stem_ISO_8859_1_irish.h"
#include "../libstemmer/stem_UTF_8_irish.h"
#include "../libstemmer/stem_ISO_8859_1_italian.h"
#include "../libstemmer/stem_UTF_8_italian.h"
#include "../libstemmer/stem_ISO_8859_1_norwegian.h"
#include "../libstemmer/stem_UTF_8_norwegian.h"
#include "../libstemmer/stem_ISO_8859_1_porter.h"
#include "../libstemmer/stem_UTF_8_porter.h"
#include "../libstemmer/stem_ISO_8859_1_portuguese.h"
#include "../libstemmer/stem_UTF_8_portuguese.h"
#include "../libstemmer/stem_ISO_8859_2_romanian.h"
#include "../libstemmer/stem_UTF_8_romanian.h"
#include "../libstemmer/stem_KOI8_R_russian.h"
#include "../libstemmer/stem_UTF_8_russian.h"
#include "../libstemmer/stem_ISO_8859_1_spanish.h"
#include "../libstemmer/stem_UTF_8_spanish.h"
#include "../libstemmer/stem_ISO_8859_1_swedish.h"
#include "../libstemmer/stem_UTF_8_swedish.h"
#include "../libstemmer/stem_UTF_8_tamil.h"
#include "../libstemmer/stem_UTF_8_turkish.h"

typedef enum {
  ENC_UNKNOWN=0,
  ENC_ISO_8859_1,
  ENC_ISO_8859_2,
  ENC_KOI8_R,
  ENC_UTF_8
} stemmer_encoding_t;

struct stemmer_encoding {
  const char * name;
  stemmer_encoding_t enc;
};
static struct stemmer_encoding encodings[] = {
  {"ISO_8859_1", ENC_ISO_8859_1},
  {"ISO_8859_2", ENC_ISO_8859_2},
  {"KOI8_R", ENC_KOI8_R},
  {"UTF_8", ENC_UTF_8},
  {0,ENC_UNKNOWN}
};

struct stemmer_modules {
  const char * name;
  stemmer_encoding_t enc;
  struct SN_env * (*create)(void);
  void (*close)(struct SN_env *);
  int (*stem)(struct SN_env *);
};
static struct stemmer_modules modules[] = {
  {"ar", ENC_UTF_8, arabic_UTF_8_create_env, arabic_UTF_8_close_env, arabic_UTF_8_stem},
  {"ara", ENC_UTF_8, arabic_UTF_8_create_env, arabic_UTF_8_close_env, arabic_UTF_8_stem},
  {"arabic", ENC_UTF_8, arabic_UTF_8_create_env, arabic_UTF_8_close_env, arabic_UTF_8_stem},
  {"da", ENC_ISO_8859_1, danish_ISO_8859_1_create_env, danish_ISO_8859_1_close_env, danish_ISO_8859_1_stem},
  {"da", ENC_UTF_8, danish_UTF_8_create_env, danish_UTF_8_close_env, danish_UTF_8_stem},
  {"dan", ENC_ISO_8859_1, danish_ISO_8859_1_create_env, danish_ISO_8859_1_close_env, danish_ISO_8859_1_stem},
  {"dan", ENC_UTF_8, danish_UTF_8_create_env, danish_UTF_8_close_env, danish_UTF_8_stem},
  {"danish", ENC_ISO_8859_1, danish_ISO_8859_1_create_env, danish_ISO_8859_1_close_env, danish_ISO_8859_1_stem},
  {"danish", ENC_UTF_8, danish_UTF_8_create_env, danish_UTF_8_close_env, danish_UTF_8_stem},
  {"de", ENC_ISO_8859_1, german_ISO_8859_1_create_env, german_ISO_8859_1_close_env, german_ISO_8859_1_stem},
  {"de", ENC_UTF_8, german_UTF_8_create_env, german_UTF_8_close_env, german_UTF_8_stem},
  {"deu", ENC_ISO_8859_1, german_ISO_8859_1_create_env, german_ISO_8859_1_close_env, german_ISO_8859_1_stem},
  {"deu", ENC_UTF_8, german_UTF_8_create_env, german_UTF_8_close_env, german_UTF_8_stem},
  {"dut", ENC_ISO_8859_1, dutch_ISO_8859_1_create_env, dutch_ISO_8859_1_close_env, dutch_ISO_8859_1_stem},
  {"dut", ENC_UTF_8, dutch_UTF_8_create_env, dutch_UTF_8_close_env, dutch_UTF_8_stem},
  {"dutch", ENC_ISO_8859_1, dutch_ISO_8859_1_create_env, dutch_ISO_8859_1_close_env, dutch_ISO_8859_1_stem},
  {"dutch", ENC_UTF_8, dutch_UTF_8_create_env, dutch_UTF_8_close_env, dutch_UTF_8_stem},
  {"en", ENC_ISO_8859_1, english_ISO_8859_1_create_env, english_ISO_8859_1_close_env, english_ISO_8859_1_stem},
  {"en", ENC_UTF_8, english_UTF_8_create_env, english_UTF_8_close_env, english_UTF_8_stem},
  {"eng", ENC_ISO_8859_1, english_ISO_8859_1_create_env, english_ISO_8859_1_close_env, english_ISO_8859_1_stem},
  {"eng", ENC_UTF_8, english_UTF_8_create_env, english_UTF_8_close_env, english_UTF_8_stem},
  {"english", ENC_ISO_8859_1, english_ISO_8859_1_create_env, english_ISO_8859_1_close_env, english_ISO_8859_1_stem},
  {"english", ENC_UTF_8, english_UTF_8_create_env, english_UTF_8_close_env, english_UTF_8_stem},
  {"es", ENC_ISO_8859_1, spanish_ISO_8859_1_create_env, spanish_ISO_8859_1_close_env, spanish_ISO_8859_1_stem},
  {"es", ENC_UTF_8, spanish_UTF_8_create_env, spanish_UTF_8_close_env, spanish_UTF_8_stem},
  {"esl", ENC_ISO_8859_1, spanish_ISO_8859_1_create_env, spanish_ISO_8859_1_close_env, spanish_ISO_8859_1_stem},
  {"esl", ENC_UTF_8, spanish_UTF_8_create_env, spanish_UTF_8_close_env, spanish_UTF_8_stem},
  {"fi", ENC_ISO_8859_1, finnish_ISO_8859_1_create_env, finnish_ISO_8859_1_close_env, finnish_ISO_8859_1_stem},
  {"fi", ENC_UTF_8, finnish_UTF_8_create_env, finnish_UTF_8_close_env, finnish_UTF_8_stem},
  {"fin", ENC_ISO_8859_1, finnish_ISO_8859_1_create_env, finnish_ISO_8859_1_close_env, finnish_ISO_8859_1_stem},
  {"fin", ENC_UTF_8, finnish_UTF_8_create_env, finnish_UTF_8_close_env, finnish_UTF_8_stem},
  {"finnish", ENC_ISO_8859_1, finnish_ISO_8859_1_create_env, finnish_ISO_8859_1_close_env, finnish_ISO_8859_1_stem},
  {"finnish", ENC_UTF_8, finnish_UTF_8_create_env, finnish_UTF_8_close_env, finnish_UTF_8_stem},
  {"fr", ENC_ISO_8859_1, french_ISO_8859_1_create_env, french_ISO_8859_1_close_env, french_ISO_8859_1_stem},
  {"fr", ENC_UTF_8, french_UTF_8_create_env, french_UTF_8_close_env, french_UTF_8_stem},
  {"fra", ENC_ISO_8859_1, french_ISO_8859_1_create_env, french_ISO_8859_1_close_env, french_ISO_8859_1_stem},
  {"fra", ENC_UTF_8, french_UTF_8_create_env, french_UTF_8_close_env, french_UTF_8_stem},
  {"fre", ENC_ISO_8859_1, french_ISO_8859_1_create_env, french_ISO_8859_1_close_env, french_ISO_8859_1_stem},
  {"fre", ENC_UTF_8, french_UTF_8_create_env, french_UTF_8_close_env, french_UTF_8_stem},
  {"french", ENC_ISO_8859_1, french_ISO_8859_1_create_env, french_ISO_8859_1_close_env, french_ISO_8859_1_stem},
  {"french", ENC_UTF_8, french_UTF_8_create_env, french_UTF_8_close_env, french_UTF_8_stem},
  {"ga", ENC_ISO_8859_1, irish_ISO_8859_1_create_env, irish_ISO_8859_1_close_env, irish_ISO_8859_1_stem},
  {"ga", ENC_UTF_8, irish_UTF_8_create_env, irish_UTF_8_close_env, irish_UTF_8_stem},
  {"ger", ENC_ISO_8859_1, german_ISO_8859_1_create_env, german_ISO_8859_1_close_env, german_ISO_8859_1_stem},
  {"ger", ENC_UTF_8, german_UTF_8_create_env, german_UTF_8_close_env, german_UTF_8_stem},
  {"german", ENC_ISO_8859_1, german_ISO_8859_1_create_env, german_ISO_8859_1_close_env, german_ISO_8859_1_stem},
  {"german", ENC_UTF_8, german_UTF_8_create_env, german_UTF_8_close_env, german_UTF_8_stem},
  {"gle", ENC_ISO_8859_1, irish_ISO_8859_1_create_env, irish_ISO_8859_1_close_env, irish_ISO_8859_1_stem},
  {"gle", ENC_UTF_8, irish_UTF_8_create_env, irish_UTF_8_close_env, irish_UTF_8_stem},
  {"hu", ENC_ISO_8859_2, hungarian_ISO_8859_2_create_env, hungarian_ISO_8859_2_close_env, hungarian_ISO_8859_2_stem},
  {"hu", ENC_UTF_8, hungarian_UTF_8_create_env, hungarian_UTF_8_close_env, hungarian_UTF_8_stem},
  {"hun", ENC_ISO_8859_2, hungarian_ISO_8859_2_create_env, hungarian_ISO_8859_2_close_env, hungarian_ISO_8859_2_stem},
  {"hun", ENC_UTF_8, hungarian_UTF_8_create_env, hungarian_UTF_8_close_env, hungarian_UTF_8_stem},
  {"hungarian", ENC_ISO_8859_2, hungarian_ISO_8859_2_create_env, hungarian_ISO_8859_2_close_env, hungarian_ISO_8859_2_stem},
  {"hungarian", ENC_UTF_8, hungarian_UTF_8_create_env, hungarian_UTF_8_close_env, hungarian_UTF_8_stem},
  {"irish", ENC_ISO_8859_1, irish_ISO_8859_1_create_env, irish_ISO_8859_1_close_env, irish_ISO_8859_1_stem},
  {"irish", ENC_UTF_8, irish_UTF_8_create_env, irish_UTF_8_close_env, irish_UTF_8_stem},
  {"it", ENC_ISO_8859_1, italian_ISO_8859_1_create_env, italian_ISO_8859_1_close_env, italian_ISO_8859_1_stem},
  {"it", ENC_UTF_8, italian_UTF_8_create_env, italian_UTF_8_close_env, italian_UTF_8_stem},
  {"ita", ENC_ISO_8859_1, italian_ISO_8859_1_create_env, italian_ISO_8859_1_close_env, italian_ISO_8859_1_stem},
  {"ita", ENC_UTF_8, italian_UTF_8_create_env, italian_UTF_8_close_env, italian_UTF_8_stem},
  {"italian", ENC_ISO_8859_1, italian_ISO_8859_1_create_env, italian_ISO_8859_1_close_env, italian_ISO_8859_1_stem},
  {"italian", ENC_UTF_8, italian_UTF_8_create_env, italian_UTF_8_close_env, italian_UTF_8_stem},
  {"nl", ENC_ISO_8859_1, dutch_ISO_8859_1_create_env, dutch_ISO_8859_1_close_env, dutch_ISO_8859_1_stem},
  {"nl", ENC_UTF_8, dutch_UTF_8_create_env, dutch_UTF_8_close_env, dutch_UTF_8_stem},
  {"nld", ENC_ISO_8859_1, dutch_ISO_8859_1_create_env, dutch_ISO_8859_1_close_env, dutch_ISO_8859_1_stem},
  {"nld", ENC_UTF_8, dutch_UTF_8_create_env, dutch_UTF_8_close_env, dutch_UTF_8_stem},
  {"no", ENC_ISO_8859_1, norwegian_ISO_8859_1_create_env, norwegian_ISO_8859_1_close_env, norwegian_ISO_8859_1_stem},
  {"no", ENC_UTF_8, norwegian_UTF_8_create_env, norwegian_UTF_8_close_env, norwegian_UTF_8_stem},
  {"nor", ENC_ISO_8859_1, norwegian_ISO_8859_1_create_env, norwegian_ISO_8859_1_close_env, norwegian_ISO_8859_1_stem},
  {"nor", ENC_UTF_8, norwegian_UTF_8_create_env, norwegian_UTF_8_close_env, norwegian_UTF_8_stem},
  {"norwegian", ENC_ISO_8859_1, norwegian_ISO_8859_1_create_env, norwegian_ISO_8859_1_close_env, norwegian_ISO_8859_1_stem},
  {"norwegian", ENC_UTF_8, norwegian_UTF_8_create_env, norwegian_UTF_8_close_env, norwegian_UTF_8_stem},
  {"por", ENC_ISO_8859_1, portuguese_ISO_8859_1_create_env, portuguese_ISO_8859_1_close_env, portuguese_ISO_8859_1_stem},
  {"por", ENC_UTF_8, portuguese_UTF_8_create_env, portuguese_UTF_8_close_env, portuguese_UTF_8_stem},
  {"porter", ENC_ISO_8859_1, porter_ISO_8859_1_create_env, porter_ISO_8859_1_close_env, porter_ISO_8859_1_stem},
  {"porter", ENC_UTF_8, porter_UTF_8_create_env, porter_UTF_8_close_env, porter_UTF_8_stem},
  {"portuguese", ENC_ISO_8859_1, portuguese_ISO_8859_1_create_env, portuguese_ISO_8859_1_close_env, portuguese_ISO_8859_1_stem},
  {"portuguese", ENC_UTF_8, portuguese_UTF_8_create_env, portuguese_UTF_8_close_env, portuguese_UTF_8_stem},
  {"pt", ENC_ISO_8859_1, portuguese_ISO_8859_1_create_env, portuguese_ISO_8859_1_close_env, portuguese_ISO_8859_1_stem},
  {"pt", ENC_UTF_8, portuguese_UTF_8_create_env, portuguese_UTF_8_close_env, portuguese_UTF_8_stem},
  {"ro", ENC_ISO_8859_2, romanian_ISO_8859_2_create_env, romanian_ISO_8859_2_close_env, romanian_ISO_8859_2_stem},
  {"ro", ENC_UTF_8, romanian_UTF_8_create_env, romanian_UTF_8_close_env, romanian_UTF_8_stem},
  {"romanian", ENC_ISO_8859_2, romanian_ISO_8859_2_create_env, romanian_ISO_8859_2_close_env, romanian_ISO_8859_2_stem},
  {"romanian", ENC_UTF_8, romanian_UTF_8_create_env, romanian_UTF_8_close_env, romanian_UTF_8_stem},
  {"ron", ENC_ISO_8859_2, romanian_ISO_8859_2_create_env, romanian_ISO_8859_2_close_env, romanian_ISO_8859_2_stem},
  {"ron", ENC_UTF_8, romanian_UTF_8_create_env, romanian_UTF_8_close_env, romanian_UTF_8_stem},
  {"ru", ENC_KOI8_R, russian_KOI8_R_create_env, russian_KOI8_R_close_env, russian_KOI8_R_stem},
  {"ru", ENC_UTF_8, russian_UTF_8_create_env, russian_UTF_8_close_env, russian_UTF_8_stem},
  {"rum", ENC_ISO_8859_2, romanian_ISO_8859_2_create_env, romanian_ISO_8859_2_close_env, romanian_ISO_8859_2_stem},
  {"rum", ENC_UTF_8, romanian_UTF_8_create_env, romanian_UTF_8_close_env, romanian_UTF_8_stem},
  {"rus", ENC_KOI8_R, russian_KOI8_R_create_env, russian_KOI8_R_close_env, russian_KOI8_R_stem},
  {"rus", ENC_UTF_8, russian_UTF_8_create_env, russian_UTF_8_close_env, russian_UTF_8_stem},
  {"russian", ENC_KOI8_R, russian_KOI8_R_create_env, russian_KOI8_R_close_env, russian_KOI8_R_stem},
  {"russian", ENC_UTF_8, russian_UTF_8_create_env, russian_UTF_8_close_env, russian_UTF_8_stem},
  {"spa", ENC_ISO_8859_1, spanish_ISO_8859_1_create_env, spanish_ISO_8859_1_close_env, spanish_ISO_8859_1_stem},
  {"spa", ENC_UTF_8, spanish_UTF_8_create_env, spanish_UTF_8_close_env, spanish_UTF_8_stem},
  {"spanish", ENC_ISO_8859_1, spanish_ISO_8859_1_create_env, spanish_ISO_8859_1_close_env, spanish_ISO_8859_1_stem},
  {"spanish", ENC_UTF_8, spanish_UTF_8_create_env, spanish_UTF_8_close_env, spanish_UTF_8_stem},
  {"sv", ENC_ISO_8859_1, swedish_ISO_8859_1_create_env, swedish_ISO_8859_1_close_env, swedish_ISO_8859_1_stem},
  {"sv", ENC_UTF_8, swedish_UTF_8_create_env, swedish_UTF_8_close_env, swedish_UTF_8_stem},
  {"swe", ENC_ISO_8859_1, swedish_ISO_8859_1_create_env, swedish_ISO_8859_1_close_env, swedish_ISO_8859_1_stem},
  {"swe", ENC_UTF_8, swedish_UTF_8_create_env, swedish_UTF_8_close_env, swedish_UTF_8_stem},
  {"swedish", ENC_ISO_8859_1, swedish_ISO_8859_1_create_env, swedish_ISO_8859_1_close_env, swedish_ISO_8859_1_stem},
  {"swedish", ENC_UTF_8, swedish_UTF_8_create_env, swedish_UTF_8_close_env, swedish_UTF_8_stem},
  {"ta", ENC_UTF_8, tamil_UTF_8_create_env, tamil_UTF_8_close_env, tamil_UTF_8_stem},
  {"tam", ENC_UTF_8, tamil_UTF_8_create_env, tamil_UTF_8_close_env, tamil_UTF_8_stem},
  {"tamil", ENC_UTF_8, tamil_UTF_8_create_env, tamil_UTF_8_close_env, tamil_UTF_8_stem},
  {"tr", ENC_UTF_8, turkish_UTF_8_create_env, turkish_UTF_8_close_env, turkish_UTF_8_stem},
  {"tur", ENC_UTF_8, turkish_UTF_8_create_env, turkish_UTF_8_close_env, turkish_UTF_8_stem},
  {"turkish", ENC_UTF_8, turkish_UTF_8_create_env, turkish_UTF_8_close_env, turkish_UTF_8_stem},
  {0,ENC_UNKNOWN,0,0,0}
};
static const char * algorithm_names[] = {
  "arabic",
  "danish",
  "dutch",
  "english",
  "finnish",
  "french",
  "german",
  "hungarian",
  "irish",
  "italian",
  "norwegian",
  "porter",
  "portuguese",
  "romanian",
  "russian",
  "spanish",
  "swedish",
  "tamil",
  "turkish",
  0
};
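Side note: the table above is normally consumed by libstemmer's own lookup code; purely as an illustration, a linear scan over it (hypothetical helper, placed in the same translation unit since the arrays are static) would look like:

#include <cstring> // std::strcmp

// Hypothetical helper, not part of the generated file: locate the module for
// an algorithm name and encoding by scanning the null-terminated table above.
static const struct stemmer_modules* find_module(const char* name,
                                                 stemmer_encoding_t enc) {
  for (const struct stemmer_modules* m = modules; m->name != 0; ++m) {
    if (m->enc == enc && std::strcmp(m->name, name) == 0) {
      return m; // caller then uses m->create(), m->stem(), m->close()
    }
  }
  return 0; // no such algorithm/encoding combination
}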
@@ -293,20 +293,24 @@ if(MSVC)
 )
 endif()

+if (USE_IQL)
 add_custom_command(
   OUTPUT iql/parser.cc
   MAIN_DEPENDENCY ${CMAKE_CURRENT_SOURCE_DIR}/iql/parser.yy
-  DEPENDS iql ${CMAKE_CURRENT_SOURCE_DIR}/iql/parser.yy
+  DEPENDS iql ${CMAKE_CURRENT_SOURCE_DIR}/iql/parser.yy touch_iql_parser.cc
   COMMAND bison --graph --report=all -o parser.cc ${CMAKE_CURRENT_SOURCE_DIR}/iql/parser.yy
   WORKING_DIRECTORY iql
 )

-add_custom_command(
+add_custom_target(touch_iql_parser.cc
+  DEPENDS iql
+  COMMAND ${CMAKE_COMMAND} -E touch_nocreate parser.cc
+  WORKING_DIRECTORY iql
+)
+
+add_custom_command(
   OUTPUT iql
   COMMAND ${CMAKE_COMMAND} -E make_directory iql
 )
+endif()


 add_custom_command(
   OUTPUT utils
@@ -24,7 +24,7 @@
 #ifndef _MSC_VER
 #include <execinfo.h> // for backtrace(...)

-#ifndef __APPLE__
+#if !defined(__APPLE__) && defined(__GLIBC__)
 #include <malloc.h>
 #endif
 #endif
@@ -38,13 +38,9 @@ void dump_mem_stats_trace() NOEXCEPT {
 #ifndef _MSC_VER

 // MacOS does not have malloc.h and hence no mallinfo() or malloc_stats()
-#ifndef __APPLE__
-// The following macro exists only in GLIBC, where we have mallinfo().
-// If it is not defined, we assume to be on libmusl or something like this
-// where we do not have mallinfo.
-
-#ifdef M_ARENA_MAX
+// libmusl does not define mallinfo() or malloc_stats() in malloc.h
+// enable mallinfo() and malloc_stats() for GLIBC only
+#if !defined(__APPLE__) && defined(__GLIBC__)
 // ...........................................................................
 // output mallinfo()
 // ...........................................................................
@@ -81,7 +77,6 @@ Topmost releasable block (keepcost): %lu\n\
 // output malloc_stats()
 // ...........................................................................
 malloc_stats(); // outputs to stderr
-#endif
 #endif

 // ...........................................................................
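The net effect of these two hunks is that mallinfo()/malloc_stats() are only referenced under glibc; a standalone sketch of the same guard (illustrative only, not from the commit) that compiles on glibc, musl, and macOS alike:

// mallinfo() and malloc_stats() are declared only by glibc's <malloc.h>,
// so the calls are compiled in only when __GLIBC__ is defined.
#include <cstdio>

#ifndef _MSC_VER
  #if !defined(__APPLE__) && defined(__GLIBC__)
    #include <malloc.h>
  #endif
#endif

void dump_mem_stats_sketch() {
#if !defined(_MSC_VER) && !defined(__APPLE__) && defined(__GLIBC__)
  struct mallinfo mi = mallinfo(); // snapshot of glibc allocator counters
  std::printf("total allocated space: %d\n", mi.uordblks);
  malloc_stats(); // glibc-only; writes a summary to stderr
#else
  std::printf("allocator statistics not available on this platform\n");
#endif
}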
@@ -17,11 +17,11 @@ INCLUDE(CheckCCompilerFlag)
 INCLUDE(FindPerl)

 IF(NOT PERL_FOUND)
-  MESSAGE(FATAL_ERROR "Perl required to build snowball")
+  MESSAGE(FATAL "Perl required to build snowball")
 ENDIF()

 IF(NOT STEMMER_SOURCE_DIR)
-  MESSAGE(FATAL_ERROR "Source directory required to build snowball")
+  MESSAGE(FATAL "Source directory required to build snowball")
 ENDIF()

 SET(CMAKE_CURRENT_SOURCE_DIR "${STEMMER_SOURCE_DIR}")
@@ -227,6 +227,7 @@ ADD_CUSTOM_TARGET(modules DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/libstemmer/module
 SET(STEMMER_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/libstemmer/libstemmer.c")
 ADD_CUSTOM_TARGET(stemmer_deps ALL)
 ADD_DEPENDENCIES(stemmer_deps modules)
+ADD_DEPENDENCIES(stemmer_deps snowball) # ADD_CUSTOM_COMMAND(... DEPENDS snowball) does not always seem to apply

 gen_stem("${LIBSTEM_ALGORITHMS}" "UTF_8")
 gen_stem("${KOI8_ALGORITHMS}" "KOI8_R")
@@ -6,7 +6,7 @@ integrate with and natively expose the full power of the
 [IResearch library](https://github.com/iresearch-toolkit/iresearch)
 to the ArangoDB user.

-It provides the capability to:
+They provide the capability to:
 * evaluate together documents located in different collections
 * filter documents based on AQL boolean expressions and functions
 * sort the resultset based on how closely each document matched the filter
@@ -96,11 +96,11 @@ PHRASE(attribute-name,

 Search for a phrase in the referenced attributes.

-The phrase can be expressed as an arbitrary number of *phraseParts* optionally separated by *skipToken* number of tokens.
+The phrase can be expressed as an arbitrary number of *phraseParts* separated by *skipToken* number of tokens.

 - *attribute-name* - the path of the attribute to compare against in the document
 - *phrasePart* - a string to search in the token stream; may consist of several words; will be split using the specified *analyzer*
-- *skipTokens* number of words or tokens to treat as wildcard
+- *skipTokens* number of words or tokens to treat as wildcards
 - *analyzer* - string with the analyzer used, i.e. *"text_en"* or [one of the other available string analyzers](../../../Manual/Views/ArangoSearch/Analyzers.html)

 ### STARTS_WITH()
@@ -109,7 +109,7 @@ The phrase can be expressed as an arbitrary number of *phraseParts* optionally s

 Match the value of the **attribute-name** that starts with **prefix**

-- *attribute-name* - the path of the attribute to compare agaainst in the document
+- *attribute-name* - the path of the attribute to compare against in the document
 - *prefix* - a string to search at the start of the text

 ### TOKENS()
@@ -164,13 +164,13 @@ or
 to match documents where 'description' contains a phrase 'quick brown'

     FOR doc IN VIEW someView
-      FILTER PHRASE(doc.description, 'quick brown', 'text_en')
+      FILTER PHRASE(doc.description, [ 'quick brown' ], 'text_en')
       RETURN doc

 or

     FOR doc IN VIEW someView
-      FILTER PHRASE(doc['description'], 'quick brown', 'text_en')
+      FILTER PHRASE(doc['description'], [ 'quick brown' ], 'text_en')
       RETURN doc

 to match documents where 'body' contains the phrase consisting of a sequence
@@ -178,13 +178,13 @@ like this:
 'quick' * 'fox jumps' (where the asterisk can be any single word)

     FOR doc IN VIEW someView
-      FILTER PHRASE(doc.body, 'quick', 1, 'fox jumps', 'text_en')
+      FILTER PHRASE(doc.body, [ 'quick', 1, 'fox jumps' ], 'text_en')
       RETURN doc

 or

     FOR doc IN VIEW someView
-      FILTER PHRASE(doc['body'], 'quick', 1, 'fox jumps', 'text_en')
+      FILTER PHRASE(doc['body'], [ 'quick', 1, 'fox jumps' ], 'text_en')
       RETURN doc

 to match documents where 'story' starts with 'In the beginning'
@@ -220,3 +220,6 @@ and - (dash) characters only. Please refer to
 [NamingConventions](../DataModeling/NamingConventions/CollectionNames.md) for
 more information on valid view names, which follow the same guidelines as
 collection names.
+
+### IFF
+[if and only if](https://en.m.wikipedia.org/wiki/If_and_only_if)
@ -201,24 +201,24 @@ During view modification the following directives apply:
|
||||||
|
|
||||||
* bytes: (optional; for default values use an empty object: `{}`)
|
* bytes: (optional; for default values use an empty object: `{}`)
|
||||||
|
|
||||||
* intervalStep: (optional, default: `10`; to disable use: `0`)
|
* segmentThreshold: (optional, default: `300`; to disable use: `0`)
|
||||||
apply consolidation policy with every Nth commit
|
apply consolidation policy IFF {segmentThreshold} >= #segments
|
||||||
|
|
||||||
* threshold: (optional; default: `0.85`)
|
* threshold: (optional; default: `0.85`)
|
||||||
consolidate `IFF {threshold} > segment_bytes / (all_segment_bytes / #segments)`
|
consolidate `IFF {threshold} > segment_bytes / (all_segment_bytes / #segments)`
|
||||||
|
|
||||||
* bytes_accum: (optional; for default values use: `{}`)
|
* bytes_accum: (optional; for default values use: `{}`)
|
||||||
|
|
||||||
* intervalStep: (optional; default: `10`; to disable use: `0`)
|
* segmentThreshold: (optional; default: `300`; to disable use: `0`)
|
||||||
apply consolidation policy with every Nth commit
|
apply consolidation policy IFF {segmentThreshold} <= #segments
|
||||||
|
|
||||||
* threshold: (optional; default: `0.85`)
|
* threshold: (optional; default: `0.85`)
|
||||||
consolidate `IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes`
|
consolidate `IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes`
|
||||||
|
|
||||||
* count: (optional; for default values use: `{}`)
|
* count: (optional; for default values use: `{}`)
|
||||||
|
|
||||||
* intervalStep: (optional; default: `10`; to disable use: `0`)
|
* segmentThreshold: (optional; default: `300`; to disable use: `0`)
|
||||||
apply consolidation policy with every Nth commit
|
apply consolidation policy IFF {segmentThreshold} <= #segments
|
||||||
|
|
||||||
* threshold: (optional; default: `0.85`)
|
* threshold: (optional; default: `0.85`)
|
||||||
consolidate `IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments)`
|
consolidate `IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments)`
|
||||||
|
@ -226,8 +226,8 @@ During view modification the following directives apply:
|
||||||
* fill: (optional)
|
* fill: (optional)
|
||||||
if specified, use empty object for default values, i.e. `{}`
|
if specified, use empty object for default values, i.e. `{}`
|
||||||
|
|
||||||
* intervalStep: (optional; default: `10`; to disable use: `0`)
|
* segmentThreshold: (optional; default: `300`; to disable use: `0`)
|
||||||
apply consolidation policy with every Nth commit
|
apply consolidation policy IFF {segmentThreshold} <= #segments
|
||||||
|
|
||||||
* threshold: (optional; default: `0.85`)
|
* threshold: (optional; default: `0.85`)
|
||||||
consolidate `IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed})`
|
consolidate `IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed})`
|
||||||
|
|
|
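Taken together with the syncStore() logic later in this commit, each directive above boils down to two independent checks per policy: a segment-count gate and a size/count ratio. A minimal standalone C++ sketch of the 'bytes' variant (type and member names are illustrative, not the ArangoDB API):

    #include <cstddef>

    // Sketch of the 'bytes' consolidation policy described above.
    struct BytesPolicySketch {
      size_t segmentThreshold = 300; // 0 disables the policy
      double threshold = 0.85;

      // policy applies IFF {segmentThreshold} <= #segments
      bool applies(size_t segmentCount) const {
        return segmentThreshold && segmentThreshold <= segmentCount;
      }

      // consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments)
      bool consolidate(size_t segmentBytes, size_t allSegmentBytes, size_t segmentCount) const {
        double average = double(allSegmentBytes) / double(segmentCount);
        return threshold > double(segmentBytes) / average; // merge below-average segments
      }
    };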
@ -44,8 +44,8 @@ Specify properties for nested fields here
|
||||||
@RESTSTRUCT{bytes,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_bytes}
|
@RESTSTRUCT{bytes,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_bytes}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_patch_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_patch_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
||||||
|
@ -53,8 +53,8 @@ Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (d
|
||||||
@RESTSTRUCT{bytes_accum,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_bytes_accum}
|
@RESTSTRUCT{bytes_accum,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_bytes_accum}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_patch_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_patch_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
||||||
|
@ -62,8 +62,8 @@ Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_by
|
||||||
@RESTSTRUCT{count,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_count}
|
@RESTSTRUCT{count,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_count}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_patch_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_patch_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
||||||
|
@ -71,8 +71,8 @@ Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #
|
||||||
@RESTSTRUCT{fill,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_fill}
|
@RESTSTRUCT{fill,JSF_patch_api_view_props_consolidation,object,optional,JSF_patch_api_view_props_consolidation_fill}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_patch_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_patch_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_patch_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
||||||
|
|
|
@ -17,8 +17,8 @@ should be a JSON object containing the following attributes:
|
||||||
@RESTSTRUCT{bytes,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes}
|
@RESTSTRUCT{bytes,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
||||||
|
@ -26,8 +26,8 @@ Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (d
|
||||||
@RESTSTRUCT{bytes_accum,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes_accum}
|
@RESTSTRUCT{bytes_accum,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes_accum}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
||||||
|
@ -35,8 +35,8 @@ Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_by
|
||||||
@RESTSTRUCT{count,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_count}
|
@RESTSTRUCT{count,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_count}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
||||||
|
@ -44,8 +44,8 @@ Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #
|
||||||
@RESTSTRUCT{fill,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_fill}
|
@RESTSTRUCT{fill,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_fill}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
||||||
|
|
|
@ -44,8 +44,8 @@ Specify properties for nested fields here
|
||||||
@RESTSTRUCT{bytes,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes}
|
@RESTSTRUCT{bytes,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (default: 0.85)
|
||||||
|
@ -53,8 +53,8 @@ Consolidate IFF {threshold} > segment_bytes / (all_segment_bytes / #segments) (d
|
||||||
@RESTSTRUCT{bytes_accum,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes_accum}
|
@RESTSTRUCT{bytes_accum,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_bytes_accum}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_bytes_accum,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_bytes) / all_segment_bytes (default: 0.85)
|
||||||
|
@ -62,8 +62,8 @@ Consolidate IFF {threshold} > (segment_bytes + sum_of_merge_candidate_segment_by
|
||||||
@RESTSTRUCT{count,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_count}
|
@RESTSTRUCT{count,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_count}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_count,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #segments) (default: 0.85)
|
||||||
|
@ -71,8 +71,8 @@ Consolidate IFF {threshold} > segment_docs{valid} / (all_segment_docs{valid} / #
|
||||||
@RESTSTRUCT{fill,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_fill}
|
@RESTSTRUCT{fill,JSF_post_api_view_props_consolidation,object,optional,JSF_post_api_view_props_consolidation_fill}
|
||||||
Use empty object for default values, i.e. {}
|
Use empty object for default values, i.e. {}
|
||||||
|
|
||||||
@RESTSTRUCT{intervalStep,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{segmentThreshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Apply consolidation policy with every Nth commit (default: 10, to disable use: 0)
|
Apply consolidation policy IFF {segmentThreshold} <= #segments (default: 300, to disable use: 0)
|
||||||
|
|
||||||
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
@RESTSTRUCT{threshold,JSF_post_api_view_props_consolidation_fill,integer,optional,uint64}
|
||||||
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
Consolidate IFF {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed}) (default: 0.85)
|
||||||
|
|
|
@ -51,14 +51,16 @@ template<typename T>
|
||||||
class AsyncValue {
|
class AsyncValue {
|
||||||
typedef irs::async_utils::read_write_mutex::read_mutex ReadMutex;
|
typedef irs::async_utils::read_write_mutex::read_mutex ReadMutex;
|
||||||
public:
|
public:
|
||||||
|
AsyncValue(): _readMutex(_mutex) {}
|
||||||
T get() const { return _value.load(); }
|
T get() const { return _value.load(); }
|
||||||
ReadMutex mutex() const { return ReadMutex(_mutex); } // prevent modification
|
ReadMutex& mutex() const { return _readMutex; } // prevent modification
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
mutable irs::async_utils::read_write_mutex _mutex; // read-lock to prevent value modification
|
irs::async_utils::read_write_mutex _mutex; // read-lock to prevent value modification
|
||||||
|
mutable ReadMutex _readMutex; // object that can be referenced by std::unique_lock
|
||||||
std::atomic<T> _value;
|
std::atomic<T> _value;
|
||||||
|
|
||||||
explicit AsyncValue(T value): _value(value) {}
|
explicit AsyncValue(T value): _readMutex(_mutex), _value(value) {}
|
||||||
};
|
};
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
|
|
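The AsyncValue change above makes mutex() return a reference to one long-lived ReadMutex member instead of a temporary, so a caller's lock can outlive the accessor call. A standalone sketch of the same pattern with std::shared_mutex standing in for irs::async_utils::read_write_mutex (all names here are illustrative):

    #include <atomic>
    #include <shared_mutex>

    template<typename T>
    class AsyncValueSketch {
     public:
      explicit AsyncValueSketch(T value): _value(value) {}
      T get() const { return _value.load(); }
      // returns the long-lived member, not a temporary, so the caller's
      // lock remains valid after this call returns
      std::shared_mutex& mutex() const { return _mutex; } // prevent modification
     private:
      mutable std::shared_mutex _mutex;
      std::atomic<T> _value;
    };

    template<typename T>
    void readStable(AsyncValueSketch<T>& async) {
      std::shared_lock<std::shared_mutex> lock(async.mutex()); // read-lock, like ReadMutex
      T value = async.get(); // value cannot be swapped out while the lock is held
      (void)value;
    }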
@ -687,7 +687,7 @@ bool fromInArray(
|
||||||
|
|
||||||
bool attributeAccessFound = false;
|
bool attributeAccessFound = false;
|
||||||
for (size_t i = 0; i < n; ++i) {
|
for (size_t i = 0; i < n; ++i) {
|
||||||
attributeAccessFound |= bool(arangodb::iresearch::checkAttributeAccess(
|
attributeAccessFound |= (nullptr != arangodb::iresearch::checkAttributeAccess(
|
||||||
valueNode->getMemberUnchecked(i), *ctx.ref
|
valueNode->getMemberUnchecked(i), *ctx.ref
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
@ -1208,6 +1208,7 @@ bool fromFuncExists(
|
||||||
}
|
}
|
||||||
|
|
||||||
// PHRASE(<attribute>, <value> [, <offset>, <value>, ...], <analyzer>)
|
// PHRASE(<attribute>, <value> [, <offset>, <value>, ...], <analyzer>)
|
||||||
|
// PHRASE(<attribute>, '[' <value> [, <offset>, <value>, ...] ']', <analyzer>)
|
||||||
bool fromFuncPhrase(
|
bool fromFuncPhrase(
|
||||||
irs::boolean_filter* filter,
|
irs::boolean_filter* filter,
|
||||||
arangodb::iresearch::QueryContext const& ctx,
|
arangodb::iresearch::QueryContext const& ctx,
|
||||||
|
@ -1240,7 +1241,10 @@ bool fromFuncPhrase(
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ...........................................................................
|
||||||
// 1st argument defines a field
|
// 1st argument defines a field
|
||||||
|
// ...........................................................................
|
||||||
|
|
||||||
auto const* fieldArg = arangodb::iresearch::checkAttributeAccess(
|
auto const* fieldArg = arangodb::iresearch::checkAttributeAccess(
|
||||||
args.getMemberUnchecked(0), *ctx.ref
|
args.getMemberUnchecked(0), *ctx.ref
|
||||||
);
|
);
|
||||||
|
@ -1251,38 +1255,9 @@ bool fromFuncPhrase(
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2nd argument defines a value
|
// ...........................................................................
|
||||||
auto const* valueArg = args.getMemberUnchecked(1);
|
// last argument defines the analyzer to use
|
||||||
|
// ...........................................................................
|
||||||
if (!valueArg) {
|
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
|
||||||
<< "'PHRASE' AQL function: 2nd argument is invalid";
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
irs::string_ref value;
|
|
||||||
arangodb::iresearch::ScopedAqlValue inputValue(*valueArg);
|
|
||||||
|
|
||||||
if (filter || inputValue.isConstant()) {
|
|
||||||
if (!inputValue.execute(ctx)) {
|
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
|
||||||
<< "'PHRASE' AQL function: Failed to evaluate 2nd argument";
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (arangodb::iresearch::SCOPED_VALUE_TYPE_STRING != inputValue.type()) {
|
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
|
||||||
<< "'PHRASE' AQL function: 2nd argument has invalid type '" << inputValue.type()
|
|
||||||
<< "' (string expected)";
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!inputValue.getString(value)) {
|
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
|
||||||
<< "'PHRASE' AQL function: Unable to parse 2nd argument as string";
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
auto const* analyzerArg = args.getMemberUnchecked(argc - 1);
|
auto const* analyzerArg = args.getMemberUnchecked(argc - 1);
|
||||||
|
|
||||||
|
@ -1336,6 +1311,66 @@ bool fromFuncPhrase(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ...........................................................................
|
||||||
|
// 2nd argument defines a value
|
||||||
|
// ...........................................................................
|
||||||
|
|
||||||
|
auto const* valueArg = args.getMemberUnchecked(1);
|
||||||
|
|
||||||
|
if (!valueArg) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: 2nd argument is invalid";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto* valueArgs = &args;
|
||||||
|
size_t valueArgsBegin = 1;
|
||||||
|
size_t valueArgsEnd = argc - 1;
|
||||||
|
|
||||||
|
if (valueArg->isArray()) {
|
||||||
|
valueArgs = valueArg;
|
||||||
|
valueArgsBegin = 0;
|
||||||
|
valueArgsEnd = valueArg->numMembers();
|
||||||
|
|
||||||
|
if (!(valueArgsEnd & 1)) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: 2nd argument has an invalid number of members (must be an odd number)";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
valueArg = valueArgs->getMemberUnchecked(valueArgsBegin);
|
||||||
|
|
||||||
|
if (!valueArg) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: 2nd argument has an invalid member at offset: " << valueArg;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
irs::string_ref value;
|
||||||
|
arangodb::iresearch::ScopedAqlValue inputValue(*valueArg);
|
||||||
|
|
||||||
|
if (filter || inputValue.isConstant()) {
|
||||||
|
if (!inputValue.execute(ctx)) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: Failed to evaluate 2nd argument";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (arangodb::iresearch::SCOPED_VALUE_TYPE_STRING != inputValue.type()) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: 2nd argument has invalid type '" << inputValue.type()
|
||||||
|
<< "' (string expected)";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!inputValue.getString(value)) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "'PHRASE' AQL function: Unable to parse 2nd argument as string";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
irs::by_phrase* phrase = nullptr;
|
irs::by_phrase* phrase = nullptr;
|
||||||
|
|
||||||
if (filter) {
|
if (filter) {
|
||||||
|
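The array form accepted above preserves the variadic semantics: members alternate value, offset, value, ..., so a well-formed array always has an odd member count with values at even offsets. A standalone sketch of that shape check (illustrative types, not the AQL node API):

    #include <cstddef>
    #include <string>
    #include <variant>
    #include <vector>

    using PhrasePart = std::variant<std::string, size_t>; // search value or word offset

    // true IFF 'parts' matches: value [, offset, value]*
    bool validPhraseArray(std::vector<PhrasePart> const& parts) {
      if (parts.empty() || !(parts.size() & 1)) {
        return false; // must be a non-empty, odd-length sequence
      }

      for (size_t i = 0; i < parts.size(); ++i) {
        bool expectValue = !(i & 1); // strings at even offsets, numbers at odd offsets
        if (expectValue != std::holds_alternative<std::string>(parts[i])) {
          return false;
        }
      }

      return true;
    }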
@ -1359,8 +1394,8 @@ bool fromFuncPhrase(
|
||||||
decltype(fieldArg) offsetArg = nullptr;
|
decltype(fieldArg) offsetArg = nullptr;
|
||||||
size_t offset = 0;
|
size_t offset = 0;
|
||||||
|
|
||||||
for (size_t idx = 2, end = argc - 1; idx < end; idx += 2) {
|
for (size_t idx = valueArgsBegin + 1, end = valueArgsEnd; idx < end; idx += 2) {
|
||||||
offsetArg = args.getMemberUnchecked(idx);
|
offsetArg = valueArgs->getMemberUnchecked(idx);
|
||||||
|
|
||||||
if (!offsetArg) {
|
if (!offsetArg) {
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
@ -1368,7 +1403,7 @@ bool fromFuncPhrase(
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
valueArg = args.getMemberUnchecked(idx + 1);
|
valueArg = valueArgs->getMemberUnchecked(idx + 1);
|
||||||
|
|
||||||
if (!valueArg) {
|
if (!valueArg) {
|
||||||
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
@ -1659,16 +1694,30 @@ NS_BEGIN(iresearch)
|
||||||
) {
|
) {
|
||||||
auto filter = irs::And::make();
|
auto filter = irs::And::make();
|
||||||
|
|
||||||
|
FilterFactory::filter(static_cast<irs::And&>(*filter), cid, rid);
|
||||||
|
|
||||||
|
return std::move(filter);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*static*/ irs::filter& FilterFactory::filter(
|
||||||
|
irs::boolean_filter& buf,
|
||||||
|
TRI_voc_cid_t cid,
|
||||||
|
TRI_voc_rid_t rid
|
||||||
|
) {
|
||||||
// filter matching on cid and rid
|
// filter matching on cid and rid
|
||||||
static_cast<irs::And&>(*filter).add<irs::by_term>()
|
auto& filter = buf.add<irs::And>();
|
||||||
|
|
||||||
|
// filter matching on cid
|
||||||
|
filter.add<irs::by_term>()
|
||||||
.field(DocumentPrimaryKey::CID()) // set field
|
.field(DocumentPrimaryKey::CID()) // set field
|
||||||
.term(DocumentPrimaryKey::encode(cid)); // set value
|
.term(DocumentPrimaryKey::encode(cid)); // set value
|
||||||
|
|
||||||
static_cast<irs::And&>(*filter).add<irs::by_term>()
|
// filter matching on rid
|
||||||
|
filter.add<irs::by_term>()
|
||||||
.field(DocumentPrimaryKey::RID()) // set field
|
.field(DocumentPrimaryKey::RID()) // set field
|
||||||
.term(DocumentPrimaryKey::encode(rid)); // set value
|
.term(DocumentPrimaryKey::encode(rid)); // set value
|
||||||
|
|
||||||
return std::move(filter);
|
return filter;
|
||||||
}
|
}
|
||||||
|
|
||||||
/*static*/ bool FilterFactory::filter(
|
/*static*/ bool FilterFactory::filter(
|
||||||
|
|
|
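The new FilterFactory::filter() overload above appends the cid+rid conjunction to a caller-owned boolean filter and returns the appended node, so several document matches can be composed under one root. A hypothetical composition sketch, assuming irs::Or provides the same make() factory as the irs::And used above (matchAny itself is an illustration, not existing API):

    // collect per-document (cid, rid) conjunctions under a single Or root
    irs::filter::ptr matchAny(
        std::vector<std::pair<TRI_voc_cid_t, TRI_voc_rid_t>> const& documents
    ) {
      auto root = irs::Or::make();

      for (auto& entry: documents) {
        // appends And(cid == first, rid == second) and returns the appended filter
        arangodb::iresearch::FilterFactory::filter(
          static_cast<irs::Or&>(*root), entry.first, entry.second
        );
      }

      return root;
    }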
@ -49,6 +49,16 @@ struct FilterFactory {
|
||||||
static irs::filter::ptr filter(TRI_voc_cid_t cid);
|
static irs::filter::ptr filter(TRI_voc_cid_t cid);
|
||||||
static irs::filter::ptr filter(TRI_voc_cid_t cid, TRI_voc_rid_t rid);
|
static irs::filter::ptr filter(TRI_voc_cid_t cid, TRI_voc_rid_t rid);
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
/// @brief create a filter matching 'cid' + 'rid' pair and append to 'buf'
|
||||||
|
/// @return the appended filter portion
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
static irs::filter& filter(
|
||||||
|
irs::boolean_filter& buf,
|
||||||
|
TRI_voc_cid_t cid,
|
||||||
|
TRI_voc_rid_t rid
|
||||||
|
);
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
/// @brief determine if the 'node' can be converted into an iresearch filter
|
/// @brief determine if the 'node' can be converted into an iresearch filter
|
||||||
/// if 'filter' != nullptr then also append the iresearch filter there
|
/// if 'filter' != nullptr then also append the iresearch filter there
|
||||||
|
|
|
@ -71,13 +71,29 @@ NS_END
|
||||||
NS_BEGIN(arangodb)
|
NS_BEGIN(arangodb)
|
||||||
NS_BEGIN(iresearch)
|
NS_BEGIN(iresearch)
|
||||||
|
|
||||||
|
IResearchLink::ViewRef::ViewRef(IResearchView::AsyncSelf::ptr const& view) {
|
||||||
|
if (view && view->get()) {
|
||||||
|
_view = view;
|
||||||
|
_lock = std::unique_lock<ReadMutex>(_view->mutex());
|
||||||
|
} else {
|
||||||
|
static const arangodb::iresearch::IResearchView::AsyncSelf::ptr noView = // renamed to avoid shadowing the 'view' parameter
|
||||||
|
irs::memory::make_unique<arangodb::iresearch::IResearchView::AsyncSelf>(nullptr);
|
||||||
|
|
||||||
|
_view = noView;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
IResearchView* IResearchLink::ViewRef::get() const noexcept {
|
||||||
|
return _view->get();
|
||||||
|
}
|
||||||
|
|
||||||
IResearchLink::IResearchLink(
|
IResearchLink::IResearchLink(
|
||||||
TRI_idx_iid_t iid,
|
TRI_idx_iid_t iid,
|
||||||
arangodb::LogicalCollection* collection
|
arangodb::LogicalCollection* collection
|
||||||
): _collection(collection),
|
): _collection(collection),
|
||||||
_defaultId(0), // 0 is never a valid id
|
_defaultId(0), // 0 is never a valid id
|
||||||
_id(iid),
|
_id(iid),
|
||||||
_view(NO_VIEW) {
|
_view(nullptr) {
|
||||||
}
|
}
|
||||||
|
|
||||||
IResearchLink::~IResearchLink() {
|
IResearchLink::~IResearchLink() {
|
||||||
|
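ViewRef above packages the lifetime rules every call site previously repeated by hand: pin the AsyncSelf pointer, then hold its read mutex so the IResearchView cannot be deallocated while the reference is in use. A standalone sketch of the guard pattern with std::shared_mutex standing in for irs::async_utils::read_write_mutex (all names illustrative):

    #include <memory>
    #include <shared_mutex>

    struct Payload { int id; };

    // holder that its owner may asynchronously reset to 'no payload'
    struct AsyncSelfSketch {
      std::shared_mutex mutex; // read-locked by guards, write-locked to reset
      Payload* payload = nullptr;
    };

    class PayloadRef {
     public:
      explicit PayloadRef(std::shared_ptr<AsyncSelfSketch> self): _self(std::move(self)) {
        if (_self && _self->payload) {
          _lock = std::shared_lock<std::shared_mutex>(_self->mutex); // pin for our lifetime
        }
      }

      Payload* get() const noexcept { return _self ? _self->payload : nullptr; }

     private:
      std::shared_ptr<AsyncSelfSketch> _self; // declared first: released only after _lock unlocks
      std::shared_lock<std::shared_mutex> _lock;
    };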
@ -87,10 +103,7 @@ IResearchLink::~IResearchLink() {
|
||||||
bool IResearchLink::operator==(IResearchView const& view) const noexcept {
|
bool IResearchLink::operator==(IResearchView const& view) const noexcept {
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* thisView = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* thisView = _view->get();
|
|
||||||
|
|
||||||
return thisView && thisView->id() == view.id();
|
return thisView && thisView->id() == view.id();
|
||||||
}
|
}
|
||||||
|
@ -138,10 +151,7 @@ void IResearchLink::batchInsert(
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
queue->setStatus(TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED); // IResearchView required
|
queue->setStatus(TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED); // IResearchView required
|
||||||
|
@ -171,10 +181,7 @@ int IResearchLink::drop() {
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
||||||
|
@ -251,9 +258,9 @@ bool IResearchLink::init(arangodb::velocypack::Slice const& definition) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
auto viewSelf = view->self();
|
ViewRef viewSelf(view->self());
|
||||||
|
|
||||||
if (!viewSelf) {
|
if (!viewSelf.get()) {
|
||||||
LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH) << "error getting view: '" << viewId << "' for link '" << _id << "'";
|
LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH) << "error getting view: '" << viewId << "' for link '" << _id << "'";
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
|
@ -298,10 +305,7 @@ Result IResearchLink::insert(
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
return TRI_ERROR_ARANGO_INDEX_HANDLE_BAD; // IResearchView required
|
return TRI_ERROR_ARANGO_INDEX_HANDLE_BAD; // IResearchView required
|
||||||
|
@ -340,10 +344,7 @@ bool IResearchLink::json(
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (view) {
|
if (view) {
|
||||||
builder.add(VIEW_ID_FIELD, VPackValue(view->id()));
|
builder.add(VIEW_ID_FIELD, VPackValue(view->id()));
|
||||||
|
@ -356,17 +357,15 @@ bool IResearchLink::json(
|
||||||
}
|
}
|
||||||
|
|
||||||
void IResearchLink::load() {
|
void IResearchLink::load() {
|
||||||
|
// Note: this function is only used by RocksDB
|
||||||
}
|
}
|
||||||
|
|
||||||
bool IResearchLink::matchesDefinition(VPackSlice const& slice) const {
|
bool IResearchLink::matchesDefinition(VPackSlice const& slice) const {
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
|
||||||
|
|
||||||
if (slice.hasKey(VIEW_ID_FIELD)) {
|
if (slice.hasKey(VIEW_ID_FIELD)) {
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
auto* view = _view.get();
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
return false; // slice has identifier but the current object does not
|
return false; // slice has identifier but the current object does not
|
||||||
|
@ -377,7 +376,7 @@ bool IResearchLink::matchesDefinition(VPackSlice const& slice) const {
|
||||||
if (!identifier.isNumber() || uint64_t(identifier.getInt()) != identifier.getUInt() || identifier.getUInt() != view->id()) {
|
if (!identifier.isNumber() || uint64_t(identifier.getInt()) != identifier.getUInt() || identifier.getUInt() != view->id()) {
|
||||||
return false; // iResearch View names of current object and slice do not match
|
return false; // iResearch View names of current object and slice do not match
|
||||||
}
|
}
|
||||||
} else if (_view->get()) { // do not need to lock since this is a single-call
|
} else if (_view.get()) { // do not need to lock since this is a single-call
|
||||||
return false; // slice has no 'name' but the current object does
|
return false; // slice has no 'name' but the current object does
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -394,10 +393,7 @@ size_t IResearchLink::memory() const {
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (view) {
|
if (view) {
|
||||||
size_t count = 0;
|
size_t count = 0;
|
||||||
|
@ -432,10 +428,7 @@ Result IResearchLink::remove(
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
||||||
|
@ -460,10 +453,7 @@ Result IResearchLink::remove(
|
||||||
|
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
return TRI_ERROR_ARANGO_COLLECTION_NOT_LOADED; // IResearchView required
|
||||||
|
@ -496,6 +486,27 @@ Result IResearchLink::remove(
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
arangodb::Result IResearchLink::recover() {
|
||||||
|
if (!_collection) {
|
||||||
|
return {TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND}; // current link isn't associated with a collection
|
||||||
|
}
|
||||||
|
|
||||||
|
auto* view = _view.get();
|
||||||
|
|
||||||
|
if (!view) {
|
||||||
|
return {TRI_ERROR_ARANGO_VIEW_NOT_FOUND}; // current link isn't associated with a view
|
||||||
|
}
|
||||||
|
|
||||||
|
arangodb::velocypack::Builder link;
|
||||||
|
|
||||||
|
if (!json(link, false)) {
|
||||||
|
return {TRI_ERROR_INTERNAL};
|
||||||
|
}
|
||||||
|
|
||||||
|
// re-insert link into the view
|
||||||
|
return view->link(_collection->cid(), link.slice());
|
||||||
|
}
|
||||||
|
|
||||||
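recover() above re-registers the link with its view from the link's own JSON definition. A hypothetical call site (recoverLink is an assumption for illustration; Result and LOG_TOPIC are used as elsewhere in this commit):

    arangodb::Result recoverLink(arangodb::iresearch::IResearchLink& link) {
      auto res = link.recover(); // drop the existing link in the view and re-create it

      if (!res.ok()) {
        LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
          << "failed to recover IResearch link, error: " << res.errorNumber();
      }

      return res;
    }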
Index::IndexType IResearchLink::type() const {
|
Index::IndexType IResearchLink::type() const {
|
||||||
// TODO: don't use enum
|
// TODO: don't use enum
|
||||||
return Index::TRI_IDX_TYPE_IRESEARCH_LINK;
|
return Index::TRI_IDX_TYPE_IRESEARCH_LINK;
|
||||||
|
@ -508,14 +519,10 @@ char const* IResearchLink::typeName() const {
|
||||||
int IResearchLink::unload() {
|
int IResearchLink::unload() {
|
||||||
WriteMutex mutex(_mutex); // '_view' can be asynchronously read
|
WriteMutex mutex(_mutex); // '_view' can be asynchronously read
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
auto* view = _view.get();
|
||||||
auto viewCopy = _view; // retain a copy of the pointer for the case where nullifying the original would call the destructor of a locked mutex
|
|
||||||
auto viewMutex = _view->mutex(); // IResearchView can be asynchronously deallocated
|
|
||||||
SCOPED_LOCK(viewMutex);
|
|
||||||
auto* view = _view->get();
|
|
||||||
|
|
||||||
if (!view) {
|
if (!view) {
|
||||||
_view = NO_VIEW; // release reference to the IResearch View
|
_view = ViewRef(nullptr); // release reference to the IResearch View
|
||||||
|
|
||||||
return TRI_ERROR_NO_ERROR;
|
return TRI_ERROR_NO_ERROR;
|
||||||
}
|
}
|
||||||
|
@ -543,7 +550,7 @@ int IResearchLink::unload() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
_view = NO_VIEW; // release reference to the iResearch View
|
_view = ViewRef(nullptr); // release reference to the IResearch View
|
||||||
|
|
||||||
return TRI_ERROR_NO_ERROR;
|
return TRI_ERROR_NO_ERROR;
|
||||||
}
|
}
|
||||||
|
@ -551,9 +558,8 @@ int IResearchLink::unload() {
|
||||||
const IResearchView* IResearchLink::view() const {
|
const IResearchView* IResearchLink::view() const {
|
||||||
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
ReadMutex mutex(_mutex); // '_view' can be asynchronously modified
|
||||||
SCOPED_LOCK(mutex);
|
SCOPED_LOCK(mutex);
|
||||||
assert(_view); // NO_VIEW used for unassociated links
|
|
||||||
|
|
||||||
return _view->get();
|
return _view.get();
|
||||||
}
|
}
|
||||||
|
|
||||||
int EnhanceJsonIResearchLink(
|
int EnhanceJsonIResearchLink(
|
||||||
|
|
|
@ -159,6 +159,13 @@ class IResearchLink {
|
||||||
TRI_voc_cid_t value
|
TRI_voc_cid_t value
|
||||||
);
|
);
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
/// @brief recover the IResearch Link index in a view by dropping the existing one and
|
||||||
|
/// creating a new one
|
||||||
|
/// @return success
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
arangodb::Result recover();
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
/// @brief iResearch Link index type enum value
|
/// @brief iResearch Link index type enum value
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
@ -199,12 +206,22 @@ class IResearchLink {
|
||||||
// the update delta into the WAL marker instead of the full persisted state
|
// the update delta into the WAL marker instead of the full persisted state
|
||||||
friend arangodb::Result IResearchView::updateProperties(arangodb::velocypack::Slice const&, bool, bool);
|
friend arangodb::Result IResearchView::updateProperties(arangodb::velocypack::Slice const&, bool, bool);
|
||||||
|
|
||||||
|
class ViewRef {
|
||||||
|
public:
|
||||||
|
explicit ViewRef(IResearchView::AsyncSelf::ptr const& view);
|
||||||
|
IResearchView* get() const noexcept;
|
||||||
|
|
||||||
|
private:
|
||||||
|
IResearchView::AsyncSelf::ptr _view;
|
||||||
|
std::unique_lock<irs::async_utils::read_write_mutex::read_mutex> _lock; // declared after '_view' so the lock is released before the mutex owner
|
||||||
|
};
|
||||||
|
|
||||||
LogicalCollection* _collection; // the linked collection
|
LogicalCollection* _collection; // the linked collection
|
||||||
TRI_voc_cid_t _defaultId; // the identifier of the desired view (iff _view == nullptr)
|
TRI_voc_cid_t _defaultId; // the identifier of the desired view (iff _view == nullptr)
|
||||||
TRI_idx_iid_t const _id; // the index identifier
|
TRI_idx_iid_t const _id; // the index identifier
|
||||||
IResearchLinkMeta _meta; // how this collection should be indexed
|
IResearchLinkMeta _meta; // how this collection should be indexed
|
||||||
mutable irs::async_utils::read_write_mutex _mutex; // for use with _view to allow asynchronous disassociation
|
mutable irs::async_utils::read_write_mutex _mutex; // for use with _view to allow asynchronous disassociation
|
||||||
IResearchView::AsyncSelf::ptr _view; // effectively the IResearch datastore itself (nullptr == not associated)
|
ViewRef _view; // effectively the IResearch datastore itself (nullptr == not associated)
|
||||||
}; // IResearchLink
|
}; // IResearchLink
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
|
|
@ -46,7 +46,7 @@ class IResearchMMFilesLink final
|
||||||
transaction::Methods* trx,
|
transaction::Methods* trx,
|
||||||
std::vector<std::pair<arangodb::LocalDocumentId, arangodb::velocypack::Slice>> const& documents,
|
std::vector<std::pair<arangodb::LocalDocumentId, arangodb::velocypack::Slice>> const& documents,
|
||||||
std::shared_ptr<arangodb::basics::LocalTaskQueue> queue
|
std::shared_ptr<arangodb::basics::LocalTaskQueue> queue
|
||||||
) {
|
) override {
|
||||||
IResearchLink::batchInsert(trx, documents, queue);
|
IResearchLink::batchInsert(trx, documents, queue);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -46,7 +46,7 @@ class IResearchRocksDBLink final
|
||||||
transaction::Methods* trx,
|
transaction::Methods* trx,
|
||||||
std::vector<std::pair<arangodb::LocalDocumentId, arangodb::velocypack::Slice>> const& documents,
|
std::vector<std::pair<arangodb::LocalDocumentId, arangodb::velocypack::Slice>> const& documents,
|
||||||
std::shared_ptr<arangodb::basics::LocalTaskQueue> queue
|
std::shared_ptr<arangodb::basics::LocalTaskQueue> queue
|
||||||
) {
|
) override {
|
||||||
IResearchLink::batchInsert(trx, documents, queue);
|
IResearchLink::batchInsert(trx, documents, queue);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -316,6 +316,117 @@ inline void insertDocument(
|
||||||
doc.insert(irs::action::store, primaryKey);
|
doc.insert(irs::action::store, primaryKey);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
/// @brief syncs an IResearch DataStore if required
|
||||||
|
/// @return true if a sync was executed
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
bool syncStore(
|
||||||
|
irs::directory& directory,
|
||||||
|
irs::directory_reader& reader,
|
||||||
|
irs::index_writer& writer,
|
||||||
|
std::atomic<size_t>& segmentCount,
|
||||||
|
arangodb::iresearch::IResearchViewMeta::CommitMeta::ConsolidationPolicies const& policies,
|
||||||
|
bool forceCommit,
|
||||||
|
bool runCleanupAfterCommit,
|
||||||
|
std::string const& viewName
|
||||||
|
) {
|
||||||
|
char runId = 0; // value not used
|
||||||
|
|
||||||
|
// ...........................................................................
|
||||||
|
// apply consolidation policies
|
||||||
|
// ...........................................................................
|
||||||
|
|
||||||
|
for (auto& entry: policies) {
|
||||||
|
if (!entry.segmentThreshold()
|
||||||
|
|| entry.segmentThreshold() > segmentCount.load()) {
|
||||||
|
continue; // skip if the segment threshold is not reached or consolidation is disabled
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "registering consolidation policy '" << size_t(entry.type()) << "' with IResearch view '" << viewName << "' run id '" << size_t(&runId) << " segment threshold '" << entry.segmentThreshold() << "' segment count '" << segmentCount.load() << "'";
|
||||||
|
|
||||||
|
try {
|
||||||
|
writer.consolidate(entry.policy(), false);
|
||||||
|
forceCommit = true; // a consolidation policy was found requiring commit
|
||||||
|
} catch (std::exception const& e) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during registeration of consolidation policy '" << size_t(entry.type()) << "' with IResearch view '" << viewName << "': " << e.what();
|
||||||
|
IR_EXCEPTION();
|
||||||
|
} catch (...) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during registeration of consolidation policy '" << size_t(entry.type()) << "' with IResearch view '" << viewName << "'";
|
||||||
|
IR_EXCEPTION();
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "finished registering consolidation policy '" << size_t(entry.type()) << "' with IResearch view '" << viewName << "' run id '" << size_t(&runId) << "'";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!forceCommit) {
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "skipping store sync since no consolidation policies matched and sync not forced for IResearch view '" << viewName << "' run id '" << size_t(&runId) << "'";
|
||||||
|
|
||||||
|
return false; // commit not done
|
||||||
|
}
|
||||||
|
|
||||||
|
// ...........................................................................
|
||||||
|
// apply data store commit
|
||||||
|
// ...........................................................................
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "starting store sync for IResearch view '" << viewName << "' run id '" << size_t(&runId) << "' segment count before '" << segmentCount.load() << "'";
|
||||||
|
|
||||||
|
try {
|
||||||
|
segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
|
||||||
|
writer.commit();
|
||||||
|
reader = reader.reopen(); // update reader
|
||||||
|
segmentCount += reader.size(); // add committed segments
|
||||||
|
} catch (std::exception const& e) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during sync of IResearch view '" << viewName << "': " << e.what();
|
||||||
|
IR_EXCEPTION();
|
||||||
|
} catch (...) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during sync of IResearch view '" << viewName << "'";
|
||||||
|
IR_EXCEPTION();
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "finished store sync for IResearch view '" << viewName << "' run id '" << size_t(&runId) << "' segment count after '" << segmentCount.load() << "'";
|
||||||
|
|
||||||
|
if (!runCleanupAfterCommit) {
|
||||||
|
return true; // commit done
|
||||||
|
}
|
||||||
|
|
||||||
|
// ...........................................................................
|
||||||
|
// apply cleanup
|
||||||
|
// ...........................................................................
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "starting cleanup for IResearch view '" << viewName << "' run id '" << size_t(&runId) << "'";
|
||||||
|
|
||||||
|
try {
|
||||||
|
irs::directory_utils::remove_all_unreferenced(directory);
|
||||||
|
} catch (std::exception const& e) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during cleanup of IResearch view '" << viewName << "': " << e.what();
|
||||||
|
IR_EXCEPTION();
|
||||||
|
} catch (...) {
|
||||||
|
LOG_TOPIC(WARN, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "caught exception during cleanup of IResearch view '" << viewName << "'";
|
||||||
|
IR_EXCEPTION();
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG_TOPIC(DEBUG, arangodb::iresearch::IResearchFeature::IRESEARCH)
|
||||||
|
<< "finished cleanup for IResearch view '" << viewName << "' run id '" << size_t(&runId) << "'";
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
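Note how syncStore() above and DataStore::sync() below maintain the segment counter across a commit: the counter is zeroed before the commit so segments created while the commit runs are still counted, and the committed total is added afterwards. A standalone sketch of that reset-then-add accounting (the callback stands in for writer.commit() plus reader.reopen().size()):

    #include <atomic>
    #include <cstddef>
    #include <functional>

    void commitWithSegmentCount(
        std::atomic<size_t>& segmentCount,
        std::function<size_t()> const& commitAndCountSegments
    ) {
      segmentCount.store(0); // segments appearing mid-commit keep counting from zero
      segmentCount += commitAndCountSegments(); // add committed segments afterwards
    }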
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
/// @brief updates the collections in 'vocbase' to match the specified
|
||||||
|
/// IResearchLink definitions
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
arangodb::Result updateLinks(
|
arangodb::Result updateLinks(
|
||||||
std::unordered_set<TRI_voc_cid_t>& modified,
|
std::unordered_set<TRI_voc_cid_t>& modified,
|
||||||
TRI_vocbase_t& vocbase,
|
TRI_vocbase_t& vocbase,
|
||||||
|
@ -679,8 +790,10 @@ IResearchView::DataStore& IResearchView::DataStore::operator=(
|
||||||
|
|
||||||
void IResearchView::DataStore::sync() {
|
void IResearchView::DataStore::sync() {
|
||||||
TRI_ASSERT(_writer && _reader);
|
TRI_ASSERT(_writer && _reader);
|
||||||
|
_segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
|
||||||
_writer->commit();
|
_writer->commit();
|
||||||
_reader = _reader.reopen(); // update reader
|
_reader = _reader.reopen(); // update reader
|
||||||
|
_segmentCount += _reader.size(); // add committed segments
|
||||||
}
|
}
|
||||||
|
|
||||||
IResearchView::MemoryStore::MemoryStore() {
|
IResearchView::MemoryStore::MemoryStore() {
|
||||||
|
@ -694,32 +807,6 @@ IResearchView::MemoryStore::MemoryStore() {
|
||||||
_reader = irs::directory_reader::open(*_directory); // open after 'commit' for valid 'store'
|
_reader = irs::directory_reader::open(*_directory); // open after 'commit' for valid 'store'
|
||||||
}
|
}
|
||||||
|
|
||||||
IResearchView::SyncState::PolicyState::PolicyState(
|
|
||||||
size_t intervalStep,
|
|
||||||
const std::shared_ptr<irs::index_writer::consolidation_policy_t>& policy
|
|
||||||
): _intervalCount(0), _intervalStep(intervalStep), _policy(policy) {
|
|
||||||
}
|
|
||||||
|
|
||||||
IResearchView::SyncState::SyncState() noexcept
|
|
||||||
: _cleanupIntervalCount(0),
|
|
||||||
_cleanupIntervalStep(0) {
|
|
||||||
}
|
|
||||||
|
|
||||||
IResearchView::SyncState::SyncState(
|
|
||||||
IResearchViewMeta::CommitMeta const& meta
|
|
||||||
): SyncState() {
|
|
||||||
_cleanupIntervalStep = meta._cleanupIntervalStep;
|
|
||||||
|
|
||||||
for (auto& entry: meta._consolidationPolicies) {
|
|
||||||
if (entry.policy()) {
|
|
||||||
_consolidationPolicies.emplace_back(
|
|
||||||
entry.intervalStep(),
|
|
||||||
irs::memory::make_unique<irs::index_writer::consolidation_policy_t>(entry.policy())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
IResearchView::TidStore::TidStore(
|
IResearchView::TidStore::TidStore(
|
||||||
transaction::Methods& trx,
|
transaction::Methods& trx,
|
||||||
std::function<void(transaction::Methods*)> const& trxCallback
|
std::function<void(transaction::Methods*)> const& trxCallback
|
||||||
|
@@ -838,79 +925,106 @@ IResearchView::IResearchView(
   };

   // add asynchronous commit job
-  _threadPool.run(
-    [this]()->void {
-    struct State: public SyncState {
-      size_t _asyncMetaRevision;
-      size_t _commitIntervalMsecRemainder;
-      size_t _commitTimeoutMsec;
-
-      State():
-        SyncState(),
-        _asyncMetaRevision(0), // '0' differs from IResearchView constructor above
-        _commitIntervalMsecRemainder(std::numeric_limits<size_t>::max()),
-        _commitTimeoutMsec(0) {
-      }
-      explicit State(IResearchViewMeta::CommitMeta const& meta)
-        : SyncState(meta),
-          _asyncMetaRevision(0), // '0' differs from IResearchView constructor above
-          _commitIntervalMsecRemainder(std::numeric_limits<size_t>::max()),
-          _commitTimeoutMsec(meta._commitTimeoutMsec) {
-      }
-    };
-
-    State state;
+  _threadPool.run([this]()->void {
+    struct DataStoreState {
+      size_t _cleanupIntervalCount;
+      DataStore& _dataStore;
+      DataStoreState(DataStore& store)
+        : _cleanupIntervalCount(0), _dataStore(store) {}
+    };
+
+    size_t asyncMetaRevision = 0; // '0' differs from IResearchView constructor above
+    size_t cleanupIntervalStep; // will be initialized when states are updated below
+    auto commitIntervalMsecRemainder = std::numeric_limits<size_t>::max(); // longest possible time for std::min(...)
+    size_t commitTimeoutMsec; // will be initialized when states are updated below
+    IResearchViewMeta::CommitMeta::ConsolidationPolicies consolidationPolicies;
+    DataStoreState states[] = {
+      DataStoreState(_memoryNodes[0]._store),
+      DataStoreState(_memoryNodes[1]._store),
+      DataStoreState(_storePersisted)
+    };
     ReadMutex mutex(_mutex); // '_meta' can be asynchronously modified

     for(;;) {
+      bool commitTimeoutReached = false;

       // sleep until timeout
       {
         SCOPED_LOCK_NAMED(mutex, lock); // for '_meta._commit._commitIntervalMsec'
         SCOPED_LOCK_NAMED(_asyncMutex, asyncLock); // acquire before '_asyncTerminate' check

         if (_asyncTerminate.load()) {
-          break;
+          return; // termination requested
         }

         if (!_meta._commit._commitIntervalMsec) {
           lock.unlock(); // do not hold read lock while waiting on condition
           _asyncCondition.wait(asyncLock); // wait forever
-          continue;
-        }
-
-        auto startTime = std::chrono::system_clock::now();
-        auto endTime = startTime
-          + std::chrono::milliseconds(std::min(state._commitIntervalMsecRemainder, _meta._commit._commitIntervalMsec))
-          ;
-
-        lock.unlock(); // do not hold read lock while waiting on condition
-        state._commitIntervalMsecRemainder = std::numeric_limits<size_t>::max(); // longest possible time assuming an uninterrupted sleep
-
-        if (std::cv_status::timeout != _asyncCondition.wait_until(asyncLock, endTime)) {
-          auto nowTime = std::chrono::system_clock::now();
-
-          // if still need to sleep more then must relock '_meta' and sleep for min (remainder, interval)
-          if (nowTime < endTime) {
-            state._commitIntervalMsecRemainder = std::chrono::duration_cast<std::chrono::milliseconds>(endTime - nowTime).count();
-            continue; // need to reaquire lock to chech for change in '_meta'
-          }
-        }
+        } else {
+          auto msecRemainder =
+            std::min(commitIntervalMsecRemainder, _meta._commit._commitIntervalMsec);
+          auto startTime = std::chrono::system_clock::now();
+          auto endTime = startTime + std::chrono::milliseconds(msecRemainder);
+
+          lock.unlock(); // do not hold read lock while waiting on condition
+          commitIntervalMsecRemainder = std::numeric_limits<size_t>::max(); // longest possible time assuming an uninterrupted sleep
+          commitTimeoutReached = true;
+
+          if (std::cv_status::timeout != _asyncCondition.wait_until(asyncLock, endTime)) {
+            auto nowTime = std::chrono::system_clock::now();
+
+            // if still need to sleep more then must relock '_meta' and sleep for min (remainder, interval)
+            if (nowTime < endTime) {
+              commitIntervalMsecRemainder = std::chrono::duration_cast<std::chrono::milliseconds>(endTime - nowTime).count();
+              commitTimeoutReached = false;
+            }
+          }
+        }

         if (_asyncTerminate.load()) {
-          break;
+          return; // termination requested
         }
       }

-      // reload state if required
-      if (_asyncMetaRevision.load() != state._asyncMetaRevision) {
-        SCOPED_LOCK(mutex);
-        state = State(_meta._commit);
-        state._asyncMetaRevision = _asyncMetaRevision.load();
-      }
+      SCOPED_LOCK(mutex); // '_meta'/'_memoryStore'/'_storePersisted' can be asynchronously modified
+
+      // reload states if required
+      if (_asyncMetaRevision.load() != asyncMetaRevision) {
+        asyncMetaRevision = _asyncMetaRevision.load();
+        cleanupIntervalStep = _meta._commit._cleanupIntervalStep;
+        commitTimeoutMsec = _meta._commit._commitTimeoutMsec;
+        consolidationPolicies = _meta._commit._consolidationPolicies; // local copy
+      }
+
+      auto thresholdSec = TRI_microtime() + commitTimeoutMsec/1000.0;

       // perform sync
-      sync(state, state._commitTimeoutMsec);
+      for (size_t i = 0, count = IRESEARCH_COUNTOF(states);
+           i < count && TRI_microtime() <= thresholdSec;
+           ++i) {
+        auto& state = states[i];
+        auto runCleanupAfterCommit =
+          state._cleanupIntervalCount > cleanupIntervalStep;
+
+        if (state._dataStore._directory
+            && state._dataStore._writer
+            && syncStore(*(state._dataStore._directory),
+                         state._dataStore._reader,
+                         *(state._dataStore._writer),
+                         state._dataStore._segmentCount,
+                         consolidationPolicies,
+                         commitTimeoutReached,
+                         runCleanupAfterCommit,
+                         name()
+                        )) {
+          commitIntervalMsecRemainder = std::numeric_limits<size_t>::max(); // longest possible time for std::min(...)
+
+          if (runCleanupAfterCommit
+              && ++state._cleanupIntervalCount >= cleanupIntervalStep) {
+            state._cleanupIntervalCount = 0;
+          }
+        }
+      }
     }
   });
 }
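The rewritten job is essentially an interruptible periodic worker: sleep up to the commit interval on a condition variable, wake early on notify, re-check a termination flag, then run a time-budgeted pass over the stores. A minimal self-contained sketch of that pattern, standard library only (the names here are illustrative, not the ArangoDB API):

```cpp
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

int main() {
  std::mutex mutex;
  std::condition_variable condition;
  std::atomic<bool> terminate{false};
  auto const interval = std::chrono::milliseconds(100);

  std::thread worker([&]()->void {
    std::unique_lock<std::mutex> lock(mutex);

    for (;;) {
      if (terminate.load()) {
        return; // termination requested, mirrors the 'return' in the job above
      }

      auto endTime = std::chrono::system_clock::now() + interval;

      // a timeout means the full interval elapsed: do the periodic work;
      // an early wakeup means re-check 'terminate' and the deadline instead
      if (std::cv_status::timeout == condition.wait_until(lock, endTime)) {
        std::cout << "periodic commit/cleanup pass\n";
      }
    }
  });

  std::this_thread::sleep_for(std::chrono::milliseconds(350));
  terminate.store(true);
  condition.notify_all(); // wake the worker so it can observe 'terminate'
  worker.join();
}
```

Waking on notify rather than polling is what lets the `_asyncCondition.notify_all()` calls added in `finish()` and `commit()` trigger an immediate recheck of the sync state.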
@@ -952,55 +1066,6 @@ IResearchView::MemoryStore& IResearchView::activeMemoryStore() const {
   return _memoryNode->_store;
 }

-bool IResearchView::cleanup(size_t maxMsec /*= 0*/) {
-  ReadMutex mutex(_mutex);
-  auto thresholdSec = TRI_microtime() + maxMsec/1000.0;
-
-  try {
-    SCOPED_LOCK(mutex);
-
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "starting active memory-store cleanup for iResearch view '" << id() << "'";
-    irs::directory_utils::remove_all_unreferenced(*(_memoryNode->_store._directory));
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "finished active memory-store cleanup for iResearch view '" << id() << "'";
-
-    if (maxMsec && TRI_microtime() >= thresholdSec) {
-      return true; // skip if timout exceeded
-    }
-
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "starting flushing memory-store cleanup for iResearch view '" << id() << "'";
-    irs::directory_utils::remove_all_unreferenced(*(_toFlush->_store._directory));
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "finished flushing memory-store cleanup for iResearch view '" << id() << "'";
-
-    if (maxMsec && TRI_microtime() >= thresholdSec) {
-      return true; // skip if timout exceeded
-    }
-
-    if (_storePersisted) {
-      LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-        << "starting persisted-store cleanup for iResearch view '" << id() << "'";
-      irs::directory_utils::remove_all_unreferenced(*(_storePersisted._directory));
-      LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-        << "finished persisted-store cleanup for iResearch view '" << id() << "'";
-    }
-
-    return true;
-  } catch (std::exception const& e) {
-    LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH)
-      << "caught exception during cleanup of iResearch view '" << id() << "': " << e.what();
-    IR_EXCEPTION();
-  } catch (...) {
-    LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH)
-      << "caught exception during cleanup of iResearch view '" << id() << "'";
-    IR_EXCEPTION();
-  }
-
-  return false;
-}
-
 void IResearchView::drop() {
   std::unordered_set<TRI_voc_cid_t> collections;

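The removed `cleanup()` used a stage-by-stage time budget: compute a wall-clock threshold up front and bail out between stages once it passes. The new job above keeps the same idea via its `TRI_microtime() <= thresholdSec` loop condition. A standalone sketch of the pattern, with `std::chrono` standing in for `TRI_microtime()`:

```cpp
#include <chrono>
#include <iostream>

// Stage-by-stage time budget: stop between stages once the deadline passed,
// mirroring the 'maxMsec'/'thresholdSec' checks in the code above.
int main() {
  using clock = std::chrono::steady_clock;
  auto const deadline = clock::now() + std::chrono::milliseconds(10);

  for (int stage = 1; stage <= 3; ++stage) {
    std::cout << "running stage " << stage << '\n'; // e.g. per-store cleanup
    if (clock::now() >= deadline) {
      std::cout << "budget exhausted, remaining stages skipped\n";
      break; // partial cleanup is acceptable; the next run resumes the work
    }
  }
}
```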
@@ -1173,6 +1238,8 @@ int IResearchView::finish(TRI_voc_tid_t tid, bool commit) {

     trxStore._writer->commit(); // ensure have latest view in reader
     memoryStore._writer->import(trxStore._reader.reopen());
+    ++memoryStore._segmentCount; // a new segment was imported
+    _asyncCondition.notify_all(); // trigger recheck of sync

     return TRI_ERROR_NO_ERROR;
   } catch (std::exception const& e) {
@@ -1214,12 +1281,18 @@ arangodb::Result IResearchView::commit() {
     }

     SCOPED_LOCK(_toFlush->_reopenMutex); // do not allow concurrent reopen
+    _storePersisted._segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
     _storePersisted._writer->commit(); // finishing flush transaction
+    memoryStore._segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
     memoryStore._writer->clear(); // prepare the store for reuse

     SCOPED_LOCK(_toFlush->_readMutex); // do not allow concurrent read since _storePersisted/_toFlush need to be updated atomically
     _storePersisted._reader = _storePersisted._reader.reopen(); // update reader
+    _storePersisted._segmentCount += _storePersisted._reader.size(); // add committed segments
     memoryStore._reader = memoryStore._reader.reopen(); // update reader
+    memoryStore._segmentCount += memoryStore._reader.size(); // add committed segments
+
+    _asyncCondition.notify_all(); // trigger recheck of sync

     return TRI_ERROR_NO_ERROR;
   } catch (std::exception const& e) {
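The added `_segmentCount` bookkeeping follows one recipe everywhere in this commit: zero the counter before the writer commits, reopen the reader, then add the reader's segment count back, so the background job can see how many segments each store currently holds. A minimal sketch of that recipe, with hypothetical `Reader`/`Writer` stand-ins for the iresearch types:

```cpp
#include <atomic>
#include <cstddef>
#include <iostream>

// Hypothetical stand-ins for irs::index_writer / irs::directory_reader.
struct Writer { void commit() { /* flush pending segments */ } };
struct Reader {
  std::size_t segments = 0;
  std::size_t size() const { return segments; }
  Reader reopen() const { return Reader{segments + 1}; } // pretend commit added a segment
};

int main() {
  Writer writer;
  Reader reader{3};
  std::atomic<std::size_t> segmentCount{3};

  segmentCount.store(0);           // reset before commit: count only post-commit segments
  writer.commit();                 // may create new segments
  reader = reader.reopen();        // observe the committed state
  segmentCount += reader.size();   // add committed segments back

  std::cout << "segments now visible: " << segmentCount.load() << '\n'; // 4
}
```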
@@ -1485,6 +1558,55 @@ int IResearchView::insert(
   return TRI_ERROR_NO_ERROR;
 }

+arangodb::Result IResearchView::link(
+    TRI_voc_cid_t cid,
+    arangodb::velocypack::Slice const link
+) {
+  if (!_logicalView) {
+    return arangodb::Result(
+      TRI_ERROR_INTERNAL,
+      std::string("failed to find logical view while linking IResearch view '") + std::to_string(id()) + "'"
+    );
+  }
+
+  auto* vocbase = _logicalView->vocbase();
+
+  if (!vocbase) {
+    return arangodb::Result(
+      TRI_ERROR_INTERNAL,
+      std::string("failed to find vocbase while linking IResearch view '") + std::to_string(id()) + "'"
+    );
+  }
+
+  arangodb::velocypack::Builder builder;
+
+  builder.openObject();
+  builder.add(
+    std::to_string(cid),
+    arangodb::velocypack::Value(arangodb::velocypack::ValueType::Null)
+  );
+
+  if (link.isObject()) {
+    builder.add(std::to_string(cid), link);
+  }
+
+  builder.close();
+
+  std::unordered_set<TRI_voc_cid_t> collections;
+  auto result = updateLinks(collections, *vocbase, *this, builder.slice());
+
+  if (result.ok()) {
+    WriteMutex mutex(_mutex); // '_meta' can be asynchronously read
+    SCOPED_LOCK(mutex);
+
+    collections.insert(_meta._collections.begin(), _meta._collections.end());
+    validateLinks(collections, *vocbase, *this); // remove invalid cids (no such collection or no such link)
+    _meta._collections = std::move(collections);
+  }
+
+  return result;
+}
+
 /*static*/ IResearchView::ptr IResearchView::make(
     arangodb::LogicalView* view,
     arangodb::velocypack::Slice const& info,
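`link()` drives `updateLinks()` with a one-entry object keyed by the collection id, where a `null` value requests link removal and an object value requests creation or update (see the `!link.isObject() == remove only` doc comment added in the header below). A sketch of that request shape, using plain JSON text as a hypothetical stand-in for the velocypack builder:

```cpp
#include <cstdint>
#include <iostream>
#include <string>

// Builds the '{ "<cid>": null }' or '{ "<cid>": <definition> }' body that
// updateLinks() consumes; an empty definition means "remove the link".
// Plain JSON text for illustration only, not the velocypack API.
std::string makeLinkRequest(std::uint64_t cid, std::string const& definition) {
  std::string body = "{ \"" + std::to_string(cid) + "\": ";
  body += definition.empty() ? "null" : definition;
  body += " }";
  return body;
}

int main() {
  std::cout << makeLinkRequest(42, "") << '\n';                               // remove: { "42": null }
  std::cout << makeLinkRequest(42, "{ \"includeAllFields\": true }") << '\n'; // create/update
}
```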
@@ -1719,11 +1841,13 @@ bool IResearchView::sync(size_t maxMsec /*= 0*/) {

     LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
       << "starting pending memory-store sync for iResearch view '" << id() << "'";
+    _toFlush->_store._segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
     _toFlush->_store._writer->commit();

     {
       SCOPED_LOCK(_toFlush->_reopenMutex);
       _toFlush->_store._reader = _toFlush->_store._reader.reopen(); // update reader
+      _toFlush->_store._segmentCount += _toFlush->_store._reader.size(); // add committed segments
     }

     LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
@@ -1737,11 +1861,13 @@ bool IResearchView::sync(size_t maxMsec /*= 0*/) {
   if (_storePersisted) {
     LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
       << "starting persisted-store sync for iResearch view '" << id() << "'";
+    _storePersisted._segmentCount.store(0); // reset to zero to get count of new segments that appear during commit
     _storePersisted._writer->commit();

     {
       SCOPED_LOCK(_toFlush->_reopenMutex);
       _storePersisted._reader = _storePersisted._reader.reopen(); // update reader
+      _storePersisted._segmentCount += _storePersisted._reader.size(); // add committed segments
     }

     LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
@@ -1762,81 +1888,6 @@ bool IResearchView::sync(size_t maxMsec /*= 0*/) {
   return false;
 }

-bool IResearchView::sync(SyncState& state, size_t maxMsec /*= 0*/) {
-  char runId = 0; // value not used
-  auto thresholdMsec = TRI_microtime() * 1000 + maxMsec;
-  ReadMutex mutex(_mutex); // '_memoryStore'/'_storePersisted' can be asynchronously modified
-
-  LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-    << "starting flush for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-  // .............................................................................
-  // apply consolidation policies
-  // .............................................................................
-  for (auto& entry: state._consolidationPolicies) {
-    if (!entry._intervalStep || ++entry._intervalCount < entry._intervalStep) {
-      continue; // skip if interval not reached or no valid policy to execute
-    }
-
-    entry._intervalCount = 0;
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "starting consolidation for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-    try {
-      SCOPED_LOCK(mutex);
-
-      auto& memoryStore = activeMemoryStore();
-      memoryStore._writer->consolidate(entry._policy, false);
-
-      if (_storePersisted) {
-        _storePersisted._writer->consolidate(entry._policy, false);
-      }
-    } catch (std::exception const& e) {
-      LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH)
-        << "caught exception while consolidating iResearch view '" << id() << "': " << e.what();
-      IR_EXCEPTION();
-    } catch (...) {
-      LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH)
-        << "caught exception while consolidating iResearch view '" << id() << "'";
-      IR_EXCEPTION();
-    }
-
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "finished consolidation for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-  }
-
-  // .............................................................................
-  // apply data store commit
-  // .............................................................................
-
-  LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-    << "starting commit for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-  auto res = sync(std::max(size_t(1), size_t(thresholdMsec - TRI_microtime() * 1000))); // set min 1 msec to enable early termination
-
-  LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-    << "finished commit for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-  // .............................................................................
-  // apply cleanup
-  // .............................................................................
-  if (state._cleanupIntervalStep && ++state._cleanupIntervalCount >= state._cleanupIntervalStep) {
-    state._cleanupIntervalCount = 0;
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "starting cleanup for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-    cleanup(std::max(size_t(1), size_t(thresholdMsec - TRI_microtime() * 1000))); // set min 1 msec to enable early termination
-
-    LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-      << "finished cleanup for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-  }
-
-  LOG_TOPIC(DEBUG, iresearch::IResearchFeature::IRESEARCH)
-    << "finished flush for iResearch view '" << id() << "' run id '" << size_t(&runId) << "'";
-
-  return res;
-}
-
 /*static*/ std::string const& IResearchView::type() noexcept {
   return VIEW_TYPE;
 }
@@ -1929,7 +1980,7 @@ arangodb::Result IResearchView::updateProperties(
     if (index && arangodb::Index::TRI_IDX_TYPE_IRESEARCH_LINK == index->type()) {
       auto* link = dynamic_cast<arangodb::iresearch::IResearchLink*>(index.get());

-      if (link && link->_defaultId == id() && !link->_view->get()) {
+      if (link && link->_defaultId == id() && !link->view()) {
        arangodb::velocypack::Builder linkBuilder;
        bool valid;

@@ -1996,6 +2047,7 @@ arangodb::Result IResearchView::updateProperties(

       try {
         storePersisted._reader = irs::directory_reader::open(*(storePersisted._directory));
+        storePersisted._segmentCount += storePersisted._reader.size(); // add committed segments (previously had 0)
         dropDataPath = _storePersisted ? srcDataPath.c_str() : nullptr;
       } catch (std::exception const& e) {
         LOG_TOPIC(WARN, iresearch::IResearchFeature::IRESEARCH)
@@ -239,6 +239,15 @@ class IResearchView final: public arangodb::ViewImplementation,
     IResearchLinkMeta const& meta
   );

+  ////////////////////////////////////////////////////////////////////////////////
+  /// @brief link the specified 'cid' to the view using the specified 'link'
+  ///        definition (!link.isObject() == remove only)
+  ////////////////////////////////////////////////////////////////////////////////
+  arangodb::Result link(
+    TRI_voc_cid_t cid,
+    arangodb::velocypack::Slice const link
+  );
+
  ///////////////////////////////////////////////////////////////////////////////
  /// @brief view factory
  /// @returns initialized view object
@@ -326,6 +335,7 @@ class IResearchView final: public arangodb::ViewImplementation,
   struct DataStore {
     irs::directory::ptr _directory;
     irs::directory_reader _reader;
+    std::atomic<size_t> _segmentCount{}; // total number of segments in the writer
     irs::index_writer::ptr _writer;
     DataStore() = default;
     DataStore(DataStore&& other) noexcept;
@@ -340,26 +350,6 @@ class IResearchView final: public arangodb::ViewImplementation,
     MemoryStore(); // initialize _directory and _writer during allocation
   };

-  struct SyncState {
-    struct PolicyState {
-      size_t _intervalCount;
-      size_t _intervalStep;
-
-      std::shared_ptr<irs::index_writer::consolidation_policy_t> _policy;
-      PolicyState(
-        size_t intervalStep,
-        const std::shared_ptr<irs::index_writer::consolidation_policy_t>& policy
-      );
-    };
-
-    size_t _cleanupIntervalCount;
-    size_t _cleanupIntervalStep;
-    std::vector<PolicyState> _consolidationPolicies;
-
-    SyncState() noexcept;
-    explicit SyncState(IResearchViewMeta::CommitMeta const& meta);
-  };
-
   struct TidStore {
     TidStore(
       transaction::Methods& trx,
@@ -393,14 +383,6 @@ class IResearchView final: public arangodb::ViewImplementation,
     arangodb::velocypack::Slice const& info
   );

-  ///////////////////////////////////////////////////////////////////////////////
-  /// @brief run cleaners on data directories to remove unused files
-  /// @param maxMsec try not to exceed the specified time, casues partial cleanup
-  ///                0 == full cleanup
-  /// @return success
-  ///////////////////////////////////////////////////////////////////////////////
-  bool cleanup(size_t maxMsec = 0);
-
  //////////////////////////////////////////////////////////////////////////////
  /// @brief Called in post-recovery to remove any dangling documents from old links
  //////////////////////////////////////////////////////////////////////////////
@@ -411,15 +393,6 @@ class IResearchView final: public arangodb::ViewImplementation,
  ////////////////////////////////////////////////////////////////////////////////
  int finish(TRI_voc_tid_t tid, bool commit);

-  ////////////////////////////////////////////////////////////////////////////////
-  /// @brief wait for a flush of all index data to its respective stores
-  /// @param meta configuraton to use for sync
-  /// @param maxMsec try not to exceed the specified time, casues partial sync
-  ///                0 == full sync
-  /// @return success
-  ////////////////////////////////////////////////////////////////////////////////
-  bool sync(SyncState& state, size_t maxMsec = 0);
-
  ////////////////////////////////////////////////////////////////////////////////
  /// @brief registers a callback for flush feature
  ////////////////////////////////////////////////////////////////////////////////
@@ -162,13 +162,13 @@ bool initCommitMeta(
     }

     static const ConsolidationPolicy& defaultPolicy = ConsolidationPolicy::DEFAULT(policyItr->second);
-    size_t intervalStep = 0;
+    size_t segmentThreshold = 0;

     {
       // optional size_t
-      static const std::string subFieldName("intervalStep");
+      static const std::string subFieldName("segmentThreshold");

-      if (!arangodb::iresearch::getNumber(intervalStep, value, subFieldName, tmpSeen, defaultPolicy.intervalStep())) {
+      if (!arangodb::iresearch::getNumber(segmentThreshold, value, subFieldName, tmpSeen, defaultPolicy.segmentThreshold())) {
         errorField = fieldName + "=>" + name + "=>" + subFieldName;

         return false;
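The rename keeps the parsing shape intact: `segmentThreshold` is an optional numeric field that falls back to the type's `DEFAULT(...)` policy value when absent. A generic sketch of that optional-with-default lookup, with a hypothetical string-keyed config map in place of the velocypack slice:

```cpp
#include <cstddef>
#include <iostream>
#include <map>
#include <string>

// Hypothetical helper mirroring the getNumber(out, value, field, seen, default)
// shape: use the configured value when present, the policy default otherwise.
bool getNumber(std::size_t& out, std::map<std::string, std::size_t> const& config,
               std::string const& field, bool& seen, std::size_t fallback) {
  auto it = config.find(field);
  seen = (it != config.end());
  out = seen ? it->second : fallback;
  return true; // a real parser would also reject non-numeric input here
}

int main() {
  std::map<std::string, std::size_t> config{{"segmentThreshold", 20}};
  std::size_t segmentThreshold = 0;
  bool seen = false;

  getNumber(segmentThreshold, config, "segmentThreshold", seen, 300);
  std::cout << segmentThreshold << (seen ? " (explicit)" : " (default)") << '\n'; // 20 (explicit)
}
```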
@@ -189,8 +189,8 @@ bool initCommitMeta(
     }

     // add only enabled policies
-    if (intervalStep) {
-      meta._consolidationPolicies.emplace_back(policyItr->second, intervalStep, threshold);
+    if (segmentThreshold) {
+      meta._consolidationPolicies.emplace_back(policyItr->second, segmentThreshold, threshold);
     }
   }
 }
@@ -226,7 +226,7 @@ bool jsonCommitMeta(
       arangodb::velocypack::ObjectBuilder subBuilderWrapper(&subBuilder);

       for (auto& policy: meta._consolidationPolicies) {
-        if (!policy.intervalStep()) {
+        if (!policy.segmentThreshold()) {
           continue; // do not output disabled consolidation policies
         }

@@ -238,7 +238,7 @@ bool jsonCommitMeta(
         {
           arangodb::velocypack::ObjectBuilder policyBuilderWrapper(&policyBuilder);

-          policyBuilderWrapper->add("intervalStep", arangodb::velocypack::Value(policy.intervalStep()));
+          policyBuilderWrapper->add("segmentThreshold", arangodb::velocypack::Value(policy.segmentThreshold()));
           policyBuilderWrapper->add("threshold", arangodb::velocypack::Value(policy.threshold()));
         }

@@ -260,11 +260,11 @@ NS_BEGIN(iresearch)
 size_t IResearchViewMeta::CommitMeta::ConsolidationPolicy::Hash::operator()(
   IResearchViewMeta::CommitMeta::ConsolidationPolicy const& value
 ) const {
-  auto step = value.intervalStep();
+  auto segmentThreshold = value.segmentThreshold();
   auto threshold = value.threshold();
   auto type = value.type();

-  return std::hash<decltype(step)>{}(step)
+  return std::hash<decltype(segmentThreshold)>{}(segmentThreshold)
    ^ std::hash<decltype(threshold)>{}(threshold)
    ^ std::hash<size_t>{}(size_t(type))
    ;
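The hash stays a plain XOR of the member hashes, only over the renamed field. The same combiner on loose values, for reference; note that XOR is cheap but symmetric, so a shift-and-mix combiner (boost-style `hash_combine`) would be a hardening, not what this code does:

```cpp
#include <cstddef>
#include <functional>
#include <iostream>

// Same XOR combiner as the ConsolidationPolicy hash above, on plain values.
std::size_t policyHash(std::size_t segmentThreshold, float threshold, std::size_t type) {
  return std::hash<std::size_t>{}(segmentThreshold)
    ^ std::hash<float>{}(threshold)
    ^ std::hash<std::size_t>{}(type);
}

int main() {
  std::cout << policyHash(300, 0.85f, 0) << '\n';
  // XOR symmetry caveat: for equal-width members, (a ^ b) == (b ^ a),
  // so swapped field values can collide in the same bucket
}
```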
@@ -272,9 +272,9 @@ size_t IResearchViewMeta::CommitMeta::ConsolidationPolicy::Hash::operator()(

 IResearchViewMeta::CommitMeta::ConsolidationPolicy::ConsolidationPolicy(
   IResearchViewMeta::CommitMeta::ConsolidationPolicy::Type type,
-  size_t intervalStep,
+  size_t segmentThreshold,
   float threshold
-): _intervalStep(intervalStep), _threshold(threshold), _type(type) {
+): _segmentThreshold(segmentThreshold), _threshold(threshold), _type(type) {
   switch (type) {
     case Type::BYTES:
       _policy = irs::index_utils::consolidate_bytes(_threshold);
|
||||||
IResearchViewMeta::CommitMeta::ConsolidationPolicy const& other
|
IResearchViewMeta::CommitMeta::ConsolidationPolicy const& other
|
||||||
) {
|
) {
|
||||||
if (this != &other) {
|
if (this != &other) {
|
||||||
_intervalStep = other._intervalStep;
|
_segmentThreshold = other._segmentThreshold;
|
||||||
_policy = other._policy;
|
_policy = other._policy;
|
||||||
_threshold = other._threshold;
|
_threshold = other._threshold;
|
||||||
_type = other._type;
|
_type = other._type;
|
||||||
|
@@ -324,7 +324,7 @@ IResearchViewMeta::CommitMeta::ConsolidationPolicy& IResearchViewMeta::CommitMet
   IResearchViewMeta::CommitMeta::ConsolidationPolicy&& other
 ) noexcept {
   if (this != &other) {
-    _intervalStep = std::move(other._intervalStep);
+    _segmentThreshold = std::move(other._segmentThreshold);
     _policy = std::move(other._policy);
     _threshold = std::move(other._threshold);
     _type = std::move(other._type);
@@ -337,7 +337,7 @@ bool IResearchViewMeta::CommitMeta::ConsolidationPolicy::operator==(
   IResearchViewMeta::CommitMeta::ConsolidationPolicy const& other
 ) const noexcept {
   return _type == other._type
-    && _intervalStep == other._intervalStep
+    && _segmentThreshold == other._segmentThreshold
     && _threshold == other._threshold
     ;
 }
@@ -348,22 +348,22 @@ bool IResearchViewMeta::CommitMeta::ConsolidationPolicy::operator==(
   switch (type) {
     case Type::BYTES:
     {
-      static const ConsolidationPolicy policy(type, 10, 0.85f);
+      static const ConsolidationPolicy policy(type, 300, 0.85f);
       return policy;
     }
     case Type::BYTES_ACCUM:
     {
-      static const ConsolidationPolicy policy(type, 10, 0.85f);
+      static const ConsolidationPolicy policy(type, 300, 0.85f);
       return policy;
     }
     case Type::COUNT:
     {
-      static const ConsolidationPolicy policy(type, 10, 0.85f);
+      static const ConsolidationPolicy policy(type, 300, 0.85f);
       return policy;
     }
     case Type::FILL:
     {
-      static const ConsolidationPolicy policy(type, 10, 0.85f);
+      static const ConsolidationPolicy policy(type, 300, 0.85f);
       return policy;
     }
     default:
@@ -373,8 +373,8 @@ bool IResearchViewMeta::CommitMeta::ConsolidationPolicy::operator==(
   }
 }

-size_t IResearchViewMeta::CommitMeta::ConsolidationPolicy::intervalStep() const noexcept {
-  return _intervalStep;
+size_t IResearchViewMeta::CommitMeta::ConsolidationPolicy::segmentThreshold() const noexcept {
+  return _segmentThreshold;
 }

 irs::index_writer::consolidation_policy_t const& IResearchViewMeta::CommitMeta::ConsolidationPolicy::policy() const noexcept {
@@ -74,21 +74,21 @@ struct IResearchViewMeta {
       FILL, // {threshold} > #segment_docs{valid} / (#segment_docs{valid} + #segment_docs{removed})
     };

-    ConsolidationPolicy(Type type, size_t intervalStep, float threshold);
+    ConsolidationPolicy(Type type, size_t segmentThreshold, float threshold);
     ConsolidationPolicy(ConsolidationPolicy const& other);
     ConsolidationPolicy(ConsolidationPolicy&& other) noexcept;
     ConsolidationPolicy& operator=(ConsolidationPolicy const& other);
     ConsolidationPolicy& operator=(ConsolidationPolicy&& other) noexcept;
     bool operator==(ConsolidationPolicy const& other) const noexcept;
     static const ConsolidationPolicy& DEFAULT(Type type); // default values for a given type
-    size_t intervalStep() const noexcept;
     irs::index_writer::consolidation_policy_t const& policy() const noexcept;
+    size_t segmentThreshold() const noexcept;
     float threshold() const noexcept;
     Type type() const noexcept;

    private:
-    size_t _intervalStep; // apply consolidation policy with every Nth commit (0 == disable)
     irs::index_writer::consolidation_policy_t _policy;
+    size_t _segmentThreshold; // apply policy if number of segments is >= value (0 == disable)
     float _threshold; // consolidation policy threshold
     Type _type;
   };
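The comment on `_segmentThreshold` captures the semantic change: `intervalStep` fired a policy on every Nth commit, while `segmentThreshold` gates it on writer state, running consolidation only once a store holds at least that many segments, with 0 disabling the policy. A minimal sketch of the new gate, assuming the `>=` comparison stated in the comment:

```cpp
#include <cstddef>
#include <iostream>

// New-style gate: run consolidation only when the store already holds
// 'segmentThreshold' or more segments; a threshold of 0 disables the policy.
bool shouldConsolidate(std::size_t segmentCount, std::size_t segmentThreshold) {
  return segmentThreshold != 0 && segmentCount >= segmentThreshold;
}

int main() {
  std::cout << shouldConsolidate(299, 300) << '\n'; // 0: not enough segments yet
  std::cout << shouldConsolidate(300, 300) << '\n'; // 1: policy may run
  std::cout << shouldConsolidate(300, 0) << '\n';   // 0: disabled
}
```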
@@ -93,22 +93,22 @@ function iResearchFeatureAqlTestSuite () {
       assertTrue(Object === properties.commit.consolidate.constructor);
       assertEqual(4, Object.keys(properties.commit.consolidate).length);
       assertTrue(Object === properties.commit.consolidate.bytes.constructor);
-      assertEqual(10, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
       assertTrue(Object === properties.commit.consolidate.bytes_accum.constructor);
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
       assertTrue(Object === properties.commit.consolidate.count.constructor);
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertTrue(Object === properties.commit.consolidate.fill.constructor);
-      assertEqual(10, properties.commit.consolidate.fill.intervalStep);
+      assertEqual(300, properties.commit.consolidate.fill.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.fill.threshold.toFixed(6));

       meta = { commit: {
         commitIntervalMsec: 10000,
         consolidate: {
-          bytes: { intervalStep: 20, threshold: 0.5 },
+          bytes: { segmentThreshold: 20, threshold: 0.5 },
           bytes_accum: {},
           count: {}
         }
@@ -122,18 +122,18 @@ function iResearchFeatureAqlTestSuite () {
       assertTrue(Object === properties.commit.consolidate.constructor);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
       assertTrue(Object === properties.commit.consolidate.bytes.constructor);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
       assertTrue(Object === properties.commit.consolidate.bytes_accum.constructor);
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
       assertTrue(Object === properties.commit.consolidate.count.constructor);
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));

       meta = { commit: {
         cleanupIntervalStep: 20,
-        consolidate: { count: { intervalStep: 30, threshold: 0.75 } }
+        consolidate: { count: { segmentThreshold: 30, threshold: 0.75 } }
       } };
       view.properties(meta, false); // full update
       properties = view.properties();
@@ -144,7 +144,7 @@ function iResearchFeatureAqlTestSuite () {
       assertTrue(Object === properties.commit.consolidate.constructor);
       assertEqual(1, Object.keys(properties.commit.consolidate).length);
       assertTrue(Object === properties.commit.consolidate.count.constructor);
-      assertEqual(30, properties.commit.consolidate.count.intervalStep);
+      assertEqual(30, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.75).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));

@@ -465,7 +465,7 @@ function iResearchFeatureAqlTestSuite () {
         commit: {
           commitIntervalMsec: 10000,
           consolidate: {
-            bytes: { intervalStep: 20, threshold: 0.5 },
+            bytes: { segmentThreshold: 20, threshold: 0.5 },
             bytes_accum: {},
             count: {}
           }
@@ -484,11 +484,11 @@ function iResearchFeatureAqlTestSuite () {
       assertEqual(10000, properties.commit.commitIntervalMsec);
       assertEqual(5000, properties.commit.commitTimeoutMsec);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertEqual("TestPath", properties.dataPath);
       assertEqual("de_DE.UTF-8", properties.locale);
@@ -518,7 +518,7 @@ function iResearchFeatureAqlTestSuite () {
         commit: {
           commitIntervalMsec: 10000,
           consolidate: {
-            bytes: { intervalStep: 20, threshold: 0.5 },
+            bytes: { segmentThreshold: 20, threshold: 0.5 },
             bytes_accum: {},
             count: {}
           }
@@ -541,11 +541,11 @@ function iResearchFeatureAqlTestSuite () {
       assertEqual(10000, properties.commit.commitIntervalMsec);
       assertEqual(5000, properties.commit.commitTimeoutMsec);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertEqual("TestPath", properties.dataPath);
       assertEqual("de_DE.UTF-8", properties.locale);
@@ -575,7 +575,7 @@ function iResearchFeatureAqlTestSuite () {
         commit: {
           commitIntervalMsec: 10000,
           consolidate: {
-            bytes: { intervalStep: 20, threshold: 0.5 },
+            bytes: { segmentThreshold: 20, threshold: 0.5 },
             bytes_accum: {},
             count: {}
           }
@@ -598,11 +598,11 @@ function iResearchFeatureAqlTestSuite () {
       assertEqual(10000, properties.commit.commitIntervalMsec);
       assertEqual(5000, properties.commit.commitTimeoutMsec);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertEqual("TestPath", properties.dataPath);
       assertEqual("de_DE.UTF-8", properties.locale);
@@ -635,7 +635,7 @@ function iResearchFeatureAqlTestSuite () {
         commit: {
           commitIntervalMsec: 10000,
           consolidate: {
-            bytes: { intervalStep: 20, threshold: 0.5 },
+            bytes: { segmentThreshold: 20, threshold: 0.5 },
             bytes_accum: {},
             count: {}
           }
@@ -658,11 +658,11 @@ function iResearchFeatureAqlTestSuite () {
       assertEqual(10000, properties.commit.commitIntervalMsec);
       assertEqual(5000, properties.commit.commitTimeoutMsec);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertEqual("TestPath", properties.dataPath);
       assertEqual("de_DE.UTF-8", properties.locale);
@@ -298,10 +298,15 @@ function iResearchAqlTestSuite () {
     // },

     testPhraseFilter : function () {
-      var result = AQL_EXECUTE("FOR doc IN VIEW UnitTestsView FILTER PHRASE(doc.text, 'quick brown fox jumps', 'text_en') RETURN doc", null, { waitForSync: true }).json;
-
-      assertEqual(result.length, 1);
-      assertEqual(result[0].name, 'full');
+      var result0 = AQL_EXECUTE("FOR doc IN VIEW UnitTestsView FILTER PHRASE(doc.text, 'quick brown fox jumps', 'text_en') RETURN doc", null, { waitForSync: true }).json;
+
+      assertEqual(result0.length, 1);
+      assertEqual(result0[0].name, 'full');
+
+      var result1 = AQL_EXECUTE("FOR doc IN VIEW UnitTestsView FILTER PHRASE(doc.text, [ 'quick brown fox jumps' ], 'text_en') RETURN doc", null, { waitForSync: true }).json;
+
+      assertEqual(result1.length, 1);
+      assertEqual(result1[0].name, 'full');
     },

     testExistsFilter : function () {
@@ -75,9 +75,8 @@ function recoverySuite () {
       assertTrue(p.hasOwnProperty('UnitTestsRecoveryDummy'));
       assertTrue(p.UnitTestsRecoveryDummy.includeAllFields);

-      // FIXME uncomment when rocksdb recovery will be fixed
-      // var result = AQL_EXECUTE("FOR doc IN VIEW UnitTestsRecoveryView FILTER doc.c >= 0 COLLECT WITH COUNT INTO length RETURN length", null, { }).json;
-      // assertEqual(result[0], 10000);
+      var result = AQL_EXECUTE("FOR doc IN VIEW UnitTestsRecoveryView FILTER doc.c >= 0 COLLECT WITH COUNT INTO length RETURN length", null, { }).json;
+      assertEqual(result[0], 10000);
     }

   };
@@ -51,7 +51,7 @@ function runSetup () {
     commit: {
       commitIntervalMsec: 10000,
       consolidate: {
-        bytes: { intervalStep: 20, threshold: 0.5 },
+        bytes: { segmentThreshold: 20, threshold: 0.5 },
        bytes_accum: {},
        count: {}
      }
@@ -91,9 +91,8 @@ function recoverySuite () {
       assertTrue(p.hasOwnProperty('UnitTestsRecoveryDummy'));
       assertTrue(p.UnitTestsRecoveryDummy.includeAllFields);

-      // uncomment when rocksdb recovery will be fixed
-      // var result = AQL_EXECUTE("FOR doc IN VIEW UnitTestsRecoveryView FILTER doc.c >= 0 COLLECT WITH COUNT INTO length RETURN length", null, { }).json;
-      // assertEqual(result[0], 10000);
+      var result = AQL_EXECUTE("FOR doc IN VIEW UnitTestsRecoveryView FILTER doc.c >= 0 COLLECT WITH COUNT INTO length RETURN length", null, { }).json;
+      assertEqual(result[0], 10000);

      // validate state
      var properties = v.properties();
@@ -101,11 +100,11 @@ function recoverySuite () {
       assertEqual(10000, properties.commit.commitIntervalMsec);
       assertEqual(5000, properties.commit.commitTimeoutMsec);
       assertEqual(3, Object.keys(properties.commit.consolidate).length);
-      assertEqual(20, properties.commit.consolidate.bytes.intervalStep);
+      assertEqual(20, properties.commit.consolidate.bytes.segmentThreshold);
       assertEqual((0.5).toFixed(6), properties.commit.consolidate.bytes.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.bytes_accum.intervalStep);
+      assertEqual(300, properties.commit.consolidate.bytes_accum.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.bytes_accum.threshold.toFixed(6));
-      assertEqual(10, properties.commit.consolidate.count.intervalStep);
+      assertEqual(300, properties.commit.consolidate.count.segmentThreshold);
       assertEqual((0.85).toFixed(6), properties.commit.consolidate.count.threshold.toFixed(6));
       assertEqual("TestPath", properties.dataPath);
       assertEqual("de_DE.UTF-8", properties.locale);
@@ -38,6 +38,8 @@ if [ -z "${ARANGOSH}" ]; then
     ARANGOSH="build/bin/arangosh${EXT}"
   elif [ -x bin/arangosh ]; then
     ARANGOSH="bin/arangosh${EXT}"
+  elif [ -x arangosh ]; then
+    ARANGOSH="arangosh${EXT}"
   elif [ -x usr/bin/arangosh ]; then
     ARANGOSH="usr/bin/arangosh${EXT}"
   else
@@ -1248,6 +1248,8 @@ SECTION("Phrase") {
   assertFilterFail("FOR d IN VIEW myView FILTER phrase(false, 'quick', 'test_analyzer') RETURN d");

   // invalid input
+  assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ ], 'test_analyzer') RETURN d");
+  assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ ], 'test_analyzer') RETURN d");
   assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 1, \"abc\" ], 'test_analyzer') RETURN d");
   assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 1, \"abc\" ], 'test_analyzer') RETURN d");
   assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, true, 'test_analyzer') RETURN d");
@ -1318,6 +1320,7 @@ SECTION("Phrase") {
|
||||||
phrase.push_back("q").push_back("u").push_back("i").push_back("c").push_back("k");
|
phrase.push_back("q").push_back("u").push_back("i").push_back("c").push_back("k");
|
||||||
|
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[42], 'quick', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[42], 'quick', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[42], [ 'quick' ], 'test_analyzer') RETURN d", expected);
|
||||||
}
|
}
|
||||||
|
|
||||||
// without offset, custom analyzer, expressions
|
// without offset, custom analyzer, expressions
|
||||||
|
@ -1334,8 +1337,12 @@ SECTION("Phrase") {
|
||||||
|
|
||||||
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, CONCAT(value,'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, CONCAT(value,'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, [ CONCAT(value,'ck') ], CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], [ CONCAT(value, 'ck') ], CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], CONCAT(value, 'ck'), CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, [ CONCAT(value, 'ck') ], CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], [ CONCAT(value, 'ck') ], CONCAT(analyzer, 'analyzer')) RETURN d", expected, &ctx);
|
||||||
}
|
}
|
||||||
|
|
||||||
// without offset, custom analyzer, invalid expressions
|
// without offset, custom analyzer, invalid expressions
|
||||||
|
@ -1347,8 +1354,12 @@ SECTION("Phrase") {
|
||||||
|
|
||||||
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, CONCAT(value,'ck'), analyzer) RETURN d", &ctx);
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, CONCAT(value,'ck'), analyzer) RETURN d", &ctx);
|
||||||
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d.name, [ CONCAT(value,'ck') ], analyzer) RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phrase(d['name'], [ CONCAT(value, 'ck') ], analyzer) RETURN d", &ctx);
|
||||||
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
||||||
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], CONCAT(value, 'ck'), analyzer) RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d.name, [ CONCAT(value, 'ck') ], analyzer) RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET value='qui' LET analyzer='test_' FOR d IN VIEW myView FILTER phRase(d['name'], [ CONCAT(value, 'ck') ], analyzer) RETURN d", &ctx);
|
||||||
}
|
}
|
||||||
|
|
||||||
// with offset, custom analyzer
|
// with offset, custom analyzer
|
||||||
|
@ -1363,6 +1374,9 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0.0, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0.0, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 0.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 0.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
|
||||||
// wrong offset argument
|
// wrong offset argument
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', '0', 'brown', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', '0', 'brown', 'test_analyzer') RETURN d");
|
||||||
|
@ -1370,6 +1384,11 @@ SECTION("Phrase") {
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', true, 'brown', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', true, 'brown', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', false, 'brown', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', false, 'brown', 'test_analyzer') RETURN d");
|
||||||
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', d.name, 'brown', 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', d.name, 'brown', 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', '0', 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', null, 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', true, 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', false, 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', d.name, 'brown' ], 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
}
|
}
|
||||||
|
|
||||||
// with offset, complex name, custom analyzer
|
// with offset, complex name, custom analyzer
|
||||||
|
@ -1387,6 +1406,12 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj['name'], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj['name'], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.name, 'quick', 5.6, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.name, 'quick', 5.6, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['name'], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['name'], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['name'], [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.name, [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.name, [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj['name'], [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.name, [ 'quick', 5.6, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['name'], [ 'quick', 5.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
}
|
}
|
||||||
|
|
||||||
// with offset, complex name with offset, custom analyzer
|
// with offset, complex name with offset, custom analyzer
|
||||||
|
@ -1404,6 +1429,12 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3]['name'][1], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3]['name'][1], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3].name[1], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3].name[1], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj'][3]['name'][1], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj'][3]['name'][1], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj'][3].name[1], [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3].name[1], [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3].name[1], [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3]['name'][1], [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj[3].name[1], [ 'quick', 5.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj'][3]['name'][1], [ 'quick', 5.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
}
|
}
|
||||||
|
|
||||||
// with offset, complex name, custom analyzer
|
// with offset, complex name, custom analyzer
|
||||||
|
@ -1421,6 +1452,12 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj['name'][100], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj['name'][100], 'quick', 5.0, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj.name[100], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj.name[100], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5]['obj']['name'][100], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5]['obj']['name'][100], 'quick', 5.5, 'brown', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5]['obj'].name[100], [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj.name[100], [ 'quick', 5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj.name[100], [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj['name'][100], [ 'quick', 5.0, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5].obj.name[100], [ 'quick', 5.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d[5]['obj']['name'][100], [ 'quick', 5.5, 'brown' ], 'test_analyzer') RETURN d", expected);
|
||||||
}
|
}
|
||||||
|
|
||||||
// multiple offsets, complex name, custom analyzer
|
// multiple offsets, complex name, custom analyzer
|
||||||
|
@ -1443,6 +1480,15 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', 2.5, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', 2.5, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
||||||
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['properties']['id']['name'], 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['properties']['id']['name'], 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3.0, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id['name'], [ 'quick', 3.0, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3.6, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj['properties'].id.name, [ 'quick', 3.6, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', 2.0, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', 2.5, 'fox', 0.0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps' ], 'test_analyzer') RETURN d", expected);
|
||||||
|
assertFilterSuccess("FOR d IN VIEW myView FILTER phrase(d['obj']['properties']['id']['name'], [ 'quick', 3.2, 'brown', 2.0, 'fox', 0.0, 'jumps'] , 'test_analyzer') RETURN d", expected);
|
||||||
|
|
||||||
// wrong value
|
// wrong value
|
||||||
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, d.brown, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, d.brown, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
@ -1452,12 +1498,23 @@ SECTION("Phrase") {
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, true, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, true, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, false, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, false, 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', 2, 'fox', 0, d, 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', 2, 'fox', 0, d, 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, d.brown, 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 2, 2, 'fox', 0, 'jumps'] , 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 2.5, 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, null, 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, true, 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, false, 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', 2, 'fox', 0, d ], 'test_analyzer') RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
|
||||||
// wrong offset argument
|
// wrong offset argument
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', '2', 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', '2', 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', null, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', null, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', true, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', true, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', false, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', false, 'fox', 0, 'jumps', 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', '2', 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', null, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', true, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', false, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d");
|
||||||
}
|
}
|
||||||
|
|
||||||
// multiple offsets, complex name, custom analyzer, expressions
|
// multiple offsets, complex name, custom analyzer, expressions
|
||||||
|
@ -1481,6 +1538,12 @@ SECTION("Phrase") {
|
||||||
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3.6, 'brown', 2, 'fox', offset-2, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3.6, 'brown', 2, 'fox', offset-2, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj['properties'].id.name, 'quick', 3.6, CONCAT(input, 'wn'), 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj['properties'].id.name, 'quick', 3.6, CONCAT(input, 'wn'), 2, 'fox', 0, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', offset+0.5, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', 3, 'brown', offset+0.5, 'fox', 0.0, 'jumps', 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', offset+1, CONCAT(input, 'wn'), offset, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', offset + 1.5, 'brown', 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id['name'], [ 'quick', 3.0, 'brown', offset, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3.6, 'brown', 2, 'fox', offset-2, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj['properties'].id.name, [ 'quick', 3.6, CONCAT(input, 'wn'), 2, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
|
assertFilterSuccess("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', 3, 'brown', offset+0.5, 'fox', 0.0, 'jumps' ], 'test_analyzer') RETURN d", expected, &ctx);
|
||||||
}
|
}
|
||||||
|
|
||||||
// multiple offsets, complex name, custom analyzer, invalid expressions
|
// multiple offsets, complex name, custom analyzer, invalid expressions
|
||||||
|
@ -1494,6 +1557,10 @@ SECTION("Phrase") {
|
||||||
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', offset + 1.5, 'brown', TO_STRING(2), 'fox', 0, 'jumps', 'test_analyzer') RETURN d", &ctx);
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', offset + 1.5, 'brown', TO_STRING(2), 'fox', 0, 'jumps', 'test_analyzer') RETURN d", &ctx);
|
||||||
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id['name'], 'quick', 3.0, 'brown', offset, 'fox', 0, 'jumps', TO_BOOL('test_analyzer')) RETURN d", &ctx);
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id['name'], 'quick', 3.0, 'brown', offset, 'fox', 0, 'jumps', TO_BOOL('test_analyzer')) RETURN d", &ctx);
|
||||||
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', TO_BOOL(3.6), 'brown', 2, 'fox', offset-2, 'jumps', 'test_analyzer') RETURN d", &ctx);
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, 'quick', TO_BOOL(3.6), 'brown', 2, 'fox', offset-2, 'jumps', 'test_analyzer') RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', TO_BOOL(offset+1), CONCAT(input, 'wn'), offset, 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', offset + 1.5, 'brown', TO_STRING(2), 'fox', 0, 'jumps' ], 'test_analyzer') RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id['name'], [ 'quick', 3.0, 'brown', offset, 'fox', 0, 'jumps' ], TO_BOOL('test_analyzer')) RETURN d", &ctx);
|
||||||
|
assertFilterExecutionFail("LET offset=2 LET input='bro' FOR d IN VIEW myView FILTER phrase(d.obj.properties.id.name, [ 'quick', TO_BOOL(3.6), 'brown', 2, 'fox', offset-2, 'jumps' ], 'test_analyzer') RETURN d", &ctx);
|
||||||
}
|
}
|
||||||
|
|
||||||
// invalid analyzer
|
// invalid analyzer
|
||||||
|
@ -1509,6 +1576,18 @@ SECTION("Phrase") {
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], 'quick', { \"a\": 7, \"b\": \"c\" }) RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], 'quick', { \"a\": 7, \"b\": \"c\" }) RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 'invalid_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 'invalid_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], 'quick', 'invalid_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], 'quick', 'invalid_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], [ 1, \"abc\" ]) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], [ 1, \"abc\" ]) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], true) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], false) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], null) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], null) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], 3.14) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], 1234) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], { \"a\": 7, \"b\": \"c\" }) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], { \"a\": 7, \"b\": \"c\" }) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], 'invalid_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d['name'], [ 'quick' ], 'invalid_analyzer') RETURN d");
|
||||||
|
|
||||||
// wrong analylzer
|
// wrong analylzer
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', ['d']) RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', ['d']) RETURN d");
|
||||||
|
@ -1527,12 +1606,32 @@ SECTION("Phrase") {
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', false) RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', false) RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', null) RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', null) RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', 'invalidAnalyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 3, 'brown', 'invalidAnalyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], ['d']) RETURN d");
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], [d]) RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], d) RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], 3) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], 3.0) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], true) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], false) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], null) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick' ], 'invalidAnalyzer') RETURN d");
|
||||||
|
assertFilterExecutionFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], d) RETURN d", &ExpressionContextMock::EMPTY);
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], 3) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], 3.0) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], true) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], false) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], null) RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 3, 'brown' ], 'invalidAnalyzer') RETURN d");
|
||||||
|
|
||||||
// non-deterministic arguments
|
// non-deterministic arguments
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d[RAND() ? 'name' : 0], 'quick', 0, 'brown', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d[RAND() ? 'name' : 0], 'quick', 0, 'brown', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, RAND() ? 'quick' : 'slow', 0, 'brown', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, RAND() ? 'quick' : 'slow', 0, 'brown', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, RAND() ? 'brown' : 'red', 'test_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, RAND() ? 'brown' : 'red', 'test_analyzer') RETURN d");
|
||||||
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, 'brown', RAND() ? 'test_analyzer' : 'invalid_analyzer') RETURN d");
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, 'quick', 0, 'brown', RAND() ? 'test_analyzer' : 'invalid_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d[RAND() ? 'name' : 0], [ 'quick', 0, 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ RAND() ? 'quick' : 'slow', 0, 'brown' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 0, RAND() ? 'brown' : 'red' ], 'test_analyzer') RETURN d");
|
||||||
|
assertFilterFail("FOR d IN VIEW myView FILTER phrase(d.name, [ 'quick', 0, 'brown' ], RAND() ? 'test_analyzer' : 'invalid_analyzer') RETURN d");
|
||||||
}
|
}
|
||||||
|
|
||||||
SECTION("StartsWith") {
|
SECTION("StartsWith") {
|
||||||
|
|
|
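// A minimal arangosh sketch (not part of the diff): the assertions above add an
// array form of the AQL PHRASE() function, where phrase parts and the numeric
// inter-word offsets are passed as one array instead of as varargs. Assuming a
// view "myView" and an analyzer "test_analyzer" exist, both spellings are
// expected to build the same filter:
var varargsForm = db._query(
  "FOR d IN VIEW myView FILTER PHRASE(d.name, 'quick', 1, 'fox', 'test_analyzer') RETURN d"
).toArray();
var arrayForm = db._query(
  "FOR d IN VIEW myView FILTER PHRASE(d.name, [ 'quick', 1, 'fox' ], 'test_analyzer') RETURN d"
).toArray();
print(varargsForm.length === arrayForm.length); // the numeric offsets count terms allowed between words
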
@@ -422,6 +422,24 @@ TEST_CASE("IResearchQueryTestPhrase", "[iresearch][iresearch-query]") {
 CHECK((i == expected.size()));
 }

+// test invalid input type (empty-array)
+{
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d.value, [ ], 'identity') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_QUERY_PARSE == result.code);
+}
+
+// test invalid input type (empty-array) via []
+{
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d['value'], [ ], 'identity') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_QUERY_PARSE == result.code);
+}
+
 // test invalid input type (array)
 {
 auto result = arangodb::tests::executeQuery(

@@ -797,6 +815,166 @@ TEST_CASE("IResearchQueryTestPhrase", "[iresearch][iresearch-query]") {

 CHECK((i == expected.size()));
 }

+// test custom analyzer with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+insertedDocs[7].slice(),
+insertedDocs[8].slice(),
+insertedDocs[13].slice(),
+insertedDocs[19].slice(),
+insertedDocs[22].slice(),
+insertedDocs[24].slice(),
+insertedDocs[29].slice(),
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d.duplicated, [ 'z' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
+
+// test custom analyzer via [] with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+insertedDocs[7].slice(),
+insertedDocs[8].slice(),
+insertedDocs[13].slice(),
+insertedDocs[19].slice(),
+insertedDocs[22].slice(),
+insertedDocs[24].slice(),
+insertedDocs[29].slice(),
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d['duplicated'], [ 'z' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
+
+// test custom analyzer with offsets with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+insertedDocs[7].slice(),
+insertedDocs[8].slice(),
+insertedDocs[13].slice(),
+insertedDocs[19].slice(),
+insertedDocs[22].slice(),
+insertedDocs[24].slice(),
+insertedDocs[29].slice(),
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d.duplicated, [ 'v', 1, 'z' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
+
+// test custom analyzer with offsets via [] with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+insertedDocs[7].slice(),
+insertedDocs[8].slice(),
+insertedDocs[13].slice(),
+insertedDocs[19].slice(),
+insertedDocs[22].slice(),
+insertedDocs[24].slice(),
+insertedDocs[29].slice(),
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d['duplicated'], [ 'v', 2, 'c' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
+
+// test custom analyzer with offsets (no match) with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d.duplicated, [ 'v', 0, 'z' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
+
+// test custom analyzer with offsets (no match) via [] with [ phrase ] arg
+{
+std::vector<arangodb::velocypack::Slice> expected = {
+};
+auto result = arangodb::tests::executeQuery(
+vocbase,
+"FOR d IN VIEW testView FILTER PHRASE(d['duplicated'], [ 'v', 1, 'c' ], 'test_analyzer') SORT BM25(d) ASC, TFIDF(d) DESC, d.seq RETURN d"
+);
+REQUIRE(TRI_ERROR_NO_ERROR == result.code);
+auto slice = result.result->slice();
+CHECK(slice.isArray());
+size_t i = 0;
+
+for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
+auto const resolved = itr.value().resolveExternals();
+CHECK((i < expected.size()));
+CHECK((0 == arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true)));
+}
+
+CHECK((i == expected.size()));
+}
 }

 // -----------------------------------------------------------------------------

@@ -778,6 +778,251 @@ SECTION("test_insert") {
 }
 }

+SECTION("test_link") {
+auto collectionJson = arangodb::velocypack::Parser::fromJson("{ \"name\": \"testCollection\", \"id\": 100 }");
+auto viewJson = arangodb::velocypack::Parser::fromJson("{ \"name\": \"testView\", \"type\": \"arangosearch\" }");
+
+// drop invalid collection
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+
+{
+CHECK((true == viewImpl->link(100, arangodb::velocypack::Slice::nullSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+}
+
+// drop non-exiting
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto* logicalCollection = vocbase.createCollection(collectionJson->slice());
+CHECK((nullptr != logicalCollection));
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+
+{
+CHECK((true == viewImpl->link(logicalCollection->cid(), arangodb::velocypack::Slice::nullSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+}
+
+// drop exiting
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto* logicalCollection = vocbase.createCollection(collectionJson->slice());
+CHECK((nullptr != logicalCollection));
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+auto links = arangodb::velocypack::Parser::fromJson("{ \
+\"links\": { \"testCollection\": {} } \
+}");
+CHECK((true == logicalView->updateProperties(links->slice(), true, false).ok()));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((1 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+}
+
+{
+CHECK((true == viewImpl->link(logicalCollection->cid(), arangodb::velocypack::Slice::nullSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+CHECK((true == logicalCollection->getIndexes().empty()));
+}
+}
+
+// drop invalid collection + recreate
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+
+{
+CHECK((false == viewImpl->link(100, arangodb::iresearch::emptyObjectSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+}
+}
+
+// drop non-existing + recreate
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto* logicalCollection = vocbase.createCollection(collectionJson->slice());
+CHECK((nullptr != logicalCollection));
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((0 == cids.size()));
+CHECK((true == logicalCollection->getIndexes().empty()));
+}
+
+{
+CHECK((true == viewImpl->link(logicalCollection->cid(), arangodb::iresearch::emptyObjectSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+std::unordered_set<TRI_voc_cid_t> expected = { 100 };
+
+for (auto& cid: expected) {
+CHECK((1 == cids.erase(cid)));
+}
+
+CHECK((0 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+}
+}
+
+// drop existing + recreate
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto* logicalCollection = vocbase.createCollection(collectionJson->slice());
+CHECK((nullptr != logicalCollection));
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+auto links = arangodb::velocypack::Parser::fromJson("{ \
+\"links\": { \"testCollection\": { \"includeAllFields\": true } } \
+}");
+CHECK((true == logicalView->updateProperties(links->slice(), true, false).ok()));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((1 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+auto link = logicalCollection->getIndexes()[0]->toVelocyPack(true, false);
+arangodb::iresearch::IResearchLinkMeta linkMeta;
+std::string error;
+CHECK((linkMeta.init(link->slice(), error) && true == linkMeta._includeAllFields));
+}
+
+{
+CHECK((true == viewImpl->link(logicalCollection->cid(), arangodb::iresearch::emptyObjectSlice()).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+std::unordered_set<TRI_voc_cid_t> expected = { 100 };
+
+for (auto& cid: expected) {
+CHECK((1 == cids.erase(cid)));
+}
+
+CHECK((0 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+auto link = logicalCollection->getIndexes()[0]->toVelocyPack(true, false);
+arangodb::iresearch::IResearchLinkMeta linkMeta;
+std::string error;
+CHECK((linkMeta.init(link->slice(), error) && false == linkMeta._includeAllFields));
+}
+}
+
+// drop existing + recreate invalid
+{
+TRI_vocbase_t vocbase(TRI_vocbase_type_e::TRI_VOCBASE_TYPE_NORMAL, 1, "testVocbase");
+auto* logicalCollection = vocbase.createCollection(collectionJson->slice());
+CHECK((nullptr != logicalCollection));
+auto logicalView = vocbase.createView(viewJson->slice(), 0);
+REQUIRE((false == !logicalView));
+auto* view = logicalView->getImplementation();
+REQUIRE((false == !view));
+auto* viewImpl = dynamic_cast<arangodb::iresearch::IResearchView*>(view);
+REQUIRE((nullptr != viewImpl));
+
+auto links = arangodb::velocypack::Parser::fromJson("{ \
+\"links\": { \"testCollection\": { \"includeAllFields\": true } } \
+}");
+CHECK((true == logicalView->updateProperties(links->slice(), true, false).ok()));
+
+{
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+CHECK((1 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+auto link = logicalCollection->getIndexes()[0]->toVelocyPack(true, false);
+arangodb::iresearch::IResearchLinkMeta linkMeta;
+std::string error;
+CHECK((linkMeta.init(link->slice(), error) && true == linkMeta._includeAllFields));
+}
+
+{
+arangodb::velocypack::Builder builder;
+builder.openObject();
+builder.add("includeAllFields", arangodb::velocypack::Value("abc"));
+builder.close();
+auto slice = builder.slice();
+CHECK((false == viewImpl->link(logicalCollection->cid(), slice).ok()));
+std::set<TRI_voc_cid_t> cids;
+viewImpl->visitCollections([&cids](TRI_voc_cid_t cid)->bool { cids.emplace(cid); return true; });
+std::unordered_set<TRI_voc_cid_t> expected = { 100 };
+
+for (auto& cid: expected) {
+CHECK((1 == cids.erase(cid)));
+}
+
+CHECK((0 == cids.size()));
+CHECK((1 == logicalCollection->getIndexes().size()));
+auto link = logicalCollection->getIndexes()[0]->toVelocyPack(true, false);
+arangodb::iresearch::IResearchLinkMeta linkMeta;
+std::string error;
+CHECK((linkMeta.init(link->slice(), error) && true == linkMeta._includeAllFields));
+}
+}
+}
+
 SECTION("test_move_datapath") {
 std::string createDataPath = ((irs::utf8_path()/=s.testFilesystemPath)/=std::string("deleteme0")).utf8();
 std::string updateDataPath = ((irs::utf8_path()/=s.testFilesystemPath)/=std::string("deleteme1")).utf8();

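// A minimal arangosh sketch (not part of the diff): the new test_link section
// above drives IResearchView::link() directly; from JavaScript a comparable
// effect goes through view properties, where (as an assumption here) a null
// link definition drops the link and an object definition (re)creates it:
var v = db._view("testView"); // assumes collection "testCollection" and view "testView" exist
v.properties({ links: { testCollection: { includeAllFields: true } } }); // create/recreate link
v.properties({ links: { testCollection: null } });                       // drop link
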
|@ -1403,6 +1648,7 @@ SECTION("test_unregister_link") {
|     CHECK((1 == cids.size()));
|
|     auto factory = [](arangodb::LogicalView*, arangodb::velocypack::Slice const&, bool isNew)->std::unique_ptr<arangodb::ViewImplementation>{ return nullptr; };
|+    logicalCollection->getIndexes()[0]->unload(); // release view reference to prevent deadlock due to ~IResearchView() waiting for IResearchLink::unload()
|     logicalView->spawnImplementation(factory, createJson->slice(), true); // ensure destructor for ViewImplementation is called
|     CHECK((false == logicalCollection->getIndexes().empty()));
|   }
|
|@ -1494,6 +1740,7 @@ SECTION("test_tracked_cids") {
|     }
|
|     CHECK((expected.empty()));
|+    logicalCollection->getIndexes()[0]->unload(); // release view reference to prevent deadlock due to ~IResearchView() waiting for IResearchLink::unload()
|   }
|
|   // test drop via link before open (TRI_vocbase_t::createView(...) will call open())
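Both hunks insert the same unload() call ahead of view teardown, and the ordering is the point: the link holds a reference back to the view, and the view's destructor waits for every link to release it, so dropping the reference first removes the cycle. An illustrative sketch of the pattern with plain shared_ptr (the type names here are stand-ins, not ArangoDB's actual classes):

    #include <cassert>
    #include <memory>

    struct View {};                       // stand-in for IResearchView

    struct Link {                         // stand-in for IResearchLink
      std::shared_ptr<View> view;         // reference that keeps the view alive
      void unload() { view.reset(); }     // release the reference early
    };

    int main() {
      auto view = std::make_shared<View>();
      Link link{view};
      link.unload();                      // drop the link's reference first...
      assert(1 == view.use_count());      // ...so tearing down the view cannot block
      view.reset();                       // clean teardown, no deadlock
      return 0;
    }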
|@ -74,7 +74,7 @@ SECTION("test_defaults") {
|
|   for (auto& entry: meta._commit._consolidationPolicies) {
|     CHECK(true == (1 == expectedItem.erase(entry.type())));
|-    CHECK(true == (10 == entry.intervalStep()));
|+    CHECK(true == (300 == entry.segmentThreshold()));
|     CHECK(true == (false == !entry.policy()));
|     CHECK(true == (0.85f == entry.threshold()));
|   }
|
|@ -123,22 +123,22 @@ SECTION("test_inheritDefaults") {
|
|     switch(entry.type()) {
|      case ConsolidationPolicy::Type::BYTES:
|-      CHECK(true == (101 == entry.intervalStep()));
|+      CHECK(true == (101 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.11f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::BYTES_ACCUM:
|-      CHECK(true == (151 == entry.intervalStep()));
|+      CHECK(true == (151 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.151f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::COUNT:
|-      CHECK(true == (201 == entry.intervalStep()));
|+      CHECK(true == (201 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.21f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::FILL:
|-      CHECK(true == (301 == entry.intervalStep()));
|+      CHECK(true == (301 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.31f == entry.threshold()));
|       break;
|
|@ -171,7 +171,7 @@ SECTION("test_readDefaults") {
|
|   for (auto& entry: meta._commit._consolidationPolicies) {
|     CHECK(true == (1 == expectedItem.erase(entry.type())));
|-    CHECK(true == (10 == entry.intervalStep()));
|+    CHECK(true == (300 == entry.segmentThreshold()));
|     CHECK(true == (false == !entry.policy()));
|     CHECK(true == (.85f == entry.threshold()));
|   }
|
|@ -244,9 +244,9 @@ SECTION("test_readCustomizedValues") {
|
|   {
|     std::string errorField;
|-    auto json = arangodb::velocypack::Parser::fromJson("{ \"commit\": { \"consolidate\": { \"bytes\": { \"intervalStep\": 0.5, \"threshold\": 1 } } } }");
|+    auto json = arangodb::velocypack::Parser::fromJson("{ \"commit\": { \"consolidate\": { \"bytes\": { \"segmentThreshold\": 0.5, \"threshold\": 1 } } } }");
|     CHECK(false == meta.init(json->slice(), errorField, logicalView));
|-    CHECK(std::string("commit=>consolidate=>bytes=>intervalStep") == errorField);
|+    CHECK(std::string("commit=>consolidate=>bytes=>segmentThreshold") == errorField);
|   }
|
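The renamed option keeps its validation rules: the fractional 0.5 above still fails init(), and errorField now reports the path under the new key, while a zero value is accepted and simply disables the policy (next hunk). A minimal sketch of that type check with velocypack; checkSegmentThreshold is a hypothetical helper mirroring what the test expects, not the actual IResearchViewMeta code:

    #include <iostream>
    #include <velocypack/Parser.h>
    #include <velocypack/Slice.h>
    #include <velocypack/velocypack-aliases.h>

    // Hypothetical helper: "segmentThreshold" must hold an integral number.
    static bool checkSegmentThreshold(VPackSlice policy) {
      VPackSlice value = policy.get("segmentThreshold");
      return value.isNumber() && value.isInteger();
    }

    int main() {
      auto bad  = VPackParser::fromJson("{ \"segmentThreshold\": 0.5 }");
      auto good = VPackParser::fromJson("{ \"segmentThreshold\": 300 }");
      std::cout << checkSegmentThreshold(bad->slice())   // 0: fractional value rejected
                << checkSegmentThreshold(good->slice())  // 1: integral value accepted
                << std::endl;
      return 0;
    }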
|
|   {
|@ -302,7 +302,7 @@ SECTION("test_readCustomizedValues") {
|   {
|     std::string errorField;
|     auto json = arangodb::velocypack::Parser::fromJson("{ \
|-      \"commit\": { \"consolidate\": { \"bytes_accum\": { \"intervalStep\": 0, \"threshold\": 0.2 }, \"fill\": { \"intervalStep\": 0 } } } \
|+      \"commit\": { \"consolidate\": { \"bytes_accum\": { \"segmentThreshold\": 0, \"threshold\": 0.2 }, \"fill\": { \"segmentThreshold\": 0 } } } \
|     }");
|     CHECK(true == meta.init(json->slice(), errorField, logicalView));
|     CHECK(true == (meta._commit._consolidationPolicies.empty()));
|@ -312,7 +312,7 @@ SECTION("test_readCustomizedValues") {
|     std::string errorField;
|     auto json = arangodb::velocypack::Parser::fromJson("{ \
|       \"collections\": [ 42 ], \
|-      \"commit\": { \"commitIntervalMsec\": 456, \"cleanupIntervalStep\": 654, \"commitTimeoutMsec\": 789, \"consolidate\": { \"bytes\": { \"intervalStep\": 1001, \"threshold\": 0.11 }, \"bytes_accum\": { \"intervalStep\": 1501, \"threshold\": 0.151 }, \"count\": { \"intervalStep\": 2001 }, \"fill\": {} } }, \
|+      \"commit\": { \"commitIntervalMsec\": 456, \"cleanupIntervalStep\": 654, \"commitTimeoutMsec\": 789, \"consolidate\": { \"bytes\": { \"segmentThreshold\": 1001, \"threshold\": 0.11 }, \"bytes_accum\": { \"segmentThreshold\": 1501, \"threshold\": 0.151 }, \"count\": { \"segmentThreshold\": 2001 }, \"fill\": {} } }, \
|       \"dataPath\": \"somepath\", \
|       \"locale\": \"ru_RU.KOI8-R\", \
|       \"threadsMaxIdle\": 8, \
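Pretty-printed, the customized commit configuration exercised by this test reads as follows in the new format (content identical to the escaped JSON above):

    {
      "commit": {
        "commitIntervalMsec": 456,
        "cleanupIntervalStep": 654,
        "commitTimeoutMsec": 789,
        "consolidate": {
          "bytes":       { "segmentThreshold": 1001, "threshold": 0.11 },
          "bytes_accum": { "segmentThreshold": 1501, "threshold": 0.151 },
          "count":       { "segmentThreshold": 2001 },
          "fill":        {}
        }
      }
    }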
|@ -338,22 +338,22 @@ SECTION("test_readCustomizedValues") {
|
|     switch(entry.type()) {
|      case ConsolidationPolicy::Type::BYTES:
|-      CHECK(true == (1001 == entry.intervalStep()));
|+      CHECK(true == (1001 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.11f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::BYTES_ACCUM:
|-      CHECK(true == (1501 == entry.intervalStep()));
|+      CHECK(true == (1501 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.151f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::COUNT:
|-      CHECK(true == (2001 == entry.intervalStep()));
|+      CHECK(true == (2001 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.85f == entry.threshold()));
|       break;
|      case ConsolidationPolicy::Type::FILL:
|-      CHECK(true == (10 == entry.intervalStep()));
|+      CHECK(true == (300 == entry.segmentThreshold()));
|       CHECK(true == (false == !entry.policy()));
|       CHECK(true == (.85f == entry.threshold()));
|       break;
|
|@ -369,10 +369,10 @@ SECTION("test_readCustomizedValues") {
|
| SECTION("test_writeDefaults") {
|   std::unordered_map<std::string, std::unordered_map<std::string, double>> expectedCommitItemConsolidate = {
|-    { "bytes",{ { "intervalStep", 10 },{ "threshold", .85f } } },
|-    { "bytes_accum",{ { "intervalStep", 10 },{ "threshold", .85f } } },
|-    { "count",{ { "intervalStep", 10 },{ "threshold", .85f } } },
|-    { "fill",{ { "intervalStep", 10 },{ "threshold", .85f } } }
|+    { "bytes",{ { "segmentThreshold", 300 },{ "threshold", .85f } } },
|+    { "bytes_accum",{ { "segmentThreshold", 300 },{ "threshold", .85f } } },
|+    { "count",{ { "segmentThreshold", 300 },{ "threshold", .85f } } },
|+    { "fill",{ { "segmentThreshold", 300 },{ "threshold", .85f } } }
|   };
|   arangodb::iresearch::IResearchViewMeta meta;
|   arangodb::velocypack::Builder builder;
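Given those expectations, serializing a default-constructed IResearchViewMeta should now emit a consolidate section of the following shape (values taken from the expectedCommitItemConsolidate map above; the default changes from intervalStep 10 to segmentThreshold 300):

    "consolidate": {
      "bytes":       { "segmentThreshold": 300, "threshold": 0.85 },
      "bytes_accum": { "segmentThreshold": 300, "threshold": 0.85 },
      "count":       { "segmentThreshold": 300, "threshold": 0.85 },
      "fill":        { "segmentThreshold": 300, "threshold": 0.85 }
    }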
|@ -466,10 +466,10 @@ SECTION("test_writeCustomizedValues") {
|
|   std::unordered_set<TRI_voc_cid_t> expectedCollections = { 42, 52, 62 };
|   std::unordered_map<std::string, std::unordered_map<std::string, double>> expectedCommitItemConsolidate = {
|-    { "bytes",{ { "intervalStep", 101 },{ "threshold", .11f } } },
|-    { "bytes_accum",{ { "intervalStep", 151 },{ "threshold", .151f } } },
|-    { "count",{ { "intervalStep", 201 },{ "threshold", .21f } } },
|-    { "fill",{ { "intervalStep", 301 },{ "threshold", .31f } } }
|+    { "bytes",{ { "segmentThreshold", 101 },{ "threshold", .11f } } },
|+    { "bytes_accum",{ { "segmentThreshold", 151 },{ "threshold", .151f } } },
|+    { "count",{ { "segmentThreshold", 201 },{ "threshold", .21f } } },
|+    { "fill",{ { "segmentThreshold", 301 },{ "threshold", .31f } } }
|   };
|   arangodb::velocypack::Builder builder;
|   arangodb::velocypack::Slice tmpSlice;
|
|@ -94,7 +94,7 @@ class EdgeIndexIteratorMock final : public arangodb::IndexIterator {
|     return "edge-index-iterator-mock";
|   }
|
|-  bool next(LocalDocumentIdCallback const& cb, size_t limit) {
|+  bool next(LocalDocumentIdCallback const& cb, size_t limit) override {
|     while (limit && _begin != _end && _keysIt.valid()) {
|       auto key = _keysIt.value();
|
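This and the remaining hunks only append override to virtual members the mocks already redefine; behavior is unchanged, but a future signature change in arangodb::IndexIterator or arangodb::Index will now surface as a compile error instead of silently creating an unrelated overload. A self-contained sketch of the effect (illustrative types, not ArangoDB's):

    struct IteratorBase {
      virtual ~IteratorBase() = default;
      virtual bool next(int limit) { return false; }
    };

    struct IteratorMock final : IteratorBase {
      // OK: exact signature match, verified by the compiler via 'override'.
      bool next(int limit) override { return 0 < limit; }

      // Would fail to compile if enabled: the mismatched parameter type means
      // this declares a new overload rather than overriding the base member.
      // bool next(long limit) override; // error: marked 'override' but does not override
    };

    int main() {
      IteratorMock it;
      return it.next(1) ? 0 : 1;
    }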
|@ -175,7 +175,7 @@ class EdgeIndexMock final : public arangodb::Index {
|       VPackBuilder& builder,
|       bool withFigures,
|       bool forPersistence
|-  ) const {
|+  ) const override {
|     builder.openObject();
|     Index::toVelocyPack(builder, withFigures, forPersistence);
|     // hard-coded
|
|@ -184,7 +184,7 @@ class EdgeIndexMock final : public arangodb::Index {
|     builder.close();
|   }
|
|-  void toVelocyPackFigures(VPackBuilder& builder) const {
|+  void toVelocyPackFigures(VPackBuilder& builder) const override {
|     Index::toVelocyPackFigures(builder);
|
|     builder.add("from", VPackValue(VPackValueType::Object));
|
|@ -201,7 +201,7 @@ class EdgeIndexMock final : public arangodb::Index {
|       arangodb::LocalDocumentId const& documentId,
|       arangodb::velocypack::Slice const& doc,
|       OperationMode
|-  ) {
|+  ) override {
|     if (!doc.isObject()) {
|       return { TRI_ERROR_INTERNAL };
|     }
|
|@ -229,7 +229,7 @@ class EdgeIndexMock final : public arangodb::Index {
|       arangodb::LocalDocumentId const&,
|       arangodb::velocypack::Slice const& doc,
|       OperationMode
|-  ) {
|+  ) override {
|     if (!doc.isObject()) {
|       return { TRI_ERROR_INTERNAL };
|     }
|
|@ -258,7 +258,7 @@ class EdgeIndexMock final : public arangodb::Index {
|       size_t itemsInIndex,
|       size_t& estimatedItems,
|       double& estimatedCost
|-  ) const {
|+  ) const override {
|     arangodb::SimpleAttributeEqualityMatcher matcher(IndexAttributes);
|
|     return matcher.matchOne(
|
|@ -272,7 +272,7 @@ class EdgeIndexMock final : public arangodb::Index {
|       arangodb::aql::AstNode const* node,
|       arangodb::aql::Variable const*,
|       bool
|-  ) {
|+  ) override {
|     TRI_ASSERT(node->type == arangodb::aql::NODE_TYPE_OPERATOR_NARY_AND);
|
|     TRI_ASSERT(node->numMembers() == 1);
|
|@ -311,7 +311,7 @@ class EdgeIndexMock final : public arangodb::Index {
|   arangodb::aql::AstNode* specializeCondition(
|       arangodb::aql::AstNode* node,
|       arangodb::aql::Variable const* reference
|-  ) const {
|+  ) const override {
|     arangodb::SimpleAttributeEqualityMatcher matcher(IndexAttributes);
|
|     return matcher.specializeOne(this, node, reference);