mirror of https://gitee.com/bigwinds/arangodb
Merge branch 'devel' of https://github.com/arangodb/arangodb into devel
commit e952d2f08b
@@ -1,75 +1,75 @@
 # -*- mode: CMAKE; -*-

 # ------------------------------------------------------------------------------
 # OPENSSL
 # ------------------------------------------------------------------------------

 find_package(OpenSSL REQUIRED)

 if (OPENSSL_VERSION)
   string(REPLACE "." ";" OPENSSL_VERSION_LIST ${OPENSSL_VERSION})
   list(GET OPENSSL_VERSION_LIST 0 OPENSSL_VERSION_MAJOR)
   list(GET OPENSSL_VERSION_LIST 1 OPENSSL_VERSION_MINOR)
   if ("${OPENSSL_VERSION_MAJOR}" GREATER 0 AND "${OPENSSL_VERSION_MINOR}" GREATER 0)
     option(USE_OPENSSL_NO_SSL2
       "do not use OPENSSL_NO_SSL2"
       ON
     )
   else ()
     option(USE_OPENSSL_NO_SSL2
       "do not use OPENSSL_NO_SSL2"
       OFF
     )
   endif ()
 endif ()

 set(OPENSSL_VERSION
   "${OPENSSL_VERSION}"
   CACHE INTERNAL
   "OpenSSL: Version"
 )

 set(OPENSSL_INCLUDE_DIR
   "${OPENSSL_INCLUDE_DIR}"
   CACHE INTERNAL
   "OpenSSL: Include Directory"
 )

 if (WIN32 AND NOT SSL_NUGET)
   if (DEBUG)
     set(OPENSSL_EXT "d")
   else ()
     set(OPENSSL_EXT "")
   endif ()

   if (NOT ${OPENSSL_LIB_DIR})
     set(OPENSSL_LIB_DIR "${OPENSSL_ROOT_DIR}/lib/VC")
   endif ()

   message(STATUS "OPENSSL_LIB_DIR = ${OPENSSL_LIB_DIR}")
   message(STATUS "OPENSSL_ROOT_DIR = ${OPENSSL_ROOT_DIR}")

   set(OPENSSL_LIBRARIES
     debug "${OPENSSL_LIB_DIR}/ssleay32MTd.lib;${OPENSSL_LIB_DIR}/libeay32MTd.lib"
     optimized "${OPENSSL_LIB_DIR}/ssleay32MT.lib;${OPENSSL_LIB_DIR}/libeay32MT.lib"
     CACHE INTERNAL
     "OpenSSL: libraries"
   )
 else ()
   set(OPENSSL_LIBRARIES
     "${OPENSSL_LIBRARIES}"
     CACHE INTERNAL
     "OpenSSL: libraries"
   )
 endif ()

 if (WIN32 AND SSL_NUGET)
   #install (FILES ${LIB_EAY_DEBUG_DLL} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
   #install (FILES ${SSL_EAY_DEBUG_DLL} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
-  set(OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/bin/$(Configuration)/")
+  #message("FILES ${SSL_EAY_RELEASE_DLL} RUNTIME DESTINATION ${OUTPUT_DIRECTORY}")

   install (FILES "${LIB_EAY_RELEASE_DLL}"
-    RUNTIME DESTINATION "${OUTPUT_DIRECTORY}")
+    DESTINATION "bin/")
   install (FILES "${SSL_EAY_RELEASE_DLL}"
-    RUNTIME DESTINATION "${OUTPUT_DIRECTORY}")
+    DESTINATION "bin/")
 endif()
CHANGELOG
@@ -1,5 +1,17 @@
-v3.0.0 (XXXX-XX-XX)
--------------------
+v3.0.0-rc2 (2015-06-12)
+-----------------------
+
+* added option `--server.max-packet-size` for client tools
+
+* renamed option `--server.ssl-protocol` to `--ssl.protocol` in client tools
+  (was already done for arangod, but overlooked for client tools)
+
+* fix handling of `--ssl.protocol` value 5 (TLS v1.2) in client tools, which
+  claimed to support it but didn't
+

 v3.0.0-rc1 (2015-06-10)
 -----------------------

 * forward ported V8 Comparator bugfix for inline heuristics from
   https://github.com/v8/v8/commit/5ff7901e24c2c6029114567de5a08ed0f1494c81
@@ -119,6 +119,11 @@ configure_file(
 ## OPERATION SYSTEM
 ################################################################################

+option(HOMEBREW
+  "whether to install for homebrew"
+  Off
+)
+
 if (WIN32)
   set(WINDOWS TRUE)
   set(MSBUILD TRUE)
@@ -105,8 +105,8 @@ FOR vertex[, edge[, path]]
 - `IN` `min..max`: the minimal and maximal depth for the traversal:
   - **min** (number, *optional*): edges and vertices returned by this query will
     start at the traversal depth of *min* (thus edges and vertices below will
-    not be returned). If not specified, it defaults to 1, which is the minimal
-    possible value.
+    not be returned). If not specified, it defaults to 1. The minimal
+    possible value is 0.
   - **max** (number, *optional*): up to *max* length paths are traversed.
     If omitted, *max* defaults to *min*. Thus only the vertices and edges in
     the range of *min* are returned. *max* can not be specified without *min*.
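To make the depth range concrete, a minimal arangosh sketch (the collection names `verts` and `edges` and the start vertex are made up for illustration):

```js
// Vertices at traversal depths 1 through 3 from the start vertex.
// A range of "0..1" would include the start vertex itself (min = 0).
var vertices = db._query(
  "FOR v IN 1..3 OUTBOUND 'verts/start' edges RETURN v"
).toArray();
```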
@@ -8,7 +8,7 @@ These attributes are mandatory and must contain the document-handle
 of the from and to vertices of an edge.

 Use the general document
-[REST api](../Document/WorkingWithDocuments.html)
+[REST api](../Document/WorkingWithDocuments.md)
 for create/read/update/delete.

 <!-- Rest/Graph edges -->
@@ -55,7 +55,7 @@ ppbook-precheck-bad-code-sections:

 ppbook-check-html-link:
 	@echo "##### checking for invalid HTML links in $(NAME)"
-	@echo "$(ALLBOOKS)" |sed -e 's; ;\n;g' |sed -e 's;^;/;' -e 's;$$;/;' > /tmp/books.regex
+	@echo "$(ALLBOOKS)" | tr " " "\n" | sed -e 's;^;/;' -e 's;$$;/;' > /tmp/books.regex

 	@egrep -r '\[.*\]\(.*\)' ppbooks/$(NAME)|grep '\.md:'| grep 'html'| grep -v 'http://' | grep -v 'https://' | grep -v 'header.css' | grep -v -f /tmp/books.regex > /tmp/relative_html_links.txt ||true
 	@if test "`cat /tmp/relative_html_links.txt |wc -l`" -gt 0; then \
@@ -29,7 +29,7 @@ arangosh> users.grantDatabase("admin@testapp", "testdb");
 This grants the user access to the database *testdb*. `revokeDatabase`
 will revoke the right.

-!SECTION Comparision to ArangoDB 2
+!SECTION Comparison to ArangoDB 2

 ArangoDB 2 contained separate users per database. It was not possible
 to give an user access to two or more databases. This proved
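To complement the `grantDatabase` call shown above, a minimal sketch of revoking the right again (module path as in ArangoDB 3.0 is an assumption here; user and database names are taken from the example session):

```js
// Undo grantDatabase("admin@testapp", "testdb") from the example above.
var users = require("@arangodb/users");
users.revokeDatabase("admin@testapp", "testdb");
```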
@@ -16,20 +16,17 @@ Unfortunately, the JavaScript libraries are just in the process of being
 standardized. CommonJS has defined some important modules. ArangoDB implements
 the following

-* "console" is a well known logging facility to all the JavaScript developers.
+- [console](Console.md) is a well known logging facility to all the JavaScript developers.
   ArangoDB implements most of the [Console API](http://wiki.commonjs.org/wiki/Console),
   with the exceptions of *profile* and *count*.

-* "fs" provides a file system API for the manipulation of paths, directories,
+- [fs](FileSystem.md) provides a file system API for the manipulation of paths, directories,
   files, links, and the construction of file streams. ArangoDB implements
   most [Filesystem/A](http://wiki.commonjs.org/wiki/Filesystem/A) functions.

-* Modules are implemented according to
+- Modules are implemented according to
   [Modules/1.1.1](http://wiki.commonjs.org/wiki/Modules)

-* Packages are implemented according to
-  [Packages/1.0](http://wiki.commonjs.org/wiki/Packages)
-
 !SUBSECTION ArangoDB Specific Modules

 A lot of the modules, however, are ArangoDB specific. These modules
@@ -39,75 +36,76 @@ are described in the following chapters.

 ArangoDB also supports some [node](http://www.nodejs.org) modules.

-* ["assert"](http://nodejs.org/api/assert.html) implements
+- [assert](http://nodejs.org/api/assert.html) implements
  assertion and testing functions.

-* ["buffer"](http://nodejs.org/api/buffer.html) implements
+- [buffer](http://nodejs.org/api/buffer.html) implements
  a binary data type for JavaScript.

-* ["path"](http://nodejs.org/api/path.html) implements
+- [path](http://nodejs.org/api/path.html) implements
  functions dealing with filenames and paths.

-* ["punycode"](http://nodejs.org/api/punycode.html) implements
+- [punycode](http://nodejs.org/api/punycode.html) implements
  conversion functions for
  [punycode](http://en.wikipedia.org/wiki/Punycode) encoding.

-* ["querystring"](http://nodejs.org/api/querystring.html)
+- [querystring](http://nodejs.org/api/querystring.html)
  provides utilities for dealing with query strings.

-* ["stream"](http://nodejs.org/api/stream.html)
+- [stream](http://nodejs.org/api/stream.html)
  provides a streaming interface.

-* ["url"](http://nodejs.org/api/url.html)
+- [url](http://nodejs.org/api/url.html)
  has utilities for URL resolution and parsing.

 !SUBSECTION Bundled NPM Modules

 The following [NPM modules](https://npmjs.org) are preinstalled.

-* ["aqb"](https://github.com/arangodb/aqbjs)
+- [aqb](https://github.com/arangodb/aqbjs)
  is the ArangoDB Query Builder and can be used to construct
  AQL queries with a chaining JavaScript API.

-* ["error-stack-parser"](http://www.stacktracejs.com)
+- [error-stack-parser](http://www.stacktracejs.com)

-* ["expect.js"](https://github.com/Automattic/expect.js)
+- [expect.js](https://github.com/Automattic/expect.js)

-* ["extendible"](https://github.com/3rd-Eden/extendible)
+- [extendible](https://github.com/3rd-Eden/extendible)

-* ["foxx_generator"](https://github.com/moonglum/foxx_generator)
+- [foxx_generator](https://github.com/moonglum/foxx_generator)

-* ["http-errors"](https://github.com/jshttp/http-errors)
+- [http-errors](https://github.com/jshttp/http-errors)

-* ["i"](https://github.com/pksunkara/inflect)
+- [i (inflect)](https://github.com/pksunkara/inflect)

-* ["joi"](https://github.com/hapijs/joi)
+- [joi](https://github.com/hapijs/joi)
  is a validation library that is used throughout the Foxx framework.

-* ["js-yaml"](https://github.com/nodeca/js-yaml)
+- [js-yaml](https://github.com/nodeca/js-yaml)

-* ["minimatch"](https://github.com/isaacs/minimatch)
+- [minimatch](https://github.com/isaacs/minimatch)

-* ["qs"](https://github.com/hapijs/qs)
+- [qs](https://github.com/hapijs/qs)
  provides utilities for dealing with query strings using a different format
  than the **querystring** module.

-* ["ramda"](http://ramdajs.com)
+- [ramda](http://ramdajs.com)

-* ["semver"](https://github.com/npm/node-semver)
+- [semver](https://github.com/npm/node-semver)

-* ["sinon"](http://sinonjs.org)
+- [sinon](http://sinonjs.org)

-* ["underscore"](http://underscorejs.org)
+- [underscore](http://underscorejs.org)

 !SUBSECTION Installing NPM Modules

-You can install additional modules using `npm install`. Note the following limitations in ArangoDB's compatibility with node or browser modules:
+You can install additional modules using `npm install`. Note the following
+limitations in ArangoDB's compatibility with node or browser modules:

-* modules must be implemented in pure JavaScript (no native extensions)
-* modules must be strictly synchronous (e.g. no setTimeout or promises)
-* only a subset of node's built-in modules are supported (see above)
-* the same limitations apply to each module's dependencies
+- modules must be implemented in pure JavaScript (no native extensions)
+- modules must be strictly synchronous (e.g. no setTimeout or promises)
+- only a subset of node's built-in modules are supported (see above)
+- the same limitations apply to each module's dependencies

 !SUBSECTION require
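To illustrate the module constraints listed above, a hypothetical sketch (the `lodash` module name is an assumption; it qualifies because it is pure JavaScript and synchronous):

```js
// After running `npm install lodash` in the service directory:
var _ = require('lodash');
console.log(_.chunk([1, 2, 3, 4], 2)); // [[1, 2], [3, 4]]
```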
@@ -123,10 +121,10 @@ Assume that your module file is *test1.js* and contains

 ```js
 exports.func1 = function() {
-  print("1");
+  print("2");
 };

-exports.const1 = 1;
+exports.const1 = 4;
 ```

 Then you can use *require* to load the file and access the exports.
@@ -136,10 +134,10 @@ unix> ./arangosh
 arangosh> var test1 = require("test1");

 arangosh> test1.const1;
-1
+4

 arangosh> test1.func1();
-1
+2
 ```

 *require* follows the specification
@@ -193,14 +191,14 @@ require("com/example/extension")
 then ArangoDB will try to locate the corresponding JavaScript as file as
 follows

-* There is a cache for the results of previous *require* calls. First of
+- There is a cache for the results of previous *require* calls. First of
   all ArangoDB checks if *com/example/extension* is already in the modules
   cache. If it is, the export object for this module is returned. No further
   JavaScript is executed.

-* ArangoDB will then check, if there is a file called **com/example/extension.js** in the system search path. If such a file exists, it is executed in a new module context and the value of *exports* object is returned. This value is also stored in the module cache.
+- ArangoDB will then check, if there is a file called **com/example/extension.js** in the system search path. If such a file exists, it is executed in a new module context and the value of *exports* object is returned. This value is also stored in the module cache.

-* If no file can be found, ArangoDB will check if the collection *_modules*
+- If no file can be found, ArangoDB will check if the collection *_modules*
   contains a document of the form

 ```js
@@ -0,0 +1,50 @@
+!CHAPTER The "collection" Object
+
+The following methods exist on the collection object (returned by *db.name*):
+
+*Collection*
+
+* [collection.checksum()](../../DataModeling/Collections/CollectionMethods.md#checksum)
+* [collection.count()](../../DataModeling/Collections/CollectionMethods.md#count)
+* [collection.drop()](../../DataModeling/Collections/CollectionMethods.md#drop)
+* [collection.figures()](../../DataModeling/Collections/CollectionMethods.md#figures)
+* [collection.load()](../../DataModeling/Collections/CollectionMethods.md#load)
+* [collection.properties()](../../DataModeling/Collections/CollectionMethods.md#properties)
+* [collection.reserve()](../../DataModeling/Collections/CollectionMethods.md#reserve)
+* [collection.revision()](../../DataModeling/Collections/CollectionMethods.md#revision)
+* [collection.rotate()](../../DataModeling/Collections/CollectionMethods.md#rotate)
+* [collection.toArray()](../../DataModeling/Collections/CollectionMethods.md#toarray)
+* [collection.truncate()](../../DataModeling/Collections/CollectionMethods.md#truncate)
+* [collection.type()](../../DataModeling/Collections/CollectionMethods.md#type)
+* [collection.unload()](../../DataModeling/Collections/CollectionMethods.md#unload)
+
+*Indexes*
+
+* [collection.dropIndex(index)](../../Indexing/WorkingWithIndexes.md#dropping-an-index)
+* [collection.ensureIndex(description)](../../Indexing/WorkingWithIndexes.md#creating-an-index)
+* [collection.getIndexes(name)](../../Indexing/WorkingWithIndexes.md#listing-all-indexes-of-a-collection)
+* [collection.index(index)](../../Indexing/WorkingWithIndexes.md#index-identifiers-and-handles)
+
+*Document*
+
+* [collection.all()](../../DataModeling/Collections/CollectionMethods.md#all)
+* [collection.any()](../../DataModeling/Collections/CollectionMethods.md#any)
+* [collection.closedRange(attribute, left, right)](../../DataModeling/Collections/CollectionMethods.md#closed-range)
+* [collection.document(object)](../../DataModeling/Collections/CollectionMethods.md#document)
+* [collection.documents(keys)](../../DataModeling/Collections/CollectionMethods.md#lookup-by-keys)
+* [collection.edges(vertex-id)](../../DataModeling/Collections/CollectionMethods.md#misc)
+* [collection.exists(object)](../../DataModeling/Collections/CollectionMethods.md#exists)
+* [collection.firstExample(example)](../../DataModeling/Collections/CollectionMethods.md#first-example)
+* [collection.inEdges(vertex-id)](../../DataModeling/Collections/CollectionMethods.md#misc)
+* [collection.insert(data)](../../DataModeling/Collections/CollectionMethods.md#insert)
+* [collection.iterate(iterator,options)](../../DataModeling/Collections/CollectionMethods.md#misc)
+* [collection.outEdges(vertex-id)](../../DataModeling/Collections/CollectionMethods.md#misc)
+* [collection.queryByExample(example)](../../DataModeling/Collections/CollectionMethods.md#query-by-example)
+* [collection.range(attribute, left, right)](../../DataModeling/Collections/CollectionMethods.md#range)
+* [collection.remove(selector)](../../DataModeling/Collections/CollectionMethods.md#remove)
+* [collection.removeByKeys(keys)](../../DataModeling/Collections/CollectionMethods.md#remove-by-keys)
+* [collection.rename()](../../DataModeling/Collections/CollectionMethods.md#rename)
+* [collection.replace(selector, data)](../../DataModeling/Collections/CollectionMethods.md#replace)
+* [collection.replaceByExample(example, data)](../../DataModeling/Collections/CollectionMethods.md#replace-by-example)
+* [collection.update(selector, data)](../../DataModeling/Collections/CollectionMethods.md#update)
+* [collection.updateByExample(example, data)](../../DataModeling/Collections/CollectionMethods.md#update-by-example)
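For orientation, a short arangosh sketch exercising a few of the listed methods (the collection name `example` is made up):

```js
// db.name returns the collection object documented above.
var coll = db.example;
coll.insert({ name: "first entry" });  // Document: insert
print(coll.count());                   // Collection: count
coll.truncate();                       // Collection: truncate
```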
@@ -0,0 +1,46 @@
+!CHAPTER The "db" Object
+
+*db.name* returns a [collection object](CollectionObject.md) for the collection *name*.
+
+The following methods exist on the *_db* object:
+
+*Database*
+
+* [db._createDatabase(name, options, users)](../../DataModeling/Databases/WorkingWith.md#create-database)
+* [db._databases()](../../DataModeling/Databases/WorkingWith.md#list-databases)
+* [db._dropDatabase(name, options, users)](../../DataModeling/Databases/WorkingWith.md#drop-database)
+* [db._useDatabase(name)](../../DataModeling/Databases/WorkingWith.md#use-database)
+
+*Indexes*
+
+* [db._index(index)](../../Indexing/WorkingWithIndexes.md#fetching-an-index-by-handle)
+* [db._dropIndex(index)](../../Indexing/WorkingWithIndexes.md#dropping-an-index)
+
+*Properties*
+
+* [db._id()](../../DataModeling/Databases/WorkingWith.md#id)
+* [db._isSystem()](../../DataModeling/Databases/WorkingWith.md#issystem)
+* [db._name()](../../DataModeling/Databases/WorkingWith.md#name)
+* [db._path()](../../DataModeling/Databases/WorkingWith.md#path)
+* [db._version()](../../DataModeling/Documents/DocumentMethods.md#get-the-version-of-arangodb)
+
+*Collection*
+
+* [db._collection(name)](../../DataModeling/Collections/DatabaseMethods.md#collection)
+* [db._create(name)](../../DataModeling/Collections/DatabaseMethods.md#create)
+* [db._drop(name)](../../DataModeling/Collections/DatabaseMethods.md#drop)
+* [db._truncate(name)](../../DataModeling/Collections/DatabaseMethods.md#truncate)
+
+*AQL*
+
+* [db._createStatement(query)](../../../AQL/Invocation/WithArangosh.html#with-createstatement-arangostatement)
+* [db._query(query)](../../../AQL/Invocation/WithArangosh.html#with-dbquery)
+* [db._explain(query)](../../ReleaseNotes/NewFeatures28.md#miscellaneous-improvements)
+
+*Document*
+
+* [db._document(object)](../../DataModeling/Documents/DatabaseMethods.md#document)
+* [db._exists(object)](../../DataModeling/Documents/DatabaseMethods.md#exists)
+* [db._remove(selector)](../../DataModeling/Documents/DatabaseMethods.md#remove)
+* [db._replace(selector,data)](../../DataModeling/Documents/DatabaseMethods.md#replace)
+* [db._update(selector,data)](../../DataModeling/Documents/DatabaseMethods.md#update)
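Likewise, a short sketch touching methods from the lists above (collection name and query are made up):

```js
// Methods on the db object documented above.
db._create("example");                  // Collection: create
print(db._name());                      // Properties: current database name
print(db._query("RETURN 1").toArray()); // AQL: query
db._drop("example");                    // Collection: drop
```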
@@ -0,0 +1 @@
+!CHAPTER References
@@ -1224,9 +1224,3 @@ as second argument.
 ~ db._drop("example");
 @END_EXAMPLE_ARANGOSH_OUTPUT
 @endDocuBlock accessViaGeoIndex
-
-`edge.setProperty(name, value)`
-
-Changes or sets the property *name* an *edges* to *value*.
-
-
@@ -209,7 +209,7 @@ const DOC_NOT_FOUND = errors.ERROR_ARANGO_DOCUMENT_NOT_FOUND.code;
 router.post('/entries', function (req, res) {
   const data = req.body;
   const meta = foxxColl.save(req.body);
-  res.json(Object.assign(data, meta));
+  res.send(Object.assign(data, meta));
 })
 .body(joi.object().required(), 'Entry to store in the collection.')
 .response(joi.object().required(), 'Entry stored in the collection.')
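For context, the complete route from this hunk with the changed line applied (`router`, `foxxColl`, and `joi` are defined earlier in the guide this diff touches):

```js
// res.send() now replaces res.json() when returning the stored entry.
router.post('/entries', function (req, res) {
  const data = req.body;
  const meta = foxxColl.save(req.body);
  res.send(Object.assign(data, meta));
})
.body(joi.object().required(), 'Entry to store in the collection.')
.response(joi.object().required(), 'Entry stored in the collection.');
```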
@@ -219,7 +219,7 @@ router.post('/entries', function (req, res) {
 router.get('/entries/:key', function (req, res) {
   try {
     const data = foxxColl.document(req.pathParams.key);
-    res.json(data)
+    res.send(data)
   } catch (e) {
     if (!e.isArangoError || e.errorNum !== DOC_NOT_FOUND) {
       throw e;
@@ -258,7 +258,7 @@ router.get('/entries', function (req, res) {
     FOR entry IN ${foxxColl}
     RETURN entry._key
   `);
-  res.json(keys)
+  res.send(keys);
 })
 .response(joi.array().items(
   joi.string().required()
@@ -1053,6 +1053,14 @@ and all client tools uses these APIs.
 In order to connect to earlier versions of ArangoDB with the client tools, an older
 version of the client tools needs to be kept installed.

+!SUBSECTION Command-line options added
+
+All client tools in 3.0 provide an option `--server.max-packet-size` for controlling
+the maximum size of HTTP packets to be handled by the client tools. The default value
+is 128 MB, as in previous versions of ArangoDB. In contrast to previous versions in
+which the value was hard-coded, the option is now configurable. It can be increased to
+make the client tools handle very large HTTP result messages sent by the server.
+
 !SUBSECTION Command-line options changed

 For all client tools, the option `--server.disable-authentication` was renamed to
@@ -169,13 +169,16 @@
 * [Incompatible changes in 2.3](ReleaseNotes/UpgradingChanges23.md)
 #
 * [Appendix](Appendix/README.md)
+  * [References](Appendix/References/README.md)
+    * [db](Appendix/References/DBObject.md)
+    * [collection](Appendix/References/CollectionObject.md)
   * [JavaScript Modules](Appendix/JavaScriptModules/README.md)
-    * ["console"](Appendix/JavaScriptModules/Console.md)
-    * ["fs"](Appendix/JavaScriptModules/FS.md)
-    * ["process"](Appendix/JavaScriptModules/Process.md)
-    * ["request"](Appendix/JavaScriptModules/Request.md)
-    * ["actions"](Appendix/JavaScriptModules/Actions.md)
-    * ["queries"](Appendix/JavaScriptModules/Queries.md)
+    * [console](Appendix/JavaScriptModules/Console.md)
+    * [fs](Appendix/JavaScriptModules/FileSystem.md)
+    * [process](Appendix/JavaScriptModules/Process.md)
+    * [request](Appendix/JavaScriptModules/Request.md)
+    * [actions](Appendix/JavaScriptModules/Actions.md)
+    * [queries](Appendix/JavaScriptModules/Queries.md)
     * [Write-ahead log](Appendix/JavaScriptModules/WAL.md)
     * [Task Management](Appendix/JavaScriptModules/Tasks.md)
   * [Deprecated](Appendix/Deprecated/README.md)
@@ -1,23 +0,0 @@
-@startDocuBlock API_EDGE_READ_ALL
-@brief reads all edges from collection
-
-@RESTHEADER{GET /_api/document, Read all edges from collection}
-
-@RESTQUERYPARAMETERS
-
-@RESTQUERYPARAM{collection,string,required}
-The name of the collection.
-
-@RESTDESCRIPTION
-Returns an array of all URIs for all edges from the collection identified
-by *collection*.
-
-@RESTRETURNCODES
-
-@RESTRETURNCODE{200}
-All went good.
-
-@RESTRETURNCODE{404}
-The collection does not exist.
-@endDocuBlock
-
@@ -0,0 +1,19 @@
+shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/user <<EOF
+{
+  "user" : "admin@example",
+  "password" : "secure"
+}
+EOF
+
+HTTP/1.1 201 Created
+content-type: application/json; charset=utf-8
+
+{
+  "user" : "admin@example",
+  "active" : true,
+  "extra" : {
+  },
+  "changePassword" : false,
+  "error" : false,
+  "code" : 201
+}
@@ -0,0 +1,12 @@
+shell> curl -X DELETE --data-binary @- --dump - http://localhost:8529/_api/user/admin@ourapp <<EOF
+{
+}
+EOF
+
+HTTP/1.1 202 Accepted
+content-type: application/json; charset=utf-8
+
+{
+  "error" : false,
+  "code" : 202
+}
@@ -0,0 +1,39 @@
+shell> curl --dump - http://localhost:8529/_api/user
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+
+{
+  "result" : [
+    {
+      "user" : "root",
+      "active" : true,
+      "extra" : {
+      },
+      "changePassword" : false
+    },
+    {
+      "user" : "admin@example",
+      "active" : true,
+      "extra" : {
+      },
+      "changePassword" : false
+    },
+    {
+      "user" : "admin",
+      "active" : true,
+      "extra" : {
+      },
+      "changePassword" : false
+    },
+    {
+      "user" : "tester",
+      "active" : false,
+      "extra" : {
+      },
+      "changePassword" : false
+    }
+  ],
+  "error" : false,
+  "code" : 200
+}
@@ -0,0 +1,14 @@
+shell> curl --dump - http://localhost:8529/_api/user/admin@theirapp
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+
+{
+  "user" : "admin@theirapp",
+  "active" : true,
+  "extra" : {
+  },
+  "changePassword" : false,
+  "error" : false,
+  "code" : 200
+}
@@ -0,0 +1,14 @@
+shell> curl -X PUT --data-binary @- --dump - http://localhost:8529/_api/user/admin@secapp/database/_system <<EOF
+{
+  "grant" : "rw"
+}
+EOF
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+
+{
+  "_system" : "rw",
+  "error" : false,
+  "code" : 200
+}
@@ -0,0 +1,18 @@
+shell> curl -X PUT --data-binary @- --dump - http://localhost:8529/_api/user/admin@myapp <<EOF
+{
+  "password" : "secure"
+}
+EOF
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+
+{
+  "user" : "admin@myapp",
+  "active" : true,
+  "extra" : {
+  },
+  "changePassword" : false,
+  "error" : false,
+  "code" : 200
+}
@@ -0,0 +1,18 @@
+shell> curl -X PATCH --data-binary @- --dump - http://localhost:8529/_api/user/admin@yourapp <<EOF
+{
+  "password" : "secure"
+}
+EOF
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+
+{
+  "user" : "admin@yourapp",
+  "active" : true,
+  "extra" : {
+  },
+  "changePassword" : false,
+  "error" : false,
+  "code" : 200
+}
@@ -0,0 +1,42 @@
+arangosh> db.names.ensureIndex({ type: "persistent", fields: [ "first" ] });
+{
+  "id" : "names/15880",
+  "type" : "persistent",
+  "fields" : [
+    "first"
+  ],
+  "unique" : false,
+  "sparse" : false,
+  "isNewlyCreated" : true,
+  "code" : 201
+}
+arangosh> db.names.save({ "first" : "Tim" });
+{
+  "_id" : "names/15883",
+  "_key" : "15883",
+  "_rev" : "15883"
+}
+arangosh> db.names.save({ "first" : "Tom" });
+{
+  "_id" : "names/15887",
+  "_key" : "15887",
+  "_rev" : "15887"
+}
+arangosh> db.names.save({ "first" : "John" });
+{
+  "_id" : "names/15890",
+  "_key" : "15890",
+  "_rev" : "15890"
+}
+arangosh> db.names.save({ "first" : "Tim" });
+{
+  "_id" : "names/15893",
+  "_key" : "15893",
+  "_rev" : "15893"
+}
+arangosh> db.names.save({ "first" : "Tom" });
+{
+  "_id" : "names/15896",
+  "_key" : "15896",
+  "_rev" : "15896"
+}
@@ -0,0 +1,33 @@
+arangosh> db.ids.ensureIndex({ type: "persistent", fields: [ "name.first", "name.last" ], unique: true });
+{
+  "id" : "ids/15941",
+  "type" : "persistent",
+  "fields" : [
+    "name.first",
+    "name.last"
+  ],
+  "unique" : true,
+  "sparse" : false,
+  "isNewlyCreated" : true,
+  "code" : 201
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "hansen" }});
+{
+  "_id" : "ids/15944",
+  "_key" : "15944",
+  "_rev" : "15944"
+}
+arangosh> db.ids.save({ "name" : { "first" : "jens", "last": "jensen" }});
+{
+  "_id" : "ids/15948",
+  "_key" : "15948",
+  "_rev" : "15948"
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "jensen" }});
+{
+  "_id" : "ids/15951",
+  "_key" : "15951",
+  "_rev" : "15951"
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "hansen" }});
+[ArangoError 1210: cannot create document, unique constraint violated]
@@ -0,0 +1,32 @@
+arangosh> db.ids.ensureIndex({ type: "persistent", fields: [ "myId" ], unique: true });
+{
+  "id" : "ids/15959",
+  "type" : "persistent",
+  "fields" : [
+    "myId"
+  ],
+  "unique" : true,
+  "sparse" : false,
+  "isNewlyCreated" : true,
+  "code" : 201
+}
+arangosh> db.ids.save({ "myId": 123 });
+{
+  "_id" : "ids/15962",
+  "_key" : "15962",
+  "_rev" : "15962"
+}
+arangosh> db.ids.save({ "myId": 456 });
+{
+  "_id" : "ids/15966",
+  "_key" : "15966",
+  "_rev" : "15966"
+}
+arangosh> db.ids.save({ "myId": 789 });
+{
+  "_id" : "ids/15969",
+  "_key" : "15969",
+  "_rev" : "15969"
+}
+arangosh> db.ids.save({ "myId": 123 });
+[ArangoError 1210: cannot create document, unique constraint violated]
@@ -0,0 +1,33 @@
+arangosh> db.ids.ensureIndex({ type: "skiplist", fields: [ "name.first", "name.last" ], unique: true });
+{
+  "id" : "ids/16013",
+  "type" : "skiplist",
+  "fields" : [
+    "name.first",
+    "name.last"
+  ],
+  "unique" : true,
+  "sparse" : false,
+  "isNewlyCreated" : true,
+  "code" : 201
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "hansen" }});
+{
+  "_id" : "ids/16016",
+  "_key" : "16016",
+  "_rev" : "16016"
+}
+arangosh> db.ids.save({ "name" : { "first" : "jens", "last": "jensen" }});
+{
+  "_id" : "ids/16020",
+  "_key" : "16020",
+  "_rev" : "16020"
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "jensen" }});
+{
+  "_id" : "ids/16023",
+  "_key" : "16023",
+  "_rev" : "16023"
+}
+arangosh> db.ids.save({ "name" : { "first" : "hans", "last": "hansen" }});
+[ArangoError 1210: cannot create document, unique constraint violated]
@@ -21,6 +21,7 @@ def brTrim(text):

 swagger = None
 fileFilter = None
+blockFilter = None
 dokuBlocks = [{},{}]
 thisVerb = {}
 route = ''

@@ -102,6 +103,15 @@ def getRestBodyParam():
     rc += addText
     return rc

+def getRestDescription():
+    #print >>sys.stderr, "RESTDESCRIPTION"
+    if thisVerb['description']:
+        #print >> sys.stderr, thisVerb['description']
+        return thisVerb['description']
+    else:
+        #print >> sys.stderr, "ELSE"
+        return ""
+
 def getRestReplyBodyParam(param):
     rc = "\n**Reply Body**\n"

@@ -115,31 +125,33 @@ def getRestReplyBodyParam(param):


 SIMPL_REPL_DICT = {
-    "@RESTDESCRIPTION"      : "",
+    "\\"                    : "\\\\",
+    "@RESTDESCRIPTION"      : getRestDescription,
     "@RESTURLPARAMETERS"    : "\n**Path Parameters**\n",
     "@RESTQUERYPARAMETERS"  : "\n**Query Parameters**\n",
     "@RESTHEADERPARAMETERS" : "\n**Header Parameters**\n",
     "@RESTRETURNCODES"      : "\n**Return Codes**\n",
     "@PARAMS"               : "\n**Parameters**\n",
     "@RESTPARAMS"           : "",
     "@RESTURLPARAMS"        : "\n**Path Parameters**\n",
     "@RESTQUERYPARAMS"      : "\n**Query Parameters**\n",
-    "@RESTBODYPARAM"        : getRestBodyParam,
+    "@RESTBODYPARAM"        : "", #getRestBodyParam,
     "@RESTREPLYBODY"        : getRestReplyBodyParam,
     "@RESTQUERYPARAM"       : "@RESTPARAM",
     "@RESTURLPARAM"         : "@RESTPARAM",
     "@PARAM"                : "@RESTPARAM",
     "@RESTHEADERPARAM"      : "@RESTPARAM",
     "@EXAMPLES"             : "\n**Examples**\n",
     "@RESTPARAMETERS"       : ""
 }
 SIMPLE_RX = re.compile(
 r'''
+\\|                    # the backslash...
 @RESTDESCRIPTION|      # -> <empty>
 @RESTURLPARAMETERS|    # -> \n**Path Parameters**\n
 @RESTQUERYPARAMETERS|  # -> \n**Query Parameters**\n
 @RESTHEADERPARAMETERS| # -> \n**Header Parameters**\n
-@RESTBODYPARAM|        # -> call post body param
+@RESTBODYPARAM|        # empty now, comes with the post body -> call post body param
 @RESTRETURNCODES|      # -> \n**Return Codes**\n
 @PARAMS|               # -> \n**Parameters**\n
 @RESTPARAMS|           # -> <empty>

@@ -157,7 +169,7 @@ r'''

 def SimpleRepl(match):
     m = match.group(0)
-    #print 'xxxxx ' + m
+    # print 'xxxxx [%s]' % m
     try:
         n = SIMPL_REPL_DICT[m]
         if n == None:

@@ -210,7 +222,7 @@ RX = [
 # Error codes replace
 (re.compile(r"(####)#+"), r""),
 # (re.compile(r"- (\w+):\s*@LIT{(.+)}"), r"\n*\g<1>* - **\g<2>**:"),
-(re.compile(r"(.+),(\d+),\"(.+)\",\"(.+)\""), r"\n*\g<2>* - **\g<3>**: \g<4>"),
+(re.compile(r"(.+),(\d+),\"(.+)\",\"(.+)\""), r"\n- *\g<2>* - **\g<1>** - **\g<3>**: \n  \g<4>"),

 (re.compile(r"TODOSWAGGER.*"),r"")
 ]

@@ -231,7 +243,7 @@ RX2 = [

 match_RESTHEADER = re.compile(r"@RESTHEADER\{(.*)\}")
 match_RESTRETURNCODE = re.compile(r"@RESTRETURNCODE\{(.*)\}")
-have_RESTBODYPARAM = re.compile(r"@RESTBODYPARAM")
+have_RESTBODYPARAM = re.compile(r"@RESTBODYPARAM|@RESTDESCRIPTION")
 have_RESTREPLYBODY = re.compile(r"@RESTREPLYBODY")
 have_RESTSTRUCT = re.compile(r"@RESTSTRUCT")
 remove_MULTICR = re.compile(r'\n\n\n*')

@@ -275,6 +287,7 @@ def replaceCode(lines, blockName):
     foundRestBodyParam = False
     foundRestReplyBodyParam = False
     lineR = lines.split('\n')
+    #print lineR
     l = len(lineR)
     r = 0
     while (r < l):

@@ -283,10 +296,13 @@ def replaceCode(lines, blockName):
             if foundRestBodyParam:
                 lineR[r] = ''
             else:
-                lineR[r] = '@RESTBODYPARAM'
+                lineR[r] = '@RESTDESCRIPTION'
             foundRestBodyParam = True
             r+=1
-            while (len(lineR[r]) > 1):
+            while ((len(lineR[r]) > 0) and
+                   ((lineR[r][0] != '@') or
+                    have_RESTBODYPARAM.search(lineR[r]))):
+                # print "xxx - %d %s" %(len(lineR[r]), lineR[r])
                 lineR[r] = ''
                 r+=1

@@ -316,7 +332,8 @@ def replaceCode(lines, blockName):
             r+=1
         r+=1
     lines = "\n".join(lineR)
-
+    #print "x" * 70
+    #print lines
     lines = SIMPLE_RX.sub(SimpleRepl, lines)

     for (oneRX, repl) in RX2:

@@ -389,9 +406,10 @@ def findStartCode(fd,full_path):
     #print textFile

     match = re.findall(r'@startDocuBlock\s*(\w+)', textFile)
-    if match:
+    if match:
         for find in match:
             #print "8"*80
             #print find
             textFile = replaceText(textFile, full_path, find)
             #print textFile

@@ -407,10 +425,11 @@ def findStartCode(fd,full_path):
     outFD.truncate()
     outFD.write(textFile)
     outFD.close()

+#JSF_put_api_replication_synchronize
 def replaceText(text, pathOfFile, searchText):
     ''' reads the mdpp and generates the md '''
     #print '7'*80
     global dokuBlocks
     if not searchText in dokuBlocks[0]:
         print >> sys.stderr, "Failed to locate the docublock '%s' for replacing it into the file '%s'\n have:" % (searchText, pathOfFile)

@@ -418,7 +437,9 @@ def replaceText(text, pathOfFile, searchText):
         print >> sys.stderr, '*' * 80
         print >> sys.stderr, text
         exit(1)
     #print '7'*80
+    #print dokuBlocks[0][searchText]
+    #print '7'*80
     rc= re.sub("@startDocuBlock\s+"+ searchText + "(?:\s+|$)", dokuBlocks[0][searchText], text)
     return rc

@@ -495,10 +516,24 @@ def loadDokuBlocks():

     #if state == STATE_SEARCH_START:
     #  print dokuBlocks[thisBlockType].keys()

+    if blockFilter != None:
+        remainBlocks= {}
+        print "filtering blocks"
+        for oneBlock in dokuBlocks[0]:
+            if blockFilter.match(oneBlock) != None:
+                print "found block %s" % oneBlock
+                #print dokuBlocks[0][oneBlock]
+                remainBlocks[oneBlock] = dokuBlocks[0][oneBlock]
+        dokuBlocks[0] = remainBlocks
+
     for oneBlock in dokuBlocks[0]:
         try:
+            #print "processing %s" % oneBlock
             dokuBlocks[0][oneBlock] = replaceCode(dokuBlocks[0][oneBlock], oneBlock)
+            #print "6"*80
+            #print dokuBlocks[0][oneBlock]
+            #print "6"*80
         except:
             print >>sys.stderr, "while parsing :\n" + oneBlock
             raise

@@ -521,6 +556,9 @@ if __name__ == '__main__':
     if len(sys.argv) > 4 and sys.argv[4].strip() != '':
         print "filtering " + sys.argv[4]
         fileFilter = re.compile(sys.argv[4])
+    if len(sys.argv) > 5 and sys.argv[5].strip() != '':
+        print "filtering Docublocks: " + sys.argv[5]
+        blockFilter = re.compile(sys.argv[5])
     f=open(swaggerJson, 'rU')
     swagger= json.load(f)
     f.close()
@@ -513,14 +513,9 @@ bool Node::applieOp(VPackSlice const& slice) {

 // Apply slice to this node
 bool Node::applies(VPackSlice const& slice) {
-  if (slice.isObject()) {
-    // Object is an operation?
-    if (slice.hasKey("op")) {
-      if (applieOp(slice)) {
-        return true;
-      }
-    }
-  }

   if (slice.isObject()) {

     // Object is special case json
     for (auto const& i : VPackObjectIterator(slice)) {
       std::string key = i.key.copyString();

@@ -534,11 +529,11 @@ bool Node::applies(VPackSlice const& slice) {
         _children[key]->applies(i.value);
       }
     }
-
-
   } else {
     *this = slice;
   }
+
   return true;
 }

@@ -195,7 +195,7 @@ std::vector<VPackSlice> State::slices(arangodb::consensus::index_t start,
   for (size_t i = start - _cur; i <= end - _cur; ++i) { // TODO:: Check bounds
     try {
       slices.push_back(VPackSlice(_log.at(i).entry->data()));
-    } catch (std::exception const& e) {
+    } catch (std::exception const&) {
       break;
     }
   }
@@ -147,31 +147,27 @@ Store::~Store() {
 std::vector<bool> Store::apply(query_t const& query) {
   std::vector<bool> applied;
   MUTEX_LOCKER(storeLocker, _storeLock);
-  /*for (auto const& i : VPackArrayIterator(query->slice())) {
-    LOG(WARN) << i[0].typeName();
-    LOG(WARN) << i[0].keyAt(0).copyString();
-    } */
   for (auto const& i : VPackArrayIterator(query->slice())) {
     switch (i.length()) {
       case 1:
         applied.push_back(applies(i[0]));
         break;  // no precond
       case 2:
         if (check(i[1])) {  // precondition
          applied.push_back(applies(i[0]));
         } else {  // precondition failed
           LOG_TOPIC(TRACE, Logger::AGENCY) << "Precondition failed!";
           applied.push_back(false);
         }
         break;
       default:  // wrong
         LOG_TOPIC(ERR, Logger::AGENCY)
             << "We can only handle log entry with or without precondition!";
         applied.push_back(false);
         break;
     }
   }

   _cv.signal();

   return applied;

@@ -510,10 +506,38 @@ void Store::run() {

 }

-bool Store::applies(arangodb::velocypack::Slice const& slice) {
-  return _node.applies(slice);
+// Apply a request to my key value tree
+bool Store::applies(arangodb::velocypack::Slice const& transaction) {
+  std::vector<std::string> keys;
+  std::vector<std::string> abskeys;
+  std::vector<size_t> idx;
+  size_t counter = 0;
+
+  for (const auto& atom : VPackObjectIterator(transaction)) {
+    std::string key(atom.key.copyString());
+    keys.push_back(key);
+    abskeys.push_back(((key[0]=='/') ? key : std::string("/")+key));
+    idx.push_back(counter++);
+  }
+
+  sort(idx.begin(), idx.end(),
+       [&abskeys](size_t i1, size_t i2) {return abskeys[i1] < abskeys[i2];});
+
+  for (const auto& i : idx) {
+
+    std::string const& key = keys.at(i);
+    Slice value = transaction.get(key);
+
+    if (value.isObject() && value.hasKey("op")) {
+      _node(key).applieOp(value);
+    } else {
+      _node(key).applies(value);
+    }
+
+  }
+
+  return true;
+
 }

@@ -165,7 +165,8 @@ std::vector<check_t> Supervision::checkDBServers() {
  del->openArray();
  del->openObject();
  for (auto const& srv : todelete) {
    del->add(_agencyPrefix + healthPrefix + srv, VPackValue(VPackValueType::Object));
    del->add(_agencyPrefix + healthPrefix + srv,
             VPackValue(VPackValueType::Object));
    del->add("op", VPackValue("delete"));
    del->close();
  }
@@ -273,7 +274,8 @@ std::vector<check_t> Supervision::checkCoordinators() {
  del->openArray();
  del->openObject();
  for (auto const& srv : todelete) {
    del->add(_agencyPrefix + healthPrefix + srv, VPackValue(VPackValueType::Object));
    del->add(_agencyPrefix + healthPrefix + srv,
             VPackValue(VPackValueType::Object));
    del->add("op", VPackValue("delete"));
    del->close();
  }
@@ -450,7 +452,8 @@ void Supervision::shrinkCluster () {

  // Minimum 1 DB server must remain
  if (availServers.size() == 1) {
    LOG_TOPIC(DEBUG, Logger::AGENCY) << "Only one db server left for operation";
    LOG_TOPIC(DEBUG, Logger::AGENCY) <<
      "Only one db server left for operation";
    return;
  }

@@ -465,8 +468,8 @@ void Supervision::shrinkCluster () {
        maxReplFact = replFact;
      }
    } catch (std::exception const& e) {
      LOG_TOPIC(DEBUG, Logger::AGENCY) <<
        "Cannot retrieve replication factor for collection " << collptr.first;
      LOG_TOPIC(DEBUG, Logger::AGENCY) << "Cannot retrieve replication " <<
        "factor for collection " << collptr.first;
      return;
    }
  }
@@ -537,7 +540,7 @@ void Supervision::getUniqueIds() {
  while (!this->isStopping()) {
    try {
      latestId = std::stoul(
        _agent->readDB().get(_agencyPrefix + "/Sync/LatestID").slice().toJson());
        _agent->readDB().get(_agencyPrefix + "/Sync/LatestID").slice().toJson());
    } catch (...) {
      std::this_thread::sleep_for (std::chrono::seconds(1));
      continue;
@@ -382,7 +382,7 @@ AqlItemBlock* TraversalBlock::getSome(size_t, // atLeast,
  size_t const curRegs = cur->getNrRegs();

  if (_pos == 0) {
    // Initial initialisation
    // Initial initialization
    initializePaths(cur);
  }

@@ -57,6 +57,7 @@ void ClusterTraversalPath::pathToVelocyPack(Transaction*, VPackBuilder& result)
}

void ClusterTraversalPath::lastVertexToVelocyPack(Transaction*, VPackBuilder& result) {
  TRI_ASSERT(!_path.vertices.empty());
  auto cached = _traverser->_vertices.find(_path.vertices.back());
  TRI_ASSERT(cached != _traverser->_vertices.end());
  result.add(VPackSlice(cached->second->data()));
@@ -77,15 +78,16 @@ bool ClusterTraverser::VertexGetter::getVertex(std::string const& edgeId,
                                               std::string const& vertexId,
                                               size_t depth,
                                               std::string& result) {

  auto it = _traverser->_edges.find(edgeId);
  if (it != _traverser->_edges.end()) {
    VPackSlice slice(it->second->data());
    std::string from = slice.get(StaticStrings::FromString).copyString();
    if (from != vertexId) {
      result = from;
      result = std::move(from);
    } else {
      std::string to = slice.get(StaticStrings::ToString).copyString();
      result = to;
      result = std::move(to);
    }
    auto exp = _traverser->_expressions->find(depth);
    if (exp != _traverser->_expressions->end()) {
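The change to std::move in this hunk avoids one string copy: `from` and `to` are locals that die at the end of the branch, so their buffers can be stolen into the out-parameter. A reduced sketch, with hypothetical stand-in values:

#include <iostream>
#include <string>

void pickEndpoint(std::string const& vertexId, std::string& result) {
  std::string from = "v/from";  // stands in for the edge's _from attribute
  std::string to = "v/to";      // stands in for the edge's _to attribute
  if (from != vertexId) {
    result = std::move(from);   // move assignment: steal the buffer, no copy
  } else {
    result = std::move(to);
  }
}

int main() {
  std::string r;
  pickEndpoint("v/from", r);
  std::cout << r << "\n";  // prints v/to
}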
@@ -94,9 +96,11 @@ bool ClusterTraverser::VertexGetter::getVertex(std::string const& edgeId,
      // If the vertex ist not in list it means it has not passed any
      // filtering up to now
      ++_traverser->_filteredPaths;
      result = "";
      return false;
    }
    if (!_traverser->vertexMatchesCondition(VPackSlice(v->second->data()), exp->second)) {
      result = "";
      return false;
    }
  }
@@ -114,16 +118,25 @@ void ClusterTraverser::VertexGetter::reset() {
bool ClusterTraverser::UniqueVertexGetter::getVertex(
    std::string const& edgeId, std::string const& vertexId, size_t depth,
    std::string& result) {

  auto it = _traverser->_edges.find(edgeId);
  if (it != _traverser->_edges.end()) {
    VPackSlice slice(it->second->data());
    std::string from = slice.get(StaticStrings::FromString).copyString();
    if (from != vertexId) {
      result = from;
      result = std::move(from);
    } else {
      std::string to = slice.get(StaticStrings::ToString).copyString();
      result = to;
      result = std::move(to);
    }

    if (_returnedVertices.find(result) != _returnedVertices.end()) {
      // This vertex is not unique.
      ++_traverser->_filteredPaths;
      result = "";
      return false;
    }

    auto exp = _traverser->_expressions->find(depth);
    if (exp != _traverser->_expressions->end()) {
      auto v = _traverser->_vertices.find(result);
@@ -131,17 +144,14 @@ bool ClusterTraverser::UniqueVertexGetter::getVertex(
      // If the vertex ist not in list it means it has not passed any
      // filtering up to now
      ++_traverser->_filteredPaths;
      result = "";
      return false;
    }
    if (!_traverser->vertexMatchesCondition(VPackSlice(v->second->data()), exp->second)) {
      result = "";
      return false;
    }
  }
  if (_returnedVertices.find(result) != _returnedVertices.end()) {
    // This vertex is not unique.
    ++_traverser->_filteredPaths;
    return false;
  }
  _returnedVertices.emplace(result);
  return true;
}
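The uniqueness filter above does a find() followed by an emplace(). As a standalone sketch (not the project's code), the two steps can also be collapsed into a single hash operation, since emplace() reports through its bool result whether the element was already present:

#include <iostream>
#include <string>
#include <unordered_set>

// returns true the first time v is seen, false on every repeat
bool visitOnce(std::unordered_set<std::string>& returned, std::string const& v) {
  return returned.emplace(v).second;
}

int main() {
  std::unordered_set<std::string> returned;
  std::cout << visitOnce(returned, "v/1")   // 1: first visit
            << visitOnce(returned, "v/1")   // 0: filtered as duplicate
            << "\n";
}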
@@ -250,7 +260,7 @@ void ClusterTraverser::ClusterEdgeGetter::getEdge(
        return;
      }
    }
    result.push_back(next);
    result.push_back(std::move(next));
  } else {
    if (_traverser->_iteratorCache.empty()) {
      last = nullptr;
@@ -264,7 +274,7 @@ void ClusterTraverser::ClusterEdgeGetter::getEdge(
      getEdge(startVertex, result, last, eColIdx);
      return;
    } else {
      std::string const next = tmp.top();
      std::string next = tmp.top();
      tmp.pop();
      if (_traverser->_opts.uniqueEdges == TraverserOptions::UniquenessLevel::PATH) {
        auto search = std::find(result.begin(), result.end(), next);
@@ -274,7 +284,7 @@ void ClusterTraverser::ClusterEdgeGetter::getEdge(
        return;
      }
    }
    result.push_back(next);
    result.push_back(std::move(next));
  }
}
}
@@ -353,6 +363,7 @@ void ClusterTraverser::setStartVertex(std::string const& id) {
    _enumerator.reset(
        new arangodb::basics::BreadthFirstEnumerator<std::string, std::string, size_t>(
            _edgeGetter.get(), _vertexGetter.get(), id, _opts.maxDepth));
    _vertexGetter->setStartVertex(id);
  } else {
    _enumerator.reset(
        new arangodb::basics::DepthFirstEnumerator<std::string, std::string, size_t>(
@@ -400,7 +411,8 @@ void ClusterTraverser::fetchVertices(std::unordered_set<std::string>& verticesTo

  int res = getFilteredDocumentsOnCoordinator(_dbname, expVertices,
                                              verticesToFetch, _vertices);
  if (res != TRI_ERROR_NO_ERROR && res != TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) {
  if (res != TRI_ERROR_NO_ERROR &&
      res != TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) {
    THROW_ARANGO_EXCEPTION(res);
  }

@@ -435,7 +447,7 @@ arangodb::traverser::TraversalPath* ClusterTraverser::next() {
    _enumerator->prune();
  }
  TRI_ASSERT(!_pruneNext);
  const arangodb::basics::EnumeratedPath<std::string, std::string>& path =
  arangodb::basics::EnumeratedPath<std::string, std::string> const& path =
      _enumerator->next();
  if (path.vertices.empty()) {
    _done = true;
@@ -462,7 +474,7 @@ arangodb::traverser::TraversalPath* ClusterTraverser::next() {
      _opts.uniqueEdges == TraverserOptions::UniquenessLevel::PATH) {
    // Only if we use breadth first
    // and vertex uniqueness is not guaranteed
    // We have to validate edges on path uniquness.
    // We have to validate edges on path uniqueness.
    // Otherwise this situation cannot occur.
    // If two edges are identical than at least their start or end vertex
    // is on the path twice: A -> B <- A
@@ -96,6 +96,10 @@ class ClusterTraverser : public Traverser {

  void reset() override;

  void setStartVertex(std::string const& id) override {
    _returnedVertices.emplace(id);
  }

 private:
  std::unordered_set<std::string> _returnedVertices;
};
@@ -149,8 +153,9 @@ class ClusterTraversalPath : public TraversalPath {
 public:
  ClusterTraversalPath(
      ClusterTraverser const* traverser,
      const arangodb::basics::EnumeratedPath<std::string, std::string>& path)
      : _path(path), _traverser(traverser) {}
      arangodb::basics::EnumeratedPath<std::string, std::string> const& path)
      : _path(path), _traverser(traverser) {
  }

  void pathToVelocyPack(Transaction*, arangodb::velocypack::Builder&) override;

@@ -955,6 +955,10 @@ static void JS_GetCoordinators(
  TRI_V8_TRY_CATCH_BEGIN(isolate);
  v8::HandleScope scope(isolate);

  if (!ServerState::instance()->isRunningInCluster()) {
    TRI_V8_THROW_EXCEPTION_INTERNAL("ArangoDB is not running in cluster mode");
  }

  if (args.Length() != 0) {
    TRI_V8_THROW_EXCEPTION_USAGE("getCoordinators()");
  }
@@ -490,8 +490,10 @@ int ContinuousSyncer::processDocument(TRI_replication_operation_e type,
  isSystem = (!cnameString.empty() && cnameString[0] == '_');

  if (!cnameString.empty()) {
    TRI_vocbase_col_t* col =
        TRI_LookupCollectionByNameVocBase(_vocbase, cnameString.c_str());
    TRI_vocbase_col_t* col = nullptr;
    if (_useCollectionId) {
      col = TRI_LookupCollectionByNameVocBase(_vocbase, cnameString.c_str());
    }

    if (col != nullptr && col->_cid != cid) {
      // cid change? this may happen for system collections or if we restored
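This and the following replication hunks repeat one pattern: only consult the id-based lookup when useCollectionId is enabled, then fall back to a lookup by name. A reduced sketch with hypothetical stub types, not the project's code:

#include <cstdint>
#include <iostream>
#include <string>

struct Collection { std::string name; };

Collection* lookupById(uint64_t) { return nullptr; }   // stub: id unknown
Collection* lookupByName(std::string const& n) {       // stub: one collection
  static Collection c{"demo"};
  return n == c.name ? &c : nullptr;
}

Collection* resolve(bool useCollectionId, uint64_t cid, std::string const& name) {
  Collection* col = nullptr;
  if (useCollectionId) {
    col = lookupById(cid);       // first try the collection id, if allowed
  }
  if (col == nullptr && !name.empty()) {
    col = lookupByName(name);    // otherwise fall back to the name
  }
  return col;
}

int main() {
  std::cout << (resolve(false, 42, "demo") != nullptr) << "\n";  // prints 1
}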
@@ -751,7 +753,10 @@ int ContinuousSyncer::renameCollection(VPackSlice const& slice) {
  }

  TRI_voc_cid_t const cid = getCid(slice);
  TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  TRI_vocbase_col_t* col = nullptr;
  if (_useCollectionId) {
    col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  }

  if (col == nullptr && !cname.empty()) {
    col = TRI_LookupCollectionByNameVocBase(_vocbase, cname.c_str());
@@ -776,7 +781,11 @@ int ContinuousSyncer::changeCollection(VPackSlice const& slice) {

  TRI_voc_cid_t cid = getCid(slice);
  std::string const cname = getCName(slice);
  TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  TRI_vocbase_col_t* col = nullptr;

  if (col == nullptr) {
    TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  }

  if (col == nullptr && !cname.empty()) {
    col = TRI_LookupCollectionByNameVocBase(_vocbase, cname.c_str());
@@ -1705,7 +1705,10 @@ int InitialSyncer::handleCollection(VPackSlice const& parameters,
  if (phase == PHASE_DROP_CREATE) {
    if (!incremental) {
      // first look up the collection by the cid
      TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
      TRI_vocbase_col_t* col = nullptr;
      if (_useCollectionId) {
        col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
      }

      if (col == nullptr && !masterName.empty()) {
        // not found, try name next
@@ -1774,7 +1777,9 @@ int InitialSyncer::handleCollection(VPackSlice const& parameters,
    TRI_vocbase_col_t* col = nullptr;

    if (incremental) {
      col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
      if (_useCollectionId) {
        col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
      }

      if (col == nullptr && !masterName.empty()) {
        // not found, try name next
@@ -1812,7 +1817,10 @@ int InitialSyncer::handleCollection(VPackSlice const& parameters,
    std::string const progress = "dumping data for " + collectionMsg;
    setProgress(progress.c_str());

    TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
    TRI_vocbase_col_t* col = nullptr;
    if (_useCollectionId) {
      col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
    }

    if (col == nullptr && !masterName.empty()) {
      // not found, try name next
@@ -57,7 +57,7 @@ class InitialSyncer : public Syncer {
 public:
  InitialSyncer(TRI_vocbase_t*, TRI_replication_applier_configuration_t const*,
                std::unordered_map<std::string, bool> const&,
                std::string const&, bool);
                std::string const&, bool verbose);

  ~InitialSyncer();

@@ -79,6 +79,7 @@ Syncer::Syncer(TRI_vocbase_t* vocbase,
  _localServerIdString = StringUtils::itoa(_localServerId);

  _configuration.update(configuration);
  _useCollectionId = _configuration._useCollectionId;

  _masterInfo._endpoint = configuration->_endpoint;

@@ -439,7 +440,10 @@ int Syncer::createCollection(VPackSlice const& slice, TRI_vocbase_col_t** dst) {
  TRI_col_type_e const type = static_cast<TRI_col_type_e>(VelocyPackHelper::getNumericValue<int>(
      slice, "type", static_cast<int>(TRI_COL_TYPE_DOCUMENT)));

  TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  TRI_vocbase_col_t* col = nullptr;
  if (_useCollectionId) {
    col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  }

  if (col == nullptr) {
    // try looking up the collection by name then
@@ -480,7 +484,11 @@ int Syncer::createCollection(VPackSlice const& slice, TRI_vocbase_col_t** dst) {

int Syncer::dropCollection(VPackSlice const& slice, bool reportError) {
  TRI_voc_cid_t const cid = getCid(slice);
  TRI_vocbase_col_t* col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  TRI_vocbase_col_t* col = nullptr;

  if (_useCollectionId) {
    col = TRI_LookupCollectionByIdVocBase(_vocbase, cid);
  }

  if (col == nullptr) {
    std::string cname = getCName(slice);
@@ -247,6 +247,12 @@ class Syncer {
  //////////////////////////////////////////////////////////////////////////////

  int _barrierTtl;

  //////////////////////////////////////////////////////////////////////////////
  /// @brief whether or not to use collection ids in replication
  //////////////////////////////////////////////////////////////////////////////

  bool _useCollectionId;

  //////////////////////////////////////////////////////////////////////////////
  /// @brief base url of the replication API
@@ -3061,6 +3061,8 @@ void RestReplicationHandler::handleCommandMakeSlave() {
      VelocyPackHelper::getBooleanValue(body, "verbose", defaults._verbose);
  config._incremental = VelocyPackHelper::getBooleanValue(
      body, "incremental", defaults._incremental);
  config._useCollectionId = VelocyPackHelper::getBooleanValue(
      body, "useCollectionId", defaults._useCollectionId);
  config._requireFromPresent = VelocyPackHelper::getBooleanValue(
      body, "requireFromPresent", defaults._requireFromPresent);
  config._restrictType = VelocyPackHelper::getStringValue(
@@ -3215,6 +3217,8 @@ void RestReplicationHandler::handleCommandSync() {
      VelocyPackHelper::getBooleanValue(body, "incremental", false);
  bool const keepBarrier =
      VelocyPackHelper::getBooleanValue(body, "keepBarrier", false);
  bool const useCollectionId =
      VelocyPackHelper::getBooleanValue(body, "useCollectionId", true);

  std::unordered_map<std::string, bool> restrictCollections;
  VPackSlice const restriction = body.get("restrictCollections");
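The getBooleanValue calls above read optional boolean attributes from the request body with a default. A minimal sketch of that pattern, assuming only the velocypack library (the helper name getBool here is hypothetical):

#include <velocypack/Builder.h>
#include <velocypack/Slice.h>
#include <velocypack/Value.h>
#include <iostream>

namespace vp = arangodb::velocypack;

// fall back to def when the attribute is absent or not a boolean
bool getBool(vp::Slice slice, char const* key, bool def) {
  vp::Slice v = slice.get(key);
  return v.isBoolean() ? v.getBoolean() : def;
}

int main() {
  vp::Builder b;
  b.openObject();
  b.add("keepBarrier", vp::Value(true));
  b.close();
  std::cout << getBool(b.slice(), "keepBarrier", false)      // 1: present
            << getBool(b.slice(), "useCollectionId", true)   // 1: default
            << "\n";
}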
@@ -3248,6 +3252,7 @@ void RestReplicationHandler::handleCommandSync() {
  config._password = password;
  config._includeSystem = includeSystem;
  config._verbose = verbose;
  config._useCollectionId = useCollectionId;

  // wait until all data in current logfile got synced
  arangodb::wal::LogfileManager::instance()->waitForSync(5.0);
@@ -113,6 +113,10 @@ void UpgradeFeature::start() {

  // and force shutdown
  if (_upgrade || init->isInitDatabase() || init->restoreAdmin()) {
    if (init->isInitDatabase()) {
      *_result = EXIT_SUCCESS;
    }

    server()->beginShutdown();
  }
}
@@ -172,6 +176,8 @@ void UpgradeFeature::changeAdminPassword(std::string const& defaultPassword) {

  // and return from the context
  LOG(TRACE) << "finished to restore admin user";

  *_result = EXIT_SUCCESS;
}

void UpgradeFeature::upgradeDatabase(std::string const& defaultPassword) {
@@ -310,6 +310,11 @@ static void JS_SynchronizeReplication(
        TRI_ObjectToBoolean(object->Get(TRI_V8_ASCII_STRING("keepBarrier")));
  }

  if (object->Has(TRI_V8_ASCII_STRING("useCollectionId"))) {
    config._useCollectionId =
        TRI_ObjectToBoolean(object->Get(TRI_V8_ASCII_STRING("useCollectionId")));
  }

  std::string errorMsg = "";
  InitialSyncer syncer(vocbase, &config, restrictCollections, restrictType,
                       verbose);
@@ -1043,7 +1043,7 @@ static void JS_WarningAql(v8::FunctionCallbackInfo<v8::Value> const& args) {
  // note: we may not have a query if the AQL functions are called without
  // a query, e.g. during tests
  int code = static_cast<int>(TRI_ObjectToInt64(args[0]));
  std::string const&& message = TRI_ObjectToString(args[1]);
  std::string const message = TRI_ObjectToString(args[1]);

  auto query = static_cast<arangodb::aql::Query*>(v8g->_query);
  query->registerWarning(code, message.c_str());
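The `std::string const&&` dropped here is a const rvalue reference: it binds to the temporary and extends its lifetime, but nothing can ever move from it, so it buys nothing over a plain const value and reads misleadingly. A reduced sketch of the two forms:

#include <iostream>
#include <string>

std::string make() { return "warning text"; }

int main() {
  std::string const&& bound = make();  // legal, lifetime extended, but not movable-from
  std::string const message = make();  // equivalent in practice, and clearer
  std::cout << bound << " / " << message << "\n";
}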
@@ -25,7 +25,7 @@

using namespace arangodb::traverser;

void SingleServerTraversalPath::getDocumentByIdentifier(Transaction* trx,
void SingleServerTraversalPath::getDocumentByIdentifier(arangodb::Transaction* trx,
                                                        std::string const& identifier,
                                                        VPackBuilder& result) {
  std::shared_ptr<VPackBuffer<uint8_t>> vertex =
@@ -33,7 +33,7 @@ void SingleServerTraversalPath::getDocumentByIdentifier(Transaction* trx,
  result.add(VPackSlice(vertex->data()));
}

void SingleServerTraversalPath::pathToVelocyPack(Transaction* trx,
void SingleServerTraversalPath::pathToVelocyPack(arangodb::Transaction* trx,
                                                 VPackBuilder& result) {
  result.openObject();
  result.add(VPackValue("edges"));
@@ -56,7 +56,7 @@ void SingleServerTraversalPath::pathToVelocyPack(Transaction* trx,
  result.close();
}

void SingleServerTraversalPath::lastEdgeToVelocyPack(Transaction* trx, VPackBuilder& result) {
void SingleServerTraversalPath::lastEdgeToVelocyPack(arangodb::Transaction* trx, VPackBuilder& result) {
  if (_path.edges.empty()) {
    result.add(arangodb::basics::VelocyPackHelper::NullValue());
    return;
@@ -67,7 +67,7 @@ void SingleServerTraversalPath::lastEdgeToVelocyPack(Transaction* trx, VPackBuil
  result.add(VPackSlice(cached->second->data()));
}

void SingleServerTraversalPath::lastVertexToVelocyPack(Transaction* trx, VPackBuilder& result) {
void SingleServerTraversalPath::lastVertexToVelocyPack(arangodb::Transaction* trx, VPackBuilder& result) {
  std::shared_ptr<VPackBuffer<uint8_t>> vertex =
      _traverser->fetchVertexData(_path.vertices.back());
  result.add(VPackSlice(vertex->data()));
|
|
@ -65,7 +65,7 @@ static int FetchDocumentById(arangodb::Transaction* trx,
|
|||
}
|
||||
|
||||
SingleServerTraverser::SingleServerTraverser(
|
||||
TraverserOptions& opts, Transaction* trx,
|
||||
TraverserOptions& opts, arangodb::Transaction* trx,
|
||||
std::unordered_map<size_t, std::vector<TraverserExpression*>> const*
|
||||
expressions)
|
||||
: Traverser(opts, expressions), _trx(trx) {
|
||||
|
@@ -386,7 +386,6 @@ void SingleServerTraverser::EdgeGetter::nextEdge(
        _traverser->_edges.emplace(_trx->extractIdString(edge), nullptr);
      }

      ++_traverser->_filteredPaths;
      TRI_ASSERT(last != nullptr);
      (*last)++;
      continue;
@@ -463,7 +462,6 @@ void SingleServerTraverser::EdgeGetter::getAllEdges(
        // Insert a dummy to please the uniqueness
        _traverser->_edges.emplace(_trx->extractIdString(edge), nullptr);
      }
      ++_traverser->_filteredPaths;
      continue;
    }
    std::string id = _trx->extractIdString(edge);
@@ -210,6 +210,12 @@ static int LoadConfiguration(TRI_vocbase_t* vocbase,
    config->_incremental = value.getBoolean();
  }

  value = slice.get("useCollectionId");

  if (value.isBoolean()) {
    config->_useCollectionId = value.getBoolean();
  }

  value = slice.get("ignoreErrors");

  if (value.isNumber()) {
@@ -463,6 +469,7 @@ TRI_replication_applier_configuration_t::
      _requireFromPresent(false),
      _incremental(false),
      _verbose(false),
      _useCollectionId(true),
      _restrictType(),
      _restrictCollections() {}

@@ -509,6 +516,7 @@ void TRI_replication_applier_configuration_t::toVelocyPack(
  builder.add("requireFromPresent", VPackValue(_requireFromPresent));
  builder.add("verbose", VPackValue(_verbose));
  builder.add("incremental", VPackValue(_incremental));
  builder.add("useCollectionId", VPackValue(_useCollectionId));
  builder.add("restrictType", VPackValue(_restrictType));

  builder.add("restrictCollections", VPackValue(VPackValueType::Array));
@@ -800,6 +808,7 @@ void TRI_replication_applier_configuration_t::update(
  _requireFromPresent = src->_requireFromPresent;
  _verbose = src->_verbose;
  _incremental = src->_incremental;
  _useCollectionId = src->_useCollectionId;
  _restrictType = src->_restrictType;
  _restrictCollections = src->_restrictCollections;
  _connectionRetryWaitTime = src->_connectionRetryWaitTime;
@@ -65,6 +65,7 @@ class TRI_replication_applier_configuration_t {
  bool _requireFromPresent;
  bool _incremental;
  bool _verbose;
  bool _useCollectionId;
  std::string _restrictType;
  std::unordered_map<std::string, bool> _restrictCollections;

@@ -46,8 +46,8 @@ ClientFeature::ClientFeature(application_features::ApplicationServer* server,
      _password(""),
      _connectionTimeout(connectionTimeout),
      _requestTimeout(requestTimeout),
      _maxPacketSize(128 * 1024 * 1024),
      _sslProtocol(4),
      _section("server"),
      _retries(DEFAULT_RETRIES),
      _warn(false) {
  setOptional(true);
@@ -56,79 +56,88 @@ ClientFeature::ClientFeature(application_features::ApplicationServer* server,
}

void ClientFeature::collectOptions(std::shared_ptr<ProgramOptions> options) {
  options->addSection(_section, "Configure a connection to the server");
  options->addSection("server", "Configure a connection to the server");

  options->addOption("--" + _section + ".database",
  options->addOption("--server.database",
                     "database name to use when connecting",
                     new StringParameter(&_databaseName));

  options->addOption("--" + _section + ".authentication",
  options->addOption("--server.authentication",
                     "require authentication when connecting",
                     new BooleanParameter(&_authentication));

  options->addOption("--" + _section + ".username",
  options->addOption("--server.username",
                     "username to use when connecting",
                     new StringParameter(&_username));

  options->addOption(
      "--" + _section + ".endpoint",
      "--server.endpoint",
      "endpoint to connect to, use 'none' to start without a server",
      new StringParameter(&_endpoint));

  options->addOption("--" + _section + ".password",
                     "password to use when connection. If not specified and "
  options->addOption("--server.password",
                     "password to use when connecting. If not specified and "
                     "authentication is required, the user will be prompted "
                     "for a password.",
                     "for a password",
                     new StringParameter(&_password));

  options->addOption("--" + _section + ".connection-timeout",
  options->addOption("--server.connection-timeout",
                     "connection timeout in seconds",
                     new DoubleParameter(&_connectionTimeout));

  options->addOption("--" + _section + ".request-timeout",
  options->addOption("--server.request-timeout",
                     "request timeout in seconds",
                     new DoubleParameter(&_requestTimeout));

  options->addOption("--server.max-packet-size",
                     "maximum packet size (in bytes) for client/server communication",
                     new UInt64Parameter(&_maxPacketSize));

  std::unordered_set<uint64_t> sslProtocols = {1, 2, 3, 4};
  std::unordered_set<uint64_t> sslProtocols = {1, 2, 3, 4, 5};

  options->addOption("--" + _section + ".ssl-protocol",
                     "1 = SSLv2, 2 = SSLv23, 3 = SSLv3, 4 = TLSv1",
  options->addSection("ssl", "Configure SSL communication");
  options->addOption("--ssl.protocol",
                     "ssl protocol (1 = SSLv2, 2 = SSLv23, 3 = SSLv3, 4 = "
                     "TLSv1, 5 = TLSV1.2 (recommended)",
                     new DiscreteValuesParameter<UInt64Parameter>(
                         &_sslProtocol, sslProtocols));
}

void ClientFeature::validateOptions(std::shared_ptr<ProgramOptions> options) {
  // if a username is specified explicitly, assume authentication is desired
  if (options->processingResult().touched(_section + ".username")) {
  if (options->processingResult().touched("server.username")) {
    _authentication = true;
  }

  // check timeouts
  if (_connectionTimeout < 0.0) {
    LOG(FATAL) << "invalid value for --" << _section
               << ".connect-timeout, must be >= 0";
    LOG(FATAL) << "invalid value for --server.connect-timeout, must be >= 0";
    FATAL_ERROR_EXIT();
  } else if (_connectionTimeout == 0.0) {
    _connectionTimeout = LONG_TIMEOUT;
  }

  if (_requestTimeout < 0.0) {
    LOG(FATAL) << "invalid value for --" << _section
               << ".request-timeout, must be positive";
    LOG(FATAL) << "invalid value for --server.request-timeout, must be positive";
    FATAL_ERROR_EXIT();
  } else if (_requestTimeout == 0.0) {
    _requestTimeout = LONG_TIMEOUT;
  }

  if (_maxPacketSize < 1 * 1024 * 1024) {
    LOG(FATAL) << "invalid value for --server.max-packet-size, must be at least 1 MB";
    FATAL_ERROR_EXIT();
  }

  // username must be non-empty
  if (_username.empty()) {
    LOG(FATAL) << "no value specified for --" << _section << ".username";
    LOG(FATAL) << "no value specified for --server.username";
    FATAL_ERROR_EXIT();
  }

  // ask for a password
  if (_authentication &&
      !options->processingResult().touched(_section + ".password")) {
      !options->processingResult().touched("server.password")) {
    usleep(10 * 1000);

    try {
@@ -145,6 +154,8 @@ void ClientFeature::validateOptions(std::shared_ptr<ProgramOptions> options) {
    std::cout << "Please specify a password: " << std::flush;
    std::getline(std::cin, _password);
  }

  SimpleHttpClient::setMaxPacketSize(_maxPacketSize);
}

std::unique_ptr<GeneralClientConnection> ClientFeature::createConnection() {
@@ -62,6 +62,7 @@ class ClientFeature final : public application_features::ApplicationFeature,
  void setPassword(std::string const& value) { _password = value; }
  double connectionTimeout() const { return _connectionTimeout; }
  double requestTimeout() const { return _requestTimeout; }
  uint64_t maxPacketSize() const { return _maxPacketSize; }
  uint64_t sslProtocol() const { return _sslProtocol; }

 public:
@@ -89,10 +90,10 @@ class ClientFeature final : public application_features::ApplicationFeature,
  std::string _password;
  double _connectionTimeout;
  double _requestTimeout;
  uint64_t _maxPacketSize;
  uint64_t _sslProtocol;

 private:
  std::string _section;
  size_t _retries;
  bool _warn;
};
@@ -1,426 +1,432 @@
include(GNUInstallDirs)

# install the visual studio runtime:
if (MSVC)
  include(InstallRequiredSystemLibraries)
  INSTALL(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION bin COMPONENT Libraries)
endif()

# etc -------------------------------
set(ETCDIR "" CACHE path "System configuration directory (defaults to prefix/etc)")

# /etc -------------------------------
if (ETCDIR STREQUAL "")
  set(ETCDIR_NATIVE "${CMAKE_INSTALL_PREFIX}/etc/arangodb3")
  set(ETCDIR_INSTALL "etc/arangodb3")
else ()
  set(ETCDIR_NATIVE "${ETCDIR}/arangodb3")
  set(ETCDIR_INSTALL "${ETCDIR}/arangodb3")
endif ()

# MS stuff ---------------------------
if (MSVC)
  file(TO_NATIVE_PATH "${ETCDIR_INSTALL}" ETCDIR_INSTALL)
  STRING(REGEX REPLACE "\\\\" "\\\\\\\\" ETCDIR_ESCAPED "${ETCDIR_INSTALL}")
else ()
  file(TO_NATIVE_PATH "${ETCDIR_NATIVE}" ETCDIR_NATIVE)
  STRING(REGEX REPLACE "\\\\" "\\\\\\\\" ETCDIR_ESCAPED "${ETCDIR_NATIVE}")
endif ()

add_definitions("-D_SYSCONFDIR_=\"${ETCDIR_ESCAPED}\"")

# /var
set(VARDIR ""
  CACHE path
  "System configuration directory (defaults to prefix/var/arangodb3)"
)

if (VARDIR STREQUAL "")
  set(VARDIR_NATIVE "${CMAKE_INSTALL_PREFIX}/var")
  set(VARDIR_INSTALL "var")
else ()
  set(VARDIR_NATIVE "${VARDIR}")
  set(VARDIR_INSTALL "${VARDIR}")
endif ()

file(TO_NATIVE_PATH "${VARDIR_NATIVE}" VARDIR_NATIVE)

# database directory
FILE(MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/var/lib/arangodb3")

# apps
FILE(MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/var/lib/arangodb3-apps")

# logs
FILE(MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/var/log/arangodb")

# package
set(TRI_PKGDATADIR "${CMAKE_INSTALL_PREFIX}/share/arangodb3")

# resources
set(TRI_RESOURCEDIR "resources")

# sbinaries
if (MSVC)
  set(ARANGODB_INSTALL_SBIN "bin")
  set(TRI_SBINDIR "${CMAKE_INSTALL_PREFIX}/bin")
else ()
  set(ARANGODB_INSTALL_SBIN "sbin")
  set(TRI_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin")
endif ()

# MS Windows -------------------------------------------------------------------
if (MSVC)
  # icon paths
  file(TO_NATIVE_PATH
    "${TRI_RESOURCEDIR}/Icons/arangodb.ico"
    RELATIVE_ARANGO_ICON
  )

  file(TO_NATIVE_PATH
    "${PROJECT_SOURCE_DIR}/Installation/Windows/Icons/arangodb.bmp"
    ARANGO_IMG
  )

  file(TO_NATIVE_PATH
    "${PROJECT_SOURCE_DIR}/Installation/Windows/Icons/arangodb.ico"
    ARANGO_ICON
  )

  STRING(REGEX REPLACE "\\\\" "\\\\\\\\" ARANGO_IMG "${ARANGO_IMG}")
  STRING(REGEX REPLACE "\\\\" "\\\\\\\\" ARANGO_ICON "${ARANGO_ICON}")
  STRING(REGEX REPLACE "\\\\" "\\\\\\\\" RELATIVE_ARANGO_ICON "${RELATIVE_ARANGO_ICON}")

  # versioning
  set(CMAKE_MODULE_PATH
    ${CMAKE_MODULE_PATH}
    ${PROJECT_SOURCE_DIR}/Installation/Windows/version
  )

  include("${PROJECT_SOURCE_DIR}/Installation/Windows/version/generate_product_version.cmake")
endif ()

################################################################################
## INSTALL
################################################################################

# Global macros ----------------------------------------------------------------
macro (generate_root_config name)
  FILE(READ ${PROJECT_SOURCE_DIR}/etc/arangodb3/${name}.conf.in FileContent)
  STRING(REPLACE "@PKGDATADIR@" "@ROOTDIR@/share/arangodb3"
    FileContent "${FileContent}")
  STRING(REPLACE "@LOCALSTATEDIR@" "@ROOTDIR@/var"
    FileContent "${FileContent}")
  STRING(REPLACE "@SBINDIR@" "@ROOTDIR@/bin"
    FileContent "${FileContent}")
  STRING(REPLACE "@LIBEXECDIR@/arangodb3" "@ROOTDIR@/bin"
    FileContent "${FileContent}")
  STRING(REPLACE "@SYSCONFDIR@" "@ROOTDIR@/etc/arangodb3"
    FileContent "${FileContent}")
  if (MSVC)
    STRING(REPLACE "@PROGRAM_SUFFIX@" ".exe"
      FileContent "${FileContent}")
    STRING(REGEX REPLACE "[\r\n]file =" "\n# file ="
      FileContent "${FileContent}")
  endif ()
  FILE(WRITE ${PROJECT_BINARY_DIR}/etc/arangodb3/${name}.conf "${FileContent}")
endmacro ()

# generates config file using the configured paths ----------------------------
macro (generate_path_config name)
  FILE(READ ${PROJECT_SOURCE_DIR}/etc/arangodb3/${name}.conf.in FileContent)
  STRING(REPLACE "@PKGDATADIR@" "${TRI_PKGDATADIR}"
    FileContent "${FileContent}")
  STRING(REPLACE "@LOCALSTATEDIR@" "${VARDIR_NATIVE}"
    FileContent "${FileContent}")
  FILE(WRITE ${PROJECT_BINARY_DIR}/etc/arangodb3/${name}.conf "${FileContent}")
endmacro ()

# installs a config file -------------------------------------------------------
macro (install_config name)
  if (MSVC OR DARWIN)
  if (MSVC OR (DARWIN AND NOT HOMEBREW))
    generate_root_config(${name})
  else ()
    generate_path_config(${name})
  endif ()
  install(
    FILES ${PROJECT_BINARY_DIR}/etc/arangodb3/${name}.conf
    DESTINATION ${ETCDIR_INSTALL})
endmacro ()

# installs a readme file converting EOL ----------------------------------------
macro (install_readme input where output)
  FILE(READ ${PROJECT_SOURCE_DIR}/${input} FileContent)
  STRING(REPLACE "\r" "" FileContent "${FileContent}")
  if (MSVC)
    STRING(REPLACE "\n" "\r\n" FileContent "${FileContent}")
  endif ()
  FILE(WRITE ${PROJECT_BINARY_DIR}/${output} "${FileContent}")
  install(
    FILES ${PROJECT_BINARY_DIR}/${output}
    DESTINATION ${where})
endmacro ()

# installs a link to an executable ---------------------------------------------
macro (install_command_alias name where alias)
  if (MSVC)
    add_custom_command(
      TARGET ${name}
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:${name}>
        ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/$(Configuration)/${alias}.exe)
        ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/$<CONFIGURATION>/${alias}.exe)
    install(
      PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/$(Configuration)/${alias}.exe
      PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/$<CONFIGURATION>/${alias}.exe
      DESTINATION ${where})
  else ()
    add_custom_command(
      TARGET ${name}
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E create_symlink ${name}
        ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${alias})
    install(
      PROGRAMS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${alias}
      DESTINATION ${where})
  endif ()
endmacro ()

# sub directories --------------------------------------------------------------

#if(BUILD_STATIC_EXECUTABLES)
#  set(CMAKE_EXE_LINKER_FLAGS -static)
#  set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
#  set(CMAKE_EXE_LINK_DYNAMIC_C_FLAGS)       # remove -Wl,-Bdynamic
#  set(CMAKE_EXE_LINK_DYNAMIC_CXX_FLAGS)
#  set(CMAKE_SHARED_LIBRARY_C_FLAGS)         # remove -fPIC
#  set(CMAKE_SHARED_LIBRARY_CXX_FLAGS)
#  set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS)    # remove -rdynamic
#  set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS)
#  # Maybe this works as well, haven't tried yet.
#  # set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS FALSE)
#else(BUILD_STATIC_EXECUTABLES)
#  # Set RPATH to use for installed targets; append linker search path
#  set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${LOFAR_LIBDIR}")
#  set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
#endif(BUILD_STATIC_EXECUTABLES)


#--------------------------------------------------------------------------------
#get_cmake_property(_variableNames VARIABLES)
#foreach (_variableName ${_variableNames})
#  message(STATUS "${_variableName}=${${_variableName}}")
#endforeach ()
#--------------------------------------------------------------------------------

# install ----------------------------------------------------------------------
install(DIRECTORY ${PROJECT_SOURCE_DIR}/Documentation/man/
  DESTINATION share/man)

if (MSVC)
  install_readme(README . README.txt)
  install_readme(README.md . README.md)
  install_readme(README.windows . README.windows.txt)
endif ()

if (MSVC)
  install_readme(LICENSE . LICENSE.txt)
  install_readme(LICENSES-OTHER-COMPONENTS.md . LICENSES-OTHER-COMPONENTS.md)
else ()
  install_readme(README share/doc/arangodb3 README)
  install_readme(README.md share/doc/arangodb3 README.md)
  install_readme(LICENSE share/doc/arangodb3 LICENSE)
  install_readme(LICENSES-OTHER-COMPONENTS.md share/doc/arangodb3 LICENSES-OTHER-COMPONENTS.md)
endif ()

# Build package ----------------------------------------------------------------
if (NOT(MSVC))
  set(CPACK_SET_DESTDIR ON)
endif()

find_program(DH_INSTALLINIT dh_installinit)
find_program(FAKEROOT fakeroot)

if (DH_INSTALLINIT AND FAKEROOT)
  add_custom_target(prepare_debian)
  SET(DEBIAN_CONTROL_EXTRA_BASENAMES
    postinst
    preinst
    postrm
    prerm
  )
  SET(DEBIAN_WORK_DIR "${PROJECT_BINARY_DIR}/debian-work")
  add_custom_command(TARGET prepare_debian POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E
      remove_directory "${DEBIAN_WORK_DIR}"
  )
  foreach (_DEBIAN_CONTROL_EXTRA_BASENAME ${DEBIAN_CONTROL_EXTRA_BASENAMES})
    SET(RELATIVE_NAME "debian/${_DEBIAN_CONTROL_EXTRA_BASENAME}")
    SET(SRCFILE "${PROJECT_SOURCE_DIR}/Installation/${RELATIVE_NAME}")
    SET(DESTFILE "${DEBIAN_WORK_DIR}/${RELATIVE_NAME}")

    list(APPEND DEBIAN_CONTROL_EXTRA_SRC "${SRCFILE}")
    list(APPEND DEBIAN_CONTROL_EXTRA_DEST "${DESTFILE}")

    add_custom_command(TARGET prepare_debian POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E
        copy ${SRCFILE} ${DESTFILE})
  endforeach()

  add_custom_command(TARGET prepare_debian POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E
      copy "${PROJECT_SOURCE_DIR}/Installation/debian/control" "${DEBIAN_WORK_DIR}/debian/control"
  )
  add_custom_command(TARGET prepare_debian POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E
      copy "${PROJECT_SOURCE_DIR}/Installation/debian/compat" "${DEBIAN_WORK_DIR}/debian/compat"
  )
  add_custom_command(TARGET prepare_debian POST_BUILD
    COMMAND fakeroot "${DH_INSTALLINIT}" -o 2>/dev/null
    WORKING_DIRECTORY ${DEBIAN_WORK_DIR}
  )
  add_custom_command(TARGET prepare_debian POST_BUILD
    COMMAND fakeroot dh_installdeb
    WORKING_DIRECTORY ${DEBIAN_WORK_DIR}
  )
endif()

# General
set(CPACK_PACKAGE_NAME "arangodb3")
set(CPACK_PACKAGE_VENDOR "ArangoDB GmbH")
set(CPACK_PACKAGE_CONTACT "info@arangodb.com")
set(CPACK_PACKAGE_VERSION "${ARANGODB_VERSION}")

set(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/LICENSE")

set(CPACK_STRIP_FILES "ON")
set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "amd64")
set(CPACK_DEBIAN_PACKAGE_SECTION "database")
set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "a multi-purpose NoSQL database
 A distributed free and open-source database with a flexible data model for documents,
 graphs, and key-values. Build high performance applications using a convenient
 SQL-like query language or JavaScript extensions.
 .
 Copyright: 2014-2016 by ArangoDB GmbH
 Copyright: 2012-2013 by triAGENS GmbH
 ArangoDB Software
 www.arangodb.com
")
SET(CPACK_DEBIAN_PACKAGE_CONFLICTS "arangodb")
set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
set(CPACK_DEBIAN_COMPRESSION_TYPE "xz")
set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "https://www.arangodb.com/")
set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${PROJECT_BINARY_DIR}/debian-work/debian/${CPACK_PACKAGE_NAME}/DEBIAN/postinst;${PROJECT_BINARY_DIR}/debian-work/debian/${CPACK_PACKAGE_NAME}/DEBIAN/preinst;${PROJECT_BINARY_DIR}/debian-work/debian/${CPACK_PACKAGE_NAME}/DEBIAN/postrm;${PROJECT_BINARY_DIR}/debian-work/debian/${CPACK_PACKAGE_NAME}/DEBIAN/prerm;")
set(CPACK_BUNDLE_NAME "${CPACK_PACKAGE_NAME}")
configure_file("${PROJECT_SOURCE_DIR}/Installation/MacOSX/Bundle/Info.plist.in" "${CMAKE_CURRENT_BINARY_DIR}/Info.plist")
set(CPACK_BUNDLE_PLIST "${CMAKE_CURRENT_BINARY_DIR}/Info.plist")
set(CPACK_BUNDLE_ICON "${PROJECT_SOURCE_DIR}/Installation/MacOSX/Bundle/icon.icns")
set(CPACK_BUNDLE_STARTUP_COMMAND "${PROJECT_SOURCE_DIR}/Installation/MacOSX/Bundle/arangodb-cli.sh")

# MS installer
if (MSVC)
  set(CPACK_PACKAGE_NAME "ArangoDB")
  set(CPACK_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Installation/Windows/Templates")
  set(CPACK_PLUGIN_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Installation/Windows/Plugins")
  set(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL 1)
  set(BITS 64)

  if (CMAKE_CL_64)
    SET(CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES64")
    SET(BITS 64)
  else ()
    SET(CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES")
    SET(BITS 32)
  endif ()

  message(STATUS "ARANGO_IMG:  ${ARANGO_IMG}")
  message(STATUS "ARANGO_ICON: ${ARANGO_ICON}")
  message(STATUS "RELATIVE_ARANGO_ICON: ${RELATIVE_ARANGO_ICON}")

  install(
    DIRECTORY "${PROJECT_SOURCE_DIR}/Installation/Windows/Icons"
    DESTINATION ${TRI_RESOURCEDIR})

  set(CPACK_NSIS_DEFINES "
    !define BITS ${BITS}
    !define TRI_FRIENDLY_SVC_NAME '${ARANGODB_FRIENDLY_STRING}'
    !define TRI_AARDVARK_URL 'http://127.0.0.1:8529'
    ")

  set(CPACK_PACKAGE_ICON ${ARANGO_ICON})

  set(CPACK_NSIS_MODIFY_PATH ON)
  set(CPACK_NSIS_MUI_ICON ${ARANGO_ICON})
  set(CPACK_NSIS_MUI_UNIICON ${ARANGO_ICON})
  set(CPACK_NSIS_INSTALLED_ICON_NAME ${RELATIVE_ARANGO_ICON})
  set(CPACK_NSIS_DISPLAY_NAME, ${ARANGODB_DISPLAY_NAME})
  set(CPACK_NSIS_HELP_LINK ${ARANGODB_HELP_LINK})
  set(CPACK_NSIS_URL_INFO_ABOUT ${ARANGODB_URL_INFO_ABOUT})
  set(CPACK_NSIS_CONTACT ${ARANGODB_CONTACT})
endif ()

configure_file("${CMAKE_SOURCE_DIR}/Installation/cmake/CMakeCPackOptions.cmake.in"
  "${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake" @ONLY)
set(CPACK_PROJECT_CONFIG_FILE "${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake")

if (NOT(MSVC))
  # components
  install(
    FILES ${PROJECT_SOURCE_DIR}/Installation/debian/arangodb.init
    PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
    DESTINATION ${ETCDIR}/init.d
    RENAME arangodb3
    COMPONENT debian-extras
  )
endif()

# Custom targets ----------------------------------------------------------------

# love
add_custom_target (love
  COMMENT "ArangoDB loves you."
  COMMAND ""
)


# Finally: user cpack
include(CPack)

################################################################################
### @brief install client-side JavaScript files
################################################################################

install(
  DIRECTORY ${PROJECT_SOURCE_DIR}/js/common ${PROJECT_SOURCE_DIR}/js/client
  DESTINATION share/arangodb3/js
  FILES_MATCHING PATTERN "*.js"
  REGEX "^.*/common/test-data$" EXCLUDE
  REGEX "^.*/common/tests$" EXCLUDE
  REGEX "^.*/client/tests$" EXCLUDE)

################################################################################
### @brief install server-side JavaScript files
################################################################################

install(
  DIRECTORY ${PROJECT_SOURCE_DIR}/js/actions ${PROJECT_SOURCE_DIR}/js/apps ${PROJECT_SOURCE_DIR}/js/contrib ${PROJECT_SOURCE_DIR}/js/node ${PROJECT_SOURCE_DIR}/js/server
  DESTINATION share/arangodb3/js
  REGEX "^.*/server/tests$" EXCLUDE
  REGEX "^.*/aardvark/APP/node_modules$" EXCLUDE
)

################################################################################
### @brief install log directory
################################################################################

install(
  DIRECTORY ${PROJECT_BINARY_DIR}/var/log/arangodb
  DESTINATION ${VARDIR_INSTALL}/log)

################################################################################
### @brief install database directory
################################################################################

install(
  DIRECTORY ${PROJECT_BINARY_DIR}/var/lib/arangodb3
  DESTINATION ${VARDIR_INSTALL}/lib)

################################################################################
### @brief install apps directory
################################################################################

install(
  DIRECTORY ${PROJECT_BINARY_DIR}/var/lib/arangodb3-apps
  DESTINATION ${VARDIR_INSTALL}/lib)
|
||||
")
|
||||
|
||||
set(CPACK_PACKAGE_ICON ${ARANGO_ICON})
|
||||
|
||||
set(CPACK_NSIS_MODIFY_PATH ON)
|
||||
set(CPACK_NSIS_MUI_ICON ${ARANGO_ICON})
|
||||
set(CPACK_NSIS_MUI_UNIICON ${ARANGO_ICON})
|
||||
set(CPACK_NSIS_INSTALLED_ICON_NAME ${RELATIVE_ARANGO_ICON})
|
||||
set(CPACK_NSIS_DISPLAY_NAME, ${ARANGODB_DISPLAY_NAME})
|
||||
set(CPACK_NSIS_HELP_LINK ${ARANGODB_HELP_LINK})
|
||||
set(CPACK_NSIS_URL_INFO_ABOUT ${ARANGODB_URL_INFO_ABOUT})
|
||||
set(CPACK_NSIS_CONTACT ${ARANGODB_CONTACT})
|
||||
endif ()
|
||||
|
||||
configure_file("${CMAKE_SOURCE_DIR}/Installation/cmake/CMakeCPackOptions.cmake.in"
|
||||
"${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake" @ONLY)
|
||||
set(CPACK_PROJECT_CONFIG_FILE "${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake")
|
||||
|
||||
if (NOT(MSVC))
|
||||
# components
|
||||
install(
|
||||
FILES ${PROJECT_SOURCE_DIR}/Installation/debian/arangodb.init
|
||||
PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
|
||||
DESTINATION ${ETCDIR}/init.d
|
||||
RENAME arangodb3
|
||||
COMPONENT debian-extras
|
||||
)
|
||||
endif()
|
||||
|
||||
# Custom targets ----------------------------------------------------------------
|
||||
|
||||
# love
|
||||
add_custom_target (love
|
||||
COMMENT "ArangoDB loves you."
|
||||
COMMAND ""
|
||||
)
|
||||
|
||||
|
||||
# Finally: user cpack
|
||||
include(CPack)
|
||||
|
||||
################################################################################
|
||||
### @brief install client-side JavaScript files
|
||||
################################################################################
|
||||
|
||||
install(
|
||||
DIRECTORY ${PROJECT_SOURCE_DIR}/js/common ${PROJECT_SOURCE_DIR}/js/client
|
||||
DESTINATION share/arangodb3/js
|
||||
FILES_MATCHING PATTERN "*.js"
|
||||
REGEX "^.*/common/test-data$" EXCLUDE
|
||||
REGEX "^.*/common/tests$" EXCLUDE
|
||||
REGEX "^.*/client/tests$" EXCLUDE)
|
||||
|
||||
################################################################################
|
||||
### @brief install server-side JavaScript files
|
||||
################################################################################
|
||||
|
||||
install(
|
||||
DIRECTORY ${PROJECT_SOURCE_DIR}/js/actions ${PROJECT_SOURCE_DIR}/js/apps ${PROJECT_SOURCE_DIR}/js/contrib ${PROJECT_SOURCE_DIR}/js/node ${PROJECT_SOURCE_DIR}/js/server
|
||||
DESTINATION share/arangodb3/js
|
||||
REGEX "^.*/server/tests$" EXCLUDE
|
||||
REGEX "^.*/aardvark/APP/node_modules$" EXCLUDE
|
||||
)
|
||||
|
||||
################################################################################
|
||||
### @brief install log directory
|
||||
################################################################################
|
||||
|
||||
install(
|
||||
DIRECTORY ${PROJECT_BINARY_DIR}/var/log/arangodb
|
||||
DESTINATION ${VARDIR_INSTALL}/log)
|
||||
|
||||
################################################################################
|
||||
### @brief install database directory
|
||||
################################################################################
|
||||
|
||||
install(
|
||||
DIRECTORY ${PROJECT_BINARY_DIR}/var/lib/arangodb3
|
||||
DESTINATION ${VARDIR_INSTALL}/lib)
|
||||
|
||||
################################################################################
|
||||
### @brief install apps directory
|
||||
################################################################################
|
||||
|
||||
install(
|
||||
DIRECTORY ${PROJECT_BINARY_DIR}/var/lib/arangodb3-apps
|
||||
DESTINATION ${VARDIR_INSTALL}/lib)
|
||||
|
|
|
@@ -1291,10 +1291,10 @@ actions.defineHttp({
/// The attribute name is the collection name. Each value is an object
/// of the following form:
///
/// { "collection1": { "Plan":    { "s100001": ["DBServer1", "DBServer2"],
///                                 "s100002": ["DBServer3", "DBServer4"] },
///                    "Current": { "s100001": ["DBServer1", "DBServer2"],
///                                 "s100002": ["DBServer3"] } },
/// { "collection1": { "Plan":    { "s100001": ["DBServer001", "DBServer002"],
///                                 "s100002": ["DBServer003", "DBServer004"] },
///                    "Current": { "s100001": ["DBServer001", "DBServer002"],
///                                 "s100002": ["DBServer003"] } },
///   "collection2": ...
/// }
///
@@ -308,7 +308,7 @@ foxxRouter.get('/download/zip', function (req, res) {
  const dir = fs.join(fs.makeAbsolute(service.root), service.path);
  const zipPath = fmUtils.zipDirectory(dir);
  const name = mount.replace(/^\/|\/$/g, '').replace(/\//g, '_');
  res.download(zipPath, `${name}.zip`);
  res.download(zipPath, `${name}_${service.manifest.version}.zip`);
})
.summary('Download a service as zip archive')
.description(dd`
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -3473,4 +3511,4 @@ var cutByResolution = function (str) {
</div>

<div id="workMonitorContent" class="innerContent">
</div></script></head><body><nav class="navbar" style="display: none"><div class="primary"><div class="navlogo"><a class="logo big" href="#"><img class="arangodbLogo" src="img/arangodb_logo_big.png"></a> <a class="logo small" href="#"><img class="arangodbLogo" src="img/arangodb_logo_small.png"></a> <a class="version"><span>VERSION:</span><span id="currentVersion"></span></a></div><div class="statmenu" id="statisticBar"></div><div class="navmenu" id="navigationBar"></div></div></nav><div id="modalPlaceholder"></div><div class="bodyWrapper" style="display: none"><div class="centralRow"><div id="navbar2" class="navbarWrapper secondary"><div class="subnavmenu" id="subNavigationBar"></div></div><div class="resizecontainer contentWrapper"><div id="loadingScreen" class="loadingScreen" style="display: none"><i class="fa fa-circle-o-notch fa-spin fa-3x fa-fw margin-bottom"></i> <span class="sr-only">Loading...</span></div><div id="content" class="centralContent"></div><footer class="footer"><div id="footerBar"></div></footer></div></div></div><div id="progressPlaceholder" style="display:none"></div><div id="spotlightPlaceholder" style="display:none"></div><div id="offlinePlaceholder" style="display:none"><div class="offline-div"><div class="pure-u"><div class="pure-u-1-4"></div><div class="pure-u-1-2 offline-window"><div class="offline-header"><h3>You have been disconnected from the server</h3></div><div class="offline-body"><p>The connection to the server has been lost. The server may be under heavy load.</p><p>Trying to reconnect in <span id="offlineSeconds">10</span> seconds.</p><p class="animation_state"><span><button class="button-success">Reconnect now</button></span></p></div></div><div class="pure-u-1-4"></div></div></div></div><div class="arangoFrame" style=""><div class="outerDiv"><div class="innerDiv"></div></div></div><script src="libs.js?version=1465737305640"></script><script src="app.js?version=1465737305640"></script></body></html>
</div></script></head><body><nav class="navbar" style="display: none"><div class="primary"><div class="navlogo"><a class="logo big" href="#"><img class="arangodbLogo" src="img/arangodb_logo_big.png"></a> <a class="logo small" href="#"><img class="arangodbLogo" src="img/arangodb_logo_small.png"></a> <a class="version"><span>VERSION:</span><span id="currentVersion"></span></a></div><div class="statmenu" id="statisticBar"></div><div class="navmenu" id="navigationBar"></div></div></nav><div id="modalPlaceholder"></div><div class="bodyWrapper" style="display: none"><div class="centralRow"><div id="navbar2" class="navbarWrapper secondary"><div class="subnavmenu" id="subNavigationBar"></div></div><div class="resizecontainer contentWrapper"><div id="loadingScreen" class="loadingScreen" style="display: none"><i class="fa fa-circle-o-notch fa-spin fa-3x fa-fw margin-bottom"></i> <span class="sr-only">Loading...</span></div><div id="content" class="centralContent"></div><footer class="footer"><div id="footerBar"></div></footer></div></div></div><div id="progressPlaceholder" style="display:none"></div><div id="spotlightPlaceholder" style="display:none"></div><div id="offlinePlaceholder" style="display:none"><div class="offline-div"><div class="pure-u"><div class="pure-u-1-4"></div><div class="pure-u-1-2 offline-window"><div class="offline-header"><h3>You have been disconnected from the server</h3></div><div class="offline-body"><p>The connection to the server has been lost. The server may be under heavy load.</p><p>Trying to reconnect in <span id="offlineSeconds">10</span> seconds.</p><p class="animation_state"><span><button class="button-success">Reconnect now</button></span></p></div></div><div class="pure-u-1-4"></div></div></div></div><div class="arangoFrame" style=""><div class="outerDiv"><div class="innerDiv"></div></div></div><script src="libs.js?version=1465843263201"></script><script src="app.js?version=1465843263201"></script></body></html>
Binary file not shown.

@@ -297,8 +297,26 @@
      this.buildSubNavBar(menus);
    },

    buildNodesSubNav: function(activeKey, disabled) {
      var menus = {
        Overview: {
          route: '#nodes'
        },
        Shards: {
          route: '#shards'
        }
      };

      menus[activeKey].active = true;
      if (disabled) {
        menus[disabled].disabled = true;
      }
      this.buildSubNavBar(menus);
    },

    scaleability: undefined,

    /*
    //nav for cluster/nodes view
    buildNodesSubNav: function(type) {

@@ -363,6 +381,7 @@

      this.buildSubNavBar(menus);
    },
    */

    //nav for collection view
    buildCollectionSubNav: function(collectionName, activeKey) {
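The new buildNodesSubNav helper takes the key of the tab to highlight and, optionally, the key of a tab to grey out; a minimal usage sketch (the disabled case is a hypothetical scenario, not taken from this commit):

    // highlight the Shards tab
    arangoHelper.buildNodesSubNav('Shards');

    // highlight Overview and grey out Shards (hypothetical situation)
    arangoHelper.buildNodesSubNav('Overview', 'Shards');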
@@ -302,6 +302,7 @@
        return;
      }
      this.shardsView = new window.ShardsView({
        dbServers: this.dbServers
      });
      this.shardsView.render();
    },
@@ -1,49 +1,87 @@
<script id="shardsView.ejs" type="text/template">

  <div id="shardsContent" class="innerContent">

    <% var genClass = 'pure-u-1-3'; %>
    <% var disabled = ' '; %>
    <% var collectionName; %>
    <% var first = 0; %>

    <% _.each(collections, function(collection, name) { %>
      <% if (name.substring(0,1) !== '_') { %>
        <% collectionName = name %>

        <div class="sectionHeader pure-g">
          <div class="pure-u-1-1">
            <div class="title" style="position: relative; top: -4px;">
              <%= name %>
        <% if (first === 0) { %>
          <div class="sectionHeader pure-g" style="margin-top: -20px;">
          <% first++; %>
        <% } else { %>
          <div class="sectionHeader pure-g">
        <% } %>

          <div class="pure-u-1-1">
            <div class="title" style="position: relative; top: -4px;">
              <%= name %>
            </div>
          </div>
        </div>
      </div>

      <div class="pure-g pure-table pure-table-header pure-title" style="clear: both">
        <div class="pure-table-row">
          <div class="<%= genClass %> left">Shard</div>
          <div class="<%= genClass %> left ">Leader</div>
          <div class="<%= genClass %> left">Followers</div>
        </div>
      </div>

      <% _.each(collection.Plan, function(shard, name) { %>
        <div class="pure-g pure-table pure-table-body">
          <div class="pure-table-row disabled">

            <div class="<%= genClass %> left"><%= name %></div>
            <% if (name === Object.keys(collection.Current)[0]) { %>
              <div class="<%= genClass %> positive left"><%= shard.leader %></div>
            <% } else { %>
              <div class="<%= genClass %> negative left"><%= shard.leader %></div>
            <% } %>
            <% if (shard.followers.length === 0) { %>
              <div class="<%= genClass %> left"> no followers </div>
            <% } else { %>
              <div class="<%= genClass %> left"><%= shard.follower %></div>
            <% } %>
      <div class="pure-g pure-table pure-table-header pure-title" style="clear: both">
        <div class="pure-table-row">
          <div class="<%= genClass %> left">Shard</div>
          <div class="<%= genClass %> left ">Leader</div>
          <div class="<%= genClass %> left">Followers</div>
        </div>
      </div>
      <% }); %>

      <% var counter = 0; %>
      <% _.each(collection.Plan, function(shard, name) { %>
        <div class="pure-g pure-table pure-table-body">
          <div class="pure-table-row" collection="<%= collectionName %>" shard="<%= name %>" leader="<%= shard.leader%>">

            <div class="<%= genClass %> left"><%= name %></div>

            <% if (name === Object.keys(collection.Current)[counter]) { %>
              <div class="<%= genClass %> positive left"><%= shard.leader %></div>
            <% } else { %>
              <div class="<%= genClass %> negative left"><%= shard.leader %></div>
            <% } %>

            <% var found = null; %>
            <% _.each(shard.followers, function(db) { %>
              <% if (db === shard.leader) { %>
                <% found = true; %>
              <% } %>
            <% }); %>

            <% if (found) { %>
              <div class="<%= genClass %> mid"><i class="fa fa-circle-o-notch fa-spin"></i></div>
            <% } else { %>
              <% if (shard.followers.length === 0) { %>
                <div class="<%= genClass %> left"> no followers </div>
              <% } else { %>

                <% var string = ''; %>
                <% _.each(shard.followers, function(db) { %>
                  <% if (shard.followers.length === 1) { %>
                    <% string += db + " "; %>
                  <% } else { %>
                    <% string += db + ", "; %>
                  <% } %>
                <% }); %>

                <div class="<%= genClass %> left"><%= string %></div>
              <% } %>
            <% } %>

          </div>
        </div>

        <% counter++; %>
      <% }); %>

      <% } %>
    <% }); %>

    <button id="rebalanceShards" style="margin-top: 20px;" class="button-success pull-right">Rebalance Shards</button>
  </div>

</script>
@@ -537,114 +537,14 @@
    },

    getCoordStatHistory: function(callback) {
      var self = this, promises = [], historyUrl;

      var merged = {
        http: {}
      };

      var getHistory = function(url) {
        return $.get(url, {count: self.statCollectCoord.size()}, null, 'json');
      };

      var mergeHistory = function(data) {

        var onetime = ['times'];
        var values = [
          'physicalMemory',
          'residentSizeCurrent',
          'clientConnections15M',
          'clientConnectionsCurrent'
        ];
        var http = [
          'optionsPerSecond',
          'putsPerSecond',
          'headsPerSecond',
          'postsPerSecond',
          'getsPerSecond',
          'deletesPerSecond',
          'othersPerSecond',
          'patchesPerSecond'
        ];
        var arrays = [
          'bytesSentPerSecond',
          'bytesReceivedPerSecond',
          'avgRequestTime'
        ];

        var counter = 0, counter2;

        _.each(data, function(stat) {
          if (stat.enabled) {
            self.statsEnabled = true;
          }
          else {
            self.statsEnabled = false;
          }

          if (typeof stat === 'object') {
            if (counter === 0) {
              //one time value
              _.each(onetime, function(value) {
                merged[value] = stat[value];
              });

              //values
              _.each(values, function(value) {
                merged[value] = stat[value];
              });

              //http requests arrays
              _.each(http, function(value) {
                merged.http[value] = stat[value];
              });

              //arrays
              _.each(arrays, function(value) {
                merged[value] = stat[value];
              });

            }
            else {
              //values
              _.each(values, function(value) {
                merged[value] = merged[value] + stat[value];
              });
              //http requests arrays
              _.each(http, function(value) {
                counter2 = 0;
                _.each(stat[value], function(x) {
                  merged.http[value][counter] = merged.http[value][counter] + x;
                  counter2++;
                });
              });
              _.each(arrays, function(value) {
                counter2 = 0;
                _.each(stat[value], function(x) {
                  merged[value][counter] = merged[value][counter] + x;
                  counter2++;
                });
              });
            }
            counter++;
          }
        });
      };

      this.statCollectCoord.each(function(coord) {
        historyUrl = coord.url + '/short';
        promises.push(getHistory(historyUrl));
      });

      $.when.apply($, promises).done(function() {
        //wait until all data is here
        var arr = [];
        _.each(promises, function(stat) {
          arr.push(stat.responseJSON);
        });
        mergeHistory(arr);
        callback(merged);
      });
      $.ajax({
        url: "statistics/coordshort",
        json: true,
      })
      .success(function(data) {
        this.statsEnabled = data.enabled;
        callback(data.data);
      }.bind(this));
    }

  });
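After this change the client no longer merges per-coordinator samples itself; it only unpacks the aggregated reply of the coordshort route (added further below), whose shape is { enabled, data }. A hedged sketch of a consumer:

    $.ajax({ url: "statistics/coordshort", json: true })
      .success(function (result) {
        // result.enabled: true if statistics are enabled on a coordinator
        // result.data:    the merged history object ({times, http, ...})
        console.log(result.enabled, result.data.times);
      });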
@@ -361,7 +361,6 @@
          label: user.get("user")
        });
      });
      console.log(users);

      tableContent.push(
        window.modalView.createSelectEntry(
@@ -111,7 +111,7 @@
      self.dbServer = self.dbServers[0];

      self.dbServer.each(function(model) {
        if (model.get("name") === 'DBServer1') {
        if (model.get("name") === 'DBServer001') {
          self.dbServer = model;
        }
      });
@@ -29,7 +29,7 @@
        //start polling with interval
        this.intervalFunction = window.setInterval(function() {
          if (window.location.hash === '#nodes') {
            self.render();
            self.render(false);
          }
        }, this.interval);
      }
@@ -45,7 +45,7 @@
      window.App.navigate("#node/" + encodeURIComponent(name), {trigger: true});
    },

    render: function () {
    render: function (navi) {

      var self = this;

@@ -75,6 +75,9 @@
        }
      });

      if (navi !== false) {
        arangoHelper.buildNodesSubNav('Overview');
      }
    },

    continueRender: function(nodes, scaling) {
@@ -12,10 +12,14 @@
    knownServers: [],

    events: {
      "click #shardsContent .pure-table-row" : "moveShard",
      "click #rebalanceShards" : "rebalanceShards"
    },

    initialize: function () {
    initialize: function (options) {
      var self = this;

      self.dbServers = options.dbServers;
      clearInterval(this.intervalFunction);

      if (window.App.isCluster) {

@@ -24,13 +28,13 @@
        //start polling with interval
        this.intervalFunction = window.setInterval(function() {
          if (window.location.hash === '#shards') {
            self.render();
            self.render(false);
          }
        }, this.interval);
      }
    },

    render: function () {
    render: function (navi) {

      var self = this;

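Both cluster views now share the same refresh idiom: poll on an interval, re-render only while the matching hash route is on screen, and pass false so periodic refreshes skip rebuilding the sub-navigation. A condensed sketch of the pattern (the interval length is an assumed placeholder):

    var self = this;
    this.intervalFunction = window.setInterval(function () {
      if (window.location.hash === '#shards') {  // view is actually visible
        self.render(false);                      // false: refresh data, keep sub-nav
      }
    }, 10000); // placeholder; the view supplies this.interval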
@@ -48,6 +52,121 @@
          arangoHelper.arangoError("Cluster", "Could not fetch sharding information.");
        }
      });

      if (navi !== false) {
        arangoHelper.buildNodesSubNav('Shards');
      }
    },

    moveShard: function(e) {
      var dbName = window.App.currentDB.get("name");
      var collectionName = $(e.currentTarget).attr("collection");
      var shardName = $(e.currentTarget).attr("shard");
      var fromServer = $(e.currentTarget).attr("leader");

      var buttons = [],
          tableContent = [];

      var array = [];
      this.dbServers[0].each(function(db) {
        if (db.get("name") !== fromServer) {
          array.push({
            value: db.get("name"),
            label: db.get("name")
          });
        }
      });
      array = array.reverse();

      tableContent.push(
        window.modalView.createSelectEntry(
          "toDBServer",
          "Destination",
          undefined,
          //this.users !== null ? this.users.whoAmI() : 'root',
          "Please select the target database server. The selected database " +
          "server will be the new leader of the shard.",
          array
        )
      );

      buttons.push(
        window.modalView.createSuccessButton(
          "Move",
          this.confirmMoveShards.bind(this, dbName, collectionName, shardName, fromServer)
        )
      );

      window.modalView.show(
        "modalTable.ejs",
        "Move shard: " + shardName,
        buttons,
        tableContent
      );

    },

    confirmMoveShards: function(dbName, collectionName, shardName, fromServer) {
      var self = this;
      var toServer = $('#toDBServer').val();

      var data = {
        database: dbName,
        collection: collectionName,
        shard: shardName,
        fromServer: fromServer,
        toServer: toServer
      };

      $.ajax({
        type: "POST",
        cache: false,
        url: arangoHelper.databaseUrl("/_admin/cluster/moveShard"),
        contentType: "application/json",
        processData: false,
        data: JSON.stringify(data),
        async: true,
        success: function(data) {
          if (data === true) {
            window.setTimeout(function() {
              self.render(false);
            }, 1500);
            arangoHelper.arangoNotification("Shard " + shardName + " will be moved to " + toServer + ".");
          }
        },
        error: function() {
          arangoHelper.arangoNotification("Shard " + shardName + " could not be moved to " + toServer + ".");
        }
      });

      window.modalView.hide();
    },

    rebalanceShards: function() {
      var self = this;

      $.ajax({
        type: "POST",
        cache: false,
        url: arangoHelper.databaseUrl("/_admin/cluster/rebalanceShards"),
        contentType: "application/json",
        processData: false,
        data: JSON.stringify({}),
        async: true,
        success: function(data) {
          if (data === true) {
            window.setTimeout(function() {
              self.render(false);
            }, 1500);
            arangoHelper.arangoNotification("Started rebalance process.");
          }
        },
        error: function() {
          arangoHelper.arangoNotification("Could not start rebalance process.");
        }
      });

      window.modalView.hide();
    },

    continueRender: function(collections) {
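The endpoint driven by this modal can also be exercised directly; a minimal sketch using the @arangodb/request module that the cluster test suite below relies on as well (all concrete names here are placeholders):

    var request = require("@arangodb/request");
    var body = {
      database: "_system",        // placeholder values throughout
      collection: "collection1",
      shard: "s100001",
      fromServer: "DBServer001",
      toServer: "DBServer002"
    };
    // the route answers true once the move job has been scheduled
    request({ method: "POST",
              url: "http://127.0.0.1:8529/_admin/cluster/moveShard",
              body: JSON.stringify(body) });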
@@ -26,9 +26,11 @@
////////////////////////////////////////////////////////////////////////////////

const internal = require('internal');
const download = internal.download;
const cluster = require('@arangodb/cluster');

const db = require('@arangodb').db;
const _ = require('lodash');

const STATISTICS_INTERVAL = require('@arangodb/statistics').STATISTICS_INTERVAL;
const STATISTICS_HISTORY_INTERVAL = require('@arangodb/statistics').STATISTICS_HISTORY_INTERVAL;
@@ -403,6 +405,108 @@ router.use((req, res, next) => {
  next();
});

router.get("/coordshort", function(req, res) {
  var merged = {
    http: {}
  };

  var mergeHistory = function(data) {

    var onetime = ['times'];
    var values = [
      'physicalMemory',
      'residentSizeCurrent',
      'clientConnections15M',
      'clientConnectionsCurrent'
    ];
    var http = [
      'optionsPerSecond',
      'putsPerSecond',
      'headsPerSecond',
      'postsPerSecond',
      'getsPerSecond',
      'deletesPerSecond',
      'othersPerSecond',
      'patchesPerSecond'
    ];
    var arrays = [
      'bytesSentPerSecond',
      'bytesReceivedPerSecond',
      'avgRequestTime'
    ];

    var counter = 0, counter2;

    _.each(data, function(stat) {
      //if (stat.enabled) {
      //  self.statsEnabled = true;
      //}
      //else {
      //  self.statsEnabled = false;
      //}

      if (typeof stat === 'object') {
        if (counter === 0) {
          //one time value
          _.each(onetime, function(value) {
            merged[value] = stat[value];
          });

          //values
          _.each(values, function(value) {
            merged[value] = stat[value];
          });

          //http requests arrays
          _.each(http, function(value) {
            merged.http[value] = stat[value];
          });

          //arrays
          _.each(arrays, function(value) {
            merged[value] = stat[value];
          });

        }
        else {
          //values
          _.each(values, function(value) {
            merged[value] = merged[value] + stat[value];
          });
          //http requests arrays
          _.each(http, function(value) {
            counter2 = 0;
            _.each(stat[value], function(x) {
              merged.http[value][counter] = merged.http[value][counter] + x;
              counter2++;
            });
          });
          _.each(arrays, function(value) {
            counter2 = 0;
            _.each(stat[value], function(x) {
              merged[value][counter] = merged[value][counter] + x;
              counter2++;
            });
          });
        }
        counter++;
      }
    });
  };

  var coordinators = global.ArangoClusterInfo.getCoordinators();

  var coordinatorStats = coordinators.map(coordinator => {
    var endpoint = global.ArangoClusterInfo.getServerEndpoint(coordinator);
    var response = download(endpoint.replace(/^tcp/, "http") + "/_db/_system/_admin/aardvark/statistics/short?count=" + coordinators.length);
    return JSON.parse(response.body);
  });

  mergeHistory(coordinatorStats);
  res.json({"enabled": coordinatorStats.some(stat => stat.enabled), "data": merged});
})
.summary("Short term history for all coordinators")
.description("This function is used to get the statistics history.");

router.get("/short", function (req, res) {
  const start = req.queryParams.start;
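The merge treats the first coordinator's sample as the baseline (scalars and the one-time times axis are copied) and then adds every further coordinator's values onto it, element by element for the per-second arrays. The core of that idea in isolation, as a hedged sketch:

    // result[i] = sum over all coordinators of statsPerCoordinator[k][key][i]
    function sumSeries(statsPerCoordinator, key) {
      var merged = statsPerCoordinator[0][key].slice(); // baseline: first sample
      for (var k = 1; k < statsPerCoordinator.length; k++) {
        statsPerCoordinator[k][key].forEach(function (x, i) {
          merged[i] += x; // accumulate the k-th coordinator's values
        });
      }
      return merged;
    }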
@@ -362,7 +362,7 @@ function agencyTestSuite () {
      }

      var requests = [
        ["/_api/agency/write", [[{"/arango/Plan/DBServers/DBServer1":{"new":"none","op":"set"}}]]],
        ["/_api/agency/write", [[{"/arango/Plan/DBServers/DBServer001":{"new":"none","op":"set"}}]]],
        ["/_api/agency/read", [["/arango/Plan/DBServers"]]],
      ];

@@ -371,7 +371,7 @@ function agencyTestSuite () {
        assertEqual(res.statusCode, 200);
      });

      assertEqual(res.body, JSON.stringify([{"arango":{"Plan":{"DBServers":{"DBServer1":"none"}}}}]));
      assertEqual(res.body, JSON.stringify([{"arango":{"Plan":{"DBServers":{"DBServer001":"none"}}}}]));
    }
  };
}
@@ -1022,7 +1022,7 @@ function synchronizeOneShard(database, shard, planId, leader) {
    try {
      sy = rep.syncCollection(shard,
        { endpoint: ep, incremental: true,
          keepBarrier: true });
          keepBarrier: true, useCollectionId: false });
      break;
    }
    catch (err) {

@@ -1063,8 +1063,7 @@ function synchronizeOneShard(database, shard, planId, leader) {
  if (lockJobId !== false) {
    try {
      var sy2 = rep.syncCollectionFinalize(
        database, shard, sy.collections[0].id,
        sy.lastLogTick, { endpoint: ep });
        database, shard, sy.lastLogTick, { endpoint: ep });
      if (sy2.error) {
        console.error("synchronizeOneShard: Could not synchronize shard",
                      shard, sy2);
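Taken together, the two calls form the follower catch-up protocol: an incremental initial sync first, then a tail of the leader's log from the tick where the sync stopped. Schematically (error handling and retries elided, names as in the surrounding code):

    // phase 1: incremental sync of the shard from the leader at endpoint ep
    var sy = rep.syncCollection(shard, { endpoint: ep, incremental: true,
                                         keepBarrier: true, useCollectionId: false });

    // phase 2: replay the leader's log from the tick where phase 1 stopped
    var sy2 = rep.syncCollectionFinalize(database, shard, sy.lastLogTick,
                                         { endpoint: ep });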
@@ -334,7 +334,6 @@ function mountController(service, mount, filename) {
}

exports.routeService = function (service, throwOnErrors) {
  const defaultDocument = service.manifest.defaultDocument;
  let error = null;

  service.routes = {

@@ -353,21 +352,6 @@ exports.routeService = function (service, throwOnErrors) {
    }
  };

  if (defaultDocument) {
    // only add redirection if src and target are not the same
    service.routes.routes.push({
      url: {match: '/'},
      action: {
        do: '@arangodb/actions/redirectRequest',
        options: {
          permanently: false,
          destination: defaultDocument,
          relative: true
        }
      }
    });
  }

  try {
    // mount all controllers
    let controllerFiles = service.manifest.controllers;
@@ -74,7 +74,11 @@ module.exports = class SyntheticResponse {
    if (typeof data === 'string' || data instanceof Buffer) {
      this._raw.body = data;
    } else if (typeof data === 'object') {
      this._raw.body = JSON.stringify(data);
      if (this.context.isDevelopment) {
        this._raw.body = JSON.stringify(data, null, 2);
      } else {
        this._raw.body = JSON.stringify(data);
      }
    } else {
      this._raw.body = String(data);
    }

@@ -118,7 +122,11 @@ module.exports = class SyntheticResponse {
    }
    if (!dataIsBuffer) {
      if (typeof data === 'object') {
        data = JSON.stringify(data);
        if (this.context.isDevelopment) {
          data = JSON.stringify(data, null, 2);
        } else {
          data = JSON.stringify(data);
        }
      } else {
        data = String(data);
      }
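The effect of the change is simply a different JSON.stringify call per mode; for illustration:

    var data = { name: "arangodb", ok: true };
    JSON.stringify(data);          // '{"name":"arangodb","ok":true}'   (production)
    JSON.stringify(data, null, 2); // pretty-printed over several lines (development)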
@@ -141,6 +141,21 @@ exports.routeService = function (service, throwOnErrors) {
    error = routeLegacyService(service, throwOnErrors);
  }

  const defaultDocument = service.manifest.defaultDocument;
  if (defaultDocument) {
    service.routes.routes.push({
      url: {match: '/'},
      action: {
        do: '@arangodb/actions/redirectRequest',
        options: {
          permanently: false,
          destination: defaultDocument,
          relative: true
        }
      }
    });
  }

  if (service.manifest.files) {
    const files = service.manifest.files;
    _.each(files, function (file, path) {
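With this hook in place, a service only needs a defaultDocument entry in its manifest for requests to the mount root to be redirected there; a minimal manifest sketch with assumed values:

    // manifest.json of a Foxx service (illustrative values only)
    {
      "name": "my-service",
      "version": "1.0.0",
      "defaultDocument": "index.html"
    }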
@@ -205,11 +205,11 @@ var mType = {
  REPLICATION_MARKER_REMOVE: 2302
};

function syncCollectionFinalize(database, collname, collid, from, config) {
function syncCollectionFinalize(database, collname, from, config) {
  var url = endpointToURL(config.endpoint) + "/_db/" + database +
    "/_api/replication/logger-follow?collection=" + collid + "&from=";
    "/_api/replication/logger-follow?collection=" + collname + "&from=";

  var coll = require("internal").db[collid];
  var coll = require("internal").db[collname];

  var transactions = {};

@@ -237,11 +237,12 @@ function syncCollectionFinalize(database, collname, collid, from, config) {
        return;
      }
      catch (err) {
        console.debug("syncCollectionFinalize: insert1", entry, JSON.stringify(err));
      }
      try {
        coll.replace(entry.data._key, entry.data, {isRestore: true});
      } catch (errx) {
        console.error("syncCollectionFinalize: replace1", entry, errx);
        console.error("syncCollectionFinalize: replace1", entry, JSON.stringify(errx));
        throw errx;
      }
    } else if (entry.type === mType.REPLICATION_MARKER_EDGE) {

@@ -253,11 +254,12 @@ function syncCollectionFinalize(database, collname, collid, from, config) {
        return;
      }
      catch (err) {
        console.debug("syncCollectionFinalize: insert2", entry, JSON.stringify(err));
      }
      try {
        coll.replace(entry.key, entry.data, {isRestore: true});
      } catch (errx) {
        console.error("syncCollectionFinalize: replace2", entry, errx);
        console.error("syncCollectionFinalize: replace2", entry, JSON.stringify(errx));
        throw errx;
      }
    } else if (entry.type === mType.REPLICATION_MARKER_REMOVE) {

@@ -267,7 +269,7 @@ function syncCollectionFinalize(database, collname, collid, from, config) {
      try {
        coll.remove(entry.key);
      } catch (errx) {
        console.error("syncCollectionFinalize: remove", entry, errx);
        console.error("syncCollectionFinalize: remove", entry, JSON.stringify(errx));
        throw errx;
      }
    } else if (entry.type === mType.REPLICATION_TRANSACTION_START) {

@@ -286,21 +288,21 @@ function syncCollectionFinalize(database, collname, collid, from, config) {
      try {
        coll.ensureIndex(entry.index);
      } catch(errx) {
        console.error("syncCollectionFinalize: ensureIndex", entry, errx);
        console.error("syncCollectionFinalize: ensureIndex", entry, JSON.stringify(errx));
        throw errx;
      }
    } else if (entry.type === mType.REPLICATION_INDEX_DROP) {
      try {
        coll.dropIndex(entry.id);
      } catch(errx) {
        console.error("syncCollectionFinalize: dropIndex", entry, errx);
        console.error("syncCollectionFinalize: dropIndex", entry, JSON.stringify(errx));
        throw errx;
      }
    } else if (entry.type === mType.REPLICATION_COLLECTION_CHANGE) {
      try {
        coll.properties(entry.collection);
      } catch(errx) {
        console.error("syncCollectionFinalize: properties", entry, errx);
        console.error("syncCollectionFinalize: properties", entry, JSON.stringify(errx));
        throw errx;
      }
    } else {
@@ -666,7 +666,8 @@ function ahuacatlQueryBreadthFirstTestSuite () {
///      +---------+---------+
///     \|/        |        \|/
///      D <- B <- A -> E -> F
///           |         |
///           |   /|\   |
///           |    |    |
///           +--> C <--+
////////////////////////////////////////////////////////////////////////////////

@@ -683,33 +684,96 @@ function ahuacatlQueryBreadthFirstTestSuite () {
      vertex.save({_key: "E"});
      vertex.save({_key: "F"});

      let makeEdge = function(from, to) {
      let makeEdge = function(from, to, type) {
        edge.save({
          _from: vn + "/" + from,
          _to: vn + "/" + to,
          _key: from + "" + to
          _key: from + "" + to,
          type: type
        });
      };

      makeEdge("A", "B");
      makeEdge("A", "D");
      makeEdge("A", "E");
      makeEdge("A", "F");
      makeEdge("A", "B", "friend");
      makeEdge("A", "D", "friend");
      makeEdge("A", "E", "enemy");
      makeEdge("A", "F", "enemy");

      makeEdge("B", "C");
      makeEdge("B", "D");
      makeEdge("B", "C", "enemy");
      makeEdge("B", "D", "friend");

      makeEdge("E", "C");
      makeEdge("E", "F");
      makeEdge("E", "C", "enemy");
      makeEdge("E", "F", "friend");

      makeEdge("C", "A", "friend");
    },

    tearDown : cleanUp,

    testNonUniqueVerticesDefaultDepth : function() {
      var query = `
        FOR v IN OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 4);
      assertEqual(actual, [ "B","D","E","F" ]);
    },

    testNonUniqueVerticesMaxDepth2 : function() {
      var query = `
        FOR v IN 1..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 8);
      assertEqual(actual, [ "B","C","C","D","D","E","F","F" ]);
    },

    testNonUniqueVerticesMinDepth0 : function() {
      var query = `
        FOR v IN 0..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 9);
      assertEqual(actual, [ "A","B","C","C","D","D","E","F","F" ]);
    },

    testNonUniqueVerticesMinDepth2 : function() {
      var query = `
        FOR v IN 2..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 4);
      assertEqual(actual, [ "C","C","D","F" ]);
    },

    testUniqueVerticesMaxDepth2 : function () {
      var query = `
        FOR v IN 1..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueVertices: 'global'}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 5);
      assertEqual(actual, [ "B","C","D","E","F" ]);
    },

    testUniqueVerticesMinDepth0 : function () {
      var query = `
        FOR v IN 0..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueVertices: 'global'}
        SORT v._key RETURN v._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 6);
      assertEqual(actual, [ "A","B","C","D","E","F" ]);
    },

    testUniqueVerticesMinDepth2 : function () {
      var query = `
        FOR n IN 2..2 OUTBOUND "${center}" ${en}
        FOR v IN 2..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueVertices: 'global'}
        SORT n._key RETURN n._key`;
        SORT v._key RETURN v._key`;
      var actual;

      // A is directly connected to every other vertex except "C"

@@ -717,6 +781,142 @@ function ahuacatlQueryBreadthFirstTestSuite () {
      actual = getQueryResults(query);
      assertEqual(actual.length, 1);
      assertEqual(actual, [ "C" ]);
    },

    testNonUniqueEdgesDefaultDepth : function() {
      var query = `
        FOR v,e IN OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 4);
      assertEqual(actual, [ "AB","AD","AE","AF" ]);
    },

    testNonUniqueEdgesMaxDepth2 : function() {
      var query = `
        FOR v,e IN 1..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 10);
      assertEqual(actual, [ "AB","AD","AE","AF","BC","BD","CA","CA","EC","EF" ]);
    },

    testNonUniqueEdgesMinDepth0 : function() {
      var query = `
        FOR v,e IN 0..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: false}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 11);
      assertEqual(actual, [ null,"AB","AD","AE","AF","BC","BD","CA","CA","EC","EF" ]);
    },

    testNonUniqueEdgesMinDepth2 : function() {
      var query = `
        FOR v,e IN 2..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 6);
      assertEqual(actual, [ "BC","BD","CA","CA","EC","EF" ]);
    },

    testUniqueEdgesMaxDepth4 : function () {
      var query = `
        FOR v,e IN 1..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 9);
      assertEqual(actual, [ "AB","AD","AE","AF","BC","BD","CA","EC","EF" ]);
    },

    testUniqueEdgesMinDepth0 : function () {
      var query = `
        FOR v,e IN 0..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 10);
      assertEqual(actual, [ null,"AB","AD","AE","AF","BC","BD","CA","EC","EF" ]);
    },

    testUniqueEdgesMinDepth2 : function () {
      var query = `
        FOR v,e IN 2..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 5);
      assertEqual(actual, [ "BC","BD","CA","EC","EF" ]);
    },

    testFilterPathMaxDepth3 : function () {
      var query = `
        FOR v,e,p IN 1..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend" && p.edges[1].type == "enemy"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 2);
      assertEqual(actual, [ "BC","CA" ]);
    },

    testFilterPathMaxDepth2 : function () {
      var query = `
        FOR v,e,p IN 1..2 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend" && p.edges[1].type == "enemy"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 1);
      assertEqual(actual, [ "BC" ]);
    },

    testFilterPathMinDepth3 : function () {
      var query = `
        FOR v,e,p IN 3..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend" && p.edges[1].type == "enemy"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 1);
      assertEqual(actual, [ "CA" ]);
    },

    testFilterPathDepth3 : function () {
      var query = `
        FOR v,e,p IN 1..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "enemy" && p.edges[1].type == "enemy" && p.edges[2].type == "friend"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 1);
      assertEqual(actual, [ "CA" ]);
    },

    testFilterPathStartsWith : function () {
      var query = `
        FOR v,e,p IN 1..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 5);
      assertEqual(actual, [ "AB","AD","BC","BD","CA" ]);
    },

    testFilterPathStartsWithDepth2 : function () {
      var query = `
        FOR v,e,p IN 2..3 OUTBOUND "${center}" ${en}
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend"
        SORT e._key RETURN e._key`;
      var actual = getQueryResults(query);
      assertEqual(actual.length, 3);
      assertEqual(actual, [ "BC","BD","CA" ]);
    }
  };
}
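All of these filters rely on the type attribute that setUp now stores on each edge; the same kind of traversal can be tried interactively from arangosh (the concrete collection names behind ${center} and ${en} are assumed here):

    var db = require("@arangodb").db;
    db._query(`
      FOR v, e, p IN 1..3 OUTBOUND "UnitTestsVertex/A" UnitTestsEdge
        OPTIONS {bfs: true, uniqueEdges: 'global'}
        FILTER p.edges[0].type == "friend"
        SORT e._key
        RETURN e._key`).toArray();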
@@ -131,7 +131,13 @@ function MovingShardsSuite () {
        if (!ok) {
          return false;
        }

      }

      // Now get to work, first get the write lock on the Plan in the Agency:
      //var success = ArangoAgency.lockRead("Target/CleanedServers", 0.5);
      //print(ArangoAgency.get("Target/CleanedServers", false, true));

      return true;
    }

@@ -140,7 +146,7 @@ function MovingShardsSuite () {
  ////////////////////////////////////////////////////////////////////////////////

  function cleanOutServer(id) {
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator1");
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator001");
    var request = require("@arangodb/request");
    var endpointToURL = require("@arangodb/cluster").endpointToURL;
    var url = endpointToURL(coordEndpoint);

@@ -150,12 +156,43 @@ function MovingShardsSuite () {
                     body: JSON.stringify(body) });
  }

  ////////////////////////////////////////////////////////////////////////////////
  /// @brief order the cluster to reduce number of db servers
  ////////////////////////////////////////////////////////////////////////////////

  function shrinkCluster(toNum) {
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator001");
    var request = require("@arangodb/request");
    var endpointToURL = require("@arangodb/cluster").endpointToURL;
    var url = endpointToURL(coordEndpoint);
    var body = {"numberOfDBServers":toNum};
    return request({ method: "PUT",
                     url: url + "/_admin/cluster/numberOfServers",
                     body: JSON.stringify(body) });
  }

  ////////////////////////////////////////////////////////////////////////////////
  /// @brief reset the list of cleaned out servers
  ////////////////////////////////////////////////////////////////////////////////

  function resetCleanedOutServers() {
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator001");
    var request = require("@arangodb/request");
    var endpointToURL = require("@arangodb/cluster").endpointToURL;
    var url = endpointToURL(coordEndpoint);
    var numberOfDBServers = global.ArangoClusterInfo.getDBServers().length;
    var body = {"cleanedServers":[], "numberOfDBServers":numberOfDBServers};
    return request({ method: "PUT",
                     url: url + "/_admin/cluster/numberOfServers",
                     body: JSON.stringify(body) });
  }

  ////////////////////////////////////////////////////////////////////////////////
  /// @brief move a single shard
  ////////////////////////////////////////////////////////////////////////////////

  function moveShard(database, collection, shard, fromServer, toServer) {
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator1");
    var coordEndpoint = global.ArangoClusterInfo.getServerEndpoint("Coordinator001");
    var request = require("@arangodb/request");
    var endpointToURL = require("@arangodb/cluster").endpointToURL;
    var url = endpointToURL(coordEndpoint);

@@ -197,10 +234,17 @@ function MovingShardsSuite () {

  function findServerNotOnList(list) {
    var count = 1;
    var name = "DBServer" + count;
    var str = "" + count;
    var pad = "000";
    var ans = pad.substring(0, pad.length - str.length) + str;

    var name = "DBServer" + ans;
    while (list.indexOf(name) >= 0) {
      require("internal").print(list, name);
      count += 1;
      name = "DBServer" + count;
      str = "" + count;
      ans = pad.substring(0, pad.length - str.length) + str;
      name = "DBServer" + ans;
    }
    return name;
  }

@@ -228,6 +272,7 @@ function MovingShardsSuite () {
        c[i].drop();
      }
      c = [];
      resetCleanedOutServers();
    },

    ////////////////////////////////////////////////////////////////////////////////

@@ -238,6 +283,23 @@ function MovingShardsSuite () {
      assertTrue(waitForSynchronousReplication("_system"));
    },

    ////////////////////////////////////////////////////////////////////////////////
    /// @brief cleaning out collection with one shard without replication
    ////////////////////////////////////////////////////////////////////////////////

    /* testShrinkNoReplication : function() {
      assertTrue(waitForSynchronousReplication("_system"));
      shrinkCluster(4);
      assertTrue(testServerEmpty("DBServer005"));
      shrinkCluster(3);
      assertTrue(testServerEmpty("DBServer004"));
      shrinkCluster(2);
      assertTrue(testServerEmpty("DBServer003"));
      shrinkCluster(1);
      assert(testServerEmpty("DBServer002"));
    },*/

    ////////////////////////////////////////////////////////////////////////////////
    /// @brief moving away a shard from a follower
    ////////////////////////////////////////////////////////////////////////////////
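The padding code above builds zero-padded names like DBServer001 by hand; where the JavaScript runtime offers String.prototype.padStart (an assumption about the environment, ES2017 or newer), the helper collapses to:

    function findServerNotOnList(list) {
      var count = 1;
      // padStart pads "1" to "001", matching the manual substring logic above
      var name = "DBServer" + String(count).padStart(3, "0");
      while (list.indexOf(name) >= 0) {
        count += 1;
        name = "DBServer" + String(count).padStart(3, "0");
      }
      return name;
    }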
@@ -31,7 +31,6 @@
 
 #include <deque>
 #include <stack>
 #include <thread>
 
 namespace arangodb {
 namespace basics {

@@ -49,6 +48,7 @@ struct VertexGetter {
   virtual ~VertexGetter() = default;
   virtual bool getVertex(edgeIdentifier const&, vertexIdentifier const&, size_t,
                          vertexIdentifier&) = 0;
+  virtual void setStartVertex(std::string const&) {}
 };
 
 template <typename edgeIdentifier, typename vertexIdentifier, typename edgeItem>

@@ -162,7 +162,7 @@ class DepthFirstEnumerator : public PathEnumerator<edgeIdentifier, vertexIdentif
   /// @brief Get the next Path element from the traversal.
   //////////////////////////////////////////////////////////////////////////////
 
-  const EnumeratedPath<edgeIdentifier, vertexIdentifier>& next() override {
+  EnumeratedPath<edgeIdentifier, vertexIdentifier> const& next() override {
     if (this->_isFirst) {
       this->_isFirst = false;
       return this->_enumeratedPath;

@@ -245,7 +245,7 @@ class BreadthFirstEnumerator : public PathEnumerator<edgeIdentifier, vertexIdent
    PathStep(){};
 
   public:
-   PathStep(vertexIdentifier const& vertex) : sourceIdx(0), vertex(vertex){};
+   PathStep(vertexIdentifier const& vertex) : sourceIdx(0), vertex(vertex) {}
 
    PathStep(size_t sourceIdx, edgeIdentifier const& edge,
             vertexIdentifier const& vertex)

@@ -262,7 +262,7 @@ class BreadthFirstEnumerator : public PathEnumerator<edgeIdentifier, vertexIdent
    vertexIdentifier vertex;
 
   private:
-   NextStep(){};
+   NextStep() {}
 
   public:
    NextStep(size_t sourceIdx, vertexIdentifier const& vertex)
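Most of these hunks are stylistic: reference return types move to the east-const spelling (EnumeratedPath<...> const&), and empty constructor bodies drop the stray semicolon ({}; becomes {}). The one functional addition is the no-op setStartVertex() hook on VertexGetter. A compilable sketch of how a subclass might use such a hook, assuming (not confirmed by the diff) that its purpose is to reset per-traversal state; CachingVertexGetter is an invented name:

    #include <cstddef>
    #include <string>

    // Base class abridged from the diff; setStartVertex() is the new hook with
    // an intentionally empty default implementation.
    template <typename edgeIdentifier, typename vertexIdentifier>
    struct VertexGetter {
      virtual ~VertexGetter() = default;
      virtual bool getVertex(edgeIdentifier const&, vertexIdentifier const&,
                             size_t, vertexIdentifier&) = 0;
      virtual void setStartVertex(std::string const&) {}
    };

    // Invented subclass: remembers the start vertex so getVertex() could use
    // it, e.g. to filter vertices relative to the traversal's origin.
    template <typename edgeIdentifier, typename vertexIdentifier>
    struct CachingVertexGetter : VertexGetter<edgeIdentifier, vertexIdentifier> {
      std::string _startVertex;
      bool getVertex(edgeIdentifier const&, vertexIdentifier const&, size_t,
                     vertexIdentifier&) override {
        return true;  // placeholder: a real getter applies its filter here
      }
      void setStartVertex(std::string const& v) override { _startVertex = v; }
    };

Because the default implementation is empty, existing callers and subclasses compile unchanged.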
@@ -37,15 +37,11 @@ using namespace arangodb::basics;
 namespace arangodb {
 namespace httpclient {
 
-////////////////////////////////////////////////////////////////////////////////
-/// @brief empty map, used for headers
-////////////////////////////////////////////////////////////////////////////////
 
 std::unordered_map<std::string, std::string> const SimpleHttpClient::NO_HEADERS {};
 
-// -----------------------------------------------------------------------------
-// constructors and destructors
-// -----------------------------------------------------------------------------
+/// @brief default value for max packet size
+size_t SimpleHttpClient::MaxPacketSize = 128 * 1024 * 1024;
 
 SimpleHttpClient::SimpleHttpClient(GeneralClientConnection* connection,
                                    double requestTimeout, bool warn)

@@ -60,7 +56,7 @@ SimpleHttpClient::SimpleHttpClient(GeneralClientConnection* connection,
       _locationRewriter({nullptr, nullptr}),
       _nextChunkedSize(0),
       _result(nullptr),
-      _maxPacketSize(128 * 1024 * 1024),
+      _maxPacketSize(MaxPacketSize),
       _maxRetries(3),
       _retryWaitTime(1 * 1000 * 1000),
       _retryMessage(),
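These hunks replace the hard-coded 128 MB response limit with a class-level MaxPacketSize static that every client copies into _maxPacketSize during construction. The pattern, reduced to its essentials (a sketch with stand-in names, not the ArangoDB code):

    #include <cstddef>

    class Client {
     public:
      static size_t MaxPacketSize;  // process-wide default, mutable at runtime

      Client() : _maxPacketSize(MaxPacketSize) {}
      size_t maxPacketSize() const { return _maxPacketSize; }

     private:
      size_t _maxPacketSize;  // per-instance copy, taken at construction time
    };

    size_t Client::MaxPacketSize = 128 * 1024 * 1024;  // 128 MB, as in the diff

Because the copy happens in the initializer list, raising the static affects only clients constructed afterwards; live instances keep the limit they started with.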
@@ -43,8 +43,8 @@ class GeneralClientConnection;
 
 class SimpleHttpClient {
  private:
-  SimpleHttpClient(SimpleHttpClient const&);
-  SimpleHttpClient& operator=(SimpleHttpClient const&);
+  SimpleHttpClient(SimpleHttpClient const&) = delete;
+  SimpleHttpClient& operator=(SimpleHttpClient const&) = delete;
 
  public:
  //////////////////////////////////////////////////////////////////////////////

@@ -176,6 +176,14 @@ class SimpleHttpClient {
     _locationRewriter.data = data;
     _locationRewriter.func = func;
   }
 
+  //////////////////////////////////////////////////////////////////////////////
+  /// @brief set the value for max packet size
+  //////////////////////////////////////////////////////////////////////////////
+
+  static void setMaxPacketSize(size_t value) {
+    MaxPacketSize = value;
+  }
+
  //////////////////////////////////////////////////////////////////////////////
  /// @brief enable or disable keep-alive

@@ -408,7 +416,7 @@ class SimpleHttpClient {
 
   std::vector<std::pair<std::string, std::string>> _pathToBasicAuth;
 
-  size_t const _maxPacketSize;
+  size_t _maxPacketSize;
 
  public:
   size_t _maxRetries;

@@ -434,6 +442,9 @@ class SimpleHttpClient {
 
   // empty map, used for headers
   static std::unordered_map<std::string, std::string> const NO_HEADERS;
+
+  // default value for max packet size
+  static size_t MaxPacketSize;
 };
 }
 }
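In the header, the copy constructor and copy assignment move from the pre-C++11 "declared private, never defined" idiom to = delete, and the new setMaxPacketSize() / MaxPacketSize pair exposes the default shown in the .cpp hunk above. The deleted-function idiom in isolation (a generic sketch, not ArangoDB code):

    // "= delete" rejects copies at compile time with a clear diagnostic; the
    // old private-declaration style only failed at link time, and not at all
    // for accidental copies made inside member functions.
    class NonCopyable {
     public:
      NonCopyable() = default;
      NonCopyable(NonCopyable const&) = delete;
      NonCopyable& operator=(NonCopyable const&) = delete;
    };

    int main() {
      NonCopyable a;
      // NonCopyable b(a);  // error: use of deleted function
    }

With the new setter, startup code can raise the cap before any client exists, for example SimpleHttpClient::setMaxPacketSize(512 * 1024 * 1024) — the 512 MB figure is illustrative only.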
@@ -40,8 +40,8 @@ SslServerFeature::SslServerFeature(application_features::ApplicationServer* serv
       _keyfile(),
       _sessionCache(false),
       _cipherList(),
-      _protocol(TLS_V1),
-      _options(
+      _sslProtocol(TLS_V1),
+      _sslOptions(
           (long)(SSL_OP_TLS_ROLLBACK_BUG | SSL_OP_CIPHER_SERVER_PREFERENCE)),
       _ecdhCurve("prime256v1"),
       _sslContext(nullptr) {

@@ -75,16 +75,17 @@ void SslServerFeature::collectOptions(std::shared_ptr<ProgramOptions> options) {
                      "ssl cipers to use, see OpenSSL documentation",
                      new StringParameter(&_cipherList));
 
-  std::unordered_set<uint64_t> sslProtocols = {1, 2, 3, 4};
+  std::unordered_set<uint64_t> sslProtocols = {1, 2, 3, 4, 5};
 
   options->addOption("--ssl.protocol",
                      "ssl protocol (1 = SSLv2, 2 = SSLv23, 3 = SSLv3, 4 = "
                      "TLSv1, 5 = TLSV1.2 (recommended)",
-                     new UInt64Parameter(&_protocol));
+                     new DiscreteValuesParameter<UInt64Parameter>(
+                         &_sslProtocol, sslProtocols));
 
   options->addHiddenOption(
       "--ssl.options", "ssl connection options, see OpenSSL documentation",
-      new DiscreteValuesParameter<UInt64Parameter>(&_options, sslProtocols));
+      new DiscreteValuesParameter<UInt64Parameter>(&_sslOptions, sslProtocols));
 
   options->addOption(
       "--ssl.ecdh-curve",

@@ -95,7 +96,7 @@ void SslServerFeature::collectOptions(std::shared_ptr<ProgramOptions> options) {
 void SslServerFeature::prepare() {
   createSslContext();
 
-  LOG(INFO) << "using SSL options: " << stringifySslOptions(_options);
+  LOG(INFO) << "using SSL options: " << stringifySslOptions(_sslOptions);
 
   if (!_cipherList.empty()) {
     LOG(INFO) << "using SSL cipher-list '" << _cipherList << "'";

@@ -128,14 +129,14 @@ void SslServerFeature::createSslContext() {
   }
 
   // validate protocol
-  if (_protocol <= SSL_UNKNOWN || _protocol >= SSL_LAST) {
+  if (_sslProtocol <= SSL_UNKNOWN || _sslProtocol >= SSL_LAST) {
     LOG(FATAL) << "invalid SSL protocol version specified. Please use a valid "
                   "value for '--ssl.protocol.'";
     FATAL_ERROR_EXIT();
   }
 
   LOG(DEBUG) << "using SSL protocol version '"
-             << protocolName((protocol_e)_protocol) << "'";
+             << protocolName((protocol_e)_sslProtocol) << "'";
 
   if (!FileUtils::exists(_keyfile)) {
     LOG(FATAL) << "unable to find SSL keyfile '" << _keyfile << "'";

@@ -143,7 +144,7 @@ void SslServerFeature::createSslContext() {
   }
 
   // create context
-  _sslContext = ::sslContext(protocol_e(_protocol), _keyfile);
+  _sslContext = ::sslContext(protocol_e(_sslProtocol), _keyfile);
 
   if (_sslContext == nullptr) {
     LOG(FATAL) << "failed to create SSL context, cannot create HTTPS server";

@@ -159,7 +160,7 @@ void SslServerFeature::createSslContext() {
   }
 
   // set options
-  SSL_CTX_set_options(_sslContext, (long)_options);
+  SSL_CTX_set_options(_sslContext, (long)_sslOptions);
 
   if (!_cipherList.empty()) {
     if (SSL_CTX_set_cipher_list(_sslContext, _cipherList.c_str()) != 1) {
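Beyond renaming _protocol/_options to _sslProtocol/_sslOptions, the substantive fix is in collectOptions(): the help text already advertised "5 = TLSV1.2 (recommended)", but the accepted set was {1, 2, 3, 4}, so value 5 was rejected; the set now includes 5, and --ssl.protocol itself is validated through DiscreteValuesParameter instead of a bare UInt64Parameter. What that validation amounts to, reduced to plain C++ (a behavioral sketch, not the DiscreteValuesParameter implementation):

    #include <cstdint>
    #include <stdexcept>
    #include <unordered_set>

    // Accept only the documented --ssl.protocol values; 5 maps to TLSv1.2.
    uint64_t checkSslProtocol(uint64_t value) {
      static const std::unordered_set<uint64_t> allowed = {1, 2, 3, 4, 5};
      if (allowed.count(value) == 0) {
        throw std::invalid_argument("invalid value for --ssl.protocol");
      }
      return value;
    }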
@@ -45,8 +45,8 @@ class SslServerFeature final : public application_features::ApplicationFeature {
   std::string _keyfile;
   bool _sessionCache;
   std::string _cipherList;
-  uint64_t _protocol;
-  uint64_t _options;
+  uint64_t _sslProtocol;
+  uint64_t _sslOptions;
   std::string _ecdhCurve;
 
  private:
@@ -272,6 +272,15 @@ def LIT(txt, wordboundary = ['<b>','</b>']):
 
     return r.sub(subpattern, txt)
 
+################################################################################
+### @brief LIT
+###
+### \ -> needs to become \\ so \n's in the text can be differciated.
+################################################################################
+
+def BACKSLASH(txt):
+    return txt.replace('\\', '\\\\\\')
+
 ################################################################################
 ### @brief Typegraphy
 ################################################################################

@@ -292,7 +301,7 @@ def Typography(txt):
     r = rc(r"""@ref [a-zA-Z0-9]+""", MS)
     txt = r.sub("the manual", txt)
     txt = re.sub(r"@endDocuBlock", "", txt)
-
+    txt = BACKSLASH(txt);
     return txt
 
 ################################################################################
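The documentation generator gains a BACKSLASH() helper that replaces each literal backslash with three, so real backslashes in the text survive the later \n handling, and Typography() now applies it as its final step. A rough C++ analogue of that escaping step (assumed semantics; escapeBackslashes is an invented name):

    #include <string>

    // Mirror BACKSLASH(): every '\' in the input becomes three '\' characters.
    std::string escapeBackslashes(std::string const& in) {
      std::string out;
      out.reserve(in.size());
      for (char c : in) {
        if (c == '\\') {
          out += "\\\\\\";  // one backslash in, three out, as in the patch
        } else {
          out += c;
        }
      }
      return out;
    }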