mirror of https://gitee.com/bigwinds/arangodb
Merge branch 'devel' of github.com:arangodb/arangodb into vpack
commit acadb089ee
@ -8,6 +8,7 @@ branches:
     - "2.5"
     - "2.6"
     - "2.7"
+    - "2.8"

 language: cpp
 compiler: g++
CHANGELOG
@ -1,8 +1,19 @@
+v2.8.0 (XXXX-XX-XX)
+-------------------
+
+* better error reporting for arangodump and arangorestore
+
+* arangodump will now fail by default when trying to dump edges that
+  refer to already dropped collections. This can be circumvented by
+  specifying the option `--force true` when invoking arangodump
+
+* fixed cluster upgrade procedure
+
 * the AQL functions `NEAR` and `WITHIN` now have stricter validations
   for their input parameters `limit`, `radius` and `distance`. They may now throw
   exceptions when invalid parameters are passed that may not have led
   to exceptions in previous versions.


 v2.8.0-beta1 (2015-12-06)
 -------------------------
@ -137,7 +137,7 @@ build-book:
	  echo cp $${WD}/$${pic} $${pic}; \
	  cp $${WD}/$${pic} $${pic}; \
	done
-	python generateMdFiles.py $(NAME) ppbooks/ ../../js/apps/system/_admin/aardvark/APP/api-docs.json $(FILTER)
+	python ../Scripts/generateMdFiles.py $(NAME) ppbooks/ ../../js/apps/system/_admin/aardvark/APP/api-docs.json $(FILTER)

	cd ppbooks/$(NAME) && sed -i -e 's/VERSION_NUMBER/v$(newVersionNumber)/g' styles/header.js
	cd ppbooks/$(NAME) && sed -i -e 's/VERSION_NUMBER/v$(newVersionNumber)/g' README.md

@ -154,7 +154,7 @@ build-book:

	cd ppbooks/$(NAME) && gitbook install
	cd ppbooks/$(NAME) && gitbook build ./ ./../../books/$(NAME)
-	python deprecated.py
+	python ../Scripts/deprecated.py

	make book-check-markdown-leftovers

@ -183,8 +183,6 @@ check-docublocks:
	  grep -v ppbook |\
	  grep -v allComments.txt |\
	  grep -v Makefile |\
-	  grep -v codeBlockReader.py |\
-	  grep -v generateMdFiles.py |\
	  grep -v '.*~:.*' |\
	  grep -v '.*#.*:.*' \
	  > /tmp/rawindoc.txt

@ -192,8 +190,6 @@ check-docublocks:
	  grep -v ppbook |\
	  grep -v allComments.txt |\
	  grep -v Makefile |\
-	  grep -v codeBlockReader.py |\
-	  grep -v generateMdFiles.py |\
	  grep -v '.*~:.*' |\
	  grep -v '.*#.*:.*' \
	  >> /tmp/rawindoc.txt

@ -204,8 +200,6 @@ check-docublocks:
	  grep -v ppbook |\
	  grep -v allComments.txt |\
	  grep -v Makefile |\
-	  grep -v codeBlockReader.py |\
-	  grep -v generateMdFiles.py |\
	  grep -v '.*~:.*' |\
	  grep -v '.*#.*:.*' \
	  >> /tmp/rawinprog.txt

@ -248,7 +242,7 @@ clean: clean-intermediate

 build-books-keep-md:
	@test -d books || mkdir books
-	python codeBlockReader.py
+	python ../Scripts/codeBlockReader.py
	make build-book NAME=Users

 build-books: clean-intermediate build-books-keep-md check-docublocks
@ -273,3 +273,5 @@ And finally clean it up again:

 If this traversal is not powerful enough for your needs, because you cannot describe your conditions as AQL filter statements, you might want to look at the [manually crafted traverser](../Traversals/README.md).
+
+[See here for more traversal examples](../AqlExamples/CombiningGraphTraversals.md).
@ -0,0 +1,78 @@
!CHAPTER Combining Graph Traversals

!SUBSECTION Finding the start vertex via a geo query

Our first example will locate the start vertex for a graph traversal via [a geo index](../IndexHandling/Geo.md).
We use [the city graph](../Graphs/README.md#the-city-graph) and its geo indices:

@startDocuBlockInline COMBINING_GRAPH_01_create_graph
@EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_01_create_graph}
~addIgnoreCollection("germanHighway");
~addIgnoreCollection("germanCity");
~addIgnoreCollection("frenchHighway");
~addIgnoreCollection("frenchCity");
~addIgnoreCollection("internationalHighway");
var examples = require("org/arangodb/graph-examples/example-graph.js");
var g = examples.loadGraph("routeplanner");
var bonn=[50.7340, 7.0998];
|db._query(`FOR startCity IN
|  WITHIN(germanCity, @lat, @long, @radius)
|    RETURN startCity`,
|  {lat: bonn[0], long: bonn[1], radius: 400000}
).toArray()
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock COMBINING_GRAPH_01_create_graph

We search for all German cities in a range of 400 km around the ex-capital **Bonn**: **Hamburg** and **Cologne**.
We won't find **Paris**, since it is in the `frenchCity` collection.

@startDocuBlockInline COMBINING_GRAPH_02_combine
@EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_02_combine}
~var bonn=[50.7340, 7.0998];
|db._query(`FOR startCity IN
|  WITHIN(germanCity, @lat, @long, @radius)
|    FOR v, e, p IN 1..1 OUTBOUND startCity
|      GRAPH 'routeplanner'
|      RETURN {startcity: startCity._key, traversedCity: v}`,
|  {
|    lat: bonn[0],
|    long: bonn[1],
|    radius: 400000
  } ).toArray()
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock COMBINING_GRAPH_02_combine

The geo index query returns us `startCity` (**Cologne** and **Hamburg**), which we then use as the starting point for our graph traversal. For simplicity we only return their direct neighbours. We format the return result so we can see from which `startCity` the traversal came.

Alternatively we could use a `LET` statement with a subquery to group the traversals by their `startCity` efficiently:

@startDocuBlockInline COMBINING_GRAPH_03_combine_let
@EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_03_combine_let}
~var bonn=[50.7340, 7.0998];
|db._query(`FOR startCity IN
|  WITHIN(germanCity, @lat, @long, @radius)
|    LET oneCity = (FOR v, e, p IN 1..1 OUTBOUND startCity
|      GRAPH 'routeplanner' RETURN v)
|    return {startCity: startCity._key, connectedCities: oneCity}`,
|  {
|    lat: bonn[0],
|    long: bonn[1],
|    radius: 400000
  } ).toArray();
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock COMBINING_GRAPH_03_combine_let

Finally we clean up again:

@startDocuBlockInline COMBINING_GRAPH_04_cleanup
@EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_04_cleanup}
~var examples = require("org/arangodb/graph-examples/example-graph.js");
examples.dropGraph("routeplanner");
~removeIgnoreCollection("germanHighway");
~removeIgnoreCollection("germanCity");
~removeIgnoreCollection("frenchHighway");
~removeIgnoreCollection("frenchCity");
~removeIgnoreCollection("internationalHighway");
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock COMBINING_GRAPH_04_cleanup
@ -1,9 +1,13 @@
 !CHAPTER Foxx console

-Foxx injects a **console** object into each Foxx app that allows writing log entries to the database and querying them from within the app itself.
+Foxx injects a **console** object into each Foxx app that allows writing log entries to the database, in addition to the ArangoDB log file, and querying them from within the app itself.

 The **console** object supports the CommonJS Console API found in Node.js and modern browsers, while also providing some ArangoDB-specific additions.

+ArangoDB also provides [the `console` module](../../ModuleConsole/README.md), which only supports the CommonJS Console API and only writes log entries to the ArangoDB log.
+
+When working with transactions, keep in mind that the Foxx console will attempt to write to the `_foxxlog` system collection. This behaviour can be disabled using the `setDatabaseLogging` method if you don't want to explicitly allow writing to the log collection during transactions, or for performance reasons.
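As a quick illustration, here is a minimal sketch of muting the database log around a transaction. It assumes the code runs inside a Foxx app (so `console` is the injected Foxx console) and that a collection named `myCollection` exists; both are placeholders:

```js
// Disable writes to the _foxxlog collection so the transaction below
// does not implicitly write to a system collection.
console.setDatabaseLogging(false);

var db = require("org/arangodb").db;
db._executeTransaction({
  collections: { write: "myCollection" }, // hypothetical collection
  action: function () {
    db.myCollection.save({ value: 42 });
  }
});

// Re-enable database logging afterwards.
console.setDatabaseLogging(true);
```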
 !SECTION Logging

 !SUBSECTION Logging console messages

@ -19,8 +23,6 @@ If the first argument is not a formatting string or any of the additional arguments...
 **Examples**

 ```js
-var console = require("console");
-
 console.log("%s, %s!", "Hello", "World"); // => "Hello, World!"
 console.log("%s, World!", "Hello", "extra"); // => "Hello, World! extra"
 console.log("Hello,", "beautiful", "world!"); // => "Hello, beautiful world!"
 ```
@ -40,11 +42,11 @@ By default, `console.log` uses log level **INFO**, making it functionally equivalent...

 The built-in log levels are:

-* -2: **TRACE**
-* -1: **DEBUG**
+* -200: **TRACE**
+* -100: **DEBUG**
 * 0: **INFO**
-* 1: **WARN**
-* 2: **ERROR**
+* 100: **WARN**
+* 200: **ERROR**
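As a rough sketch of how these levels line up with the convenience methods (the messages are placeholders; entries below the configured minimum level are discarded):

```js
console.debug("cache miss for key %s", "users"); // level -100
console.info("request handled");                 // level 0
console.warn("quota almost exhausted");          // level 100
console.error("request failed");                 // level 200
```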
 !SUBSECTION Logging with timers

@ -160,7 +162,7 @@ This method returns a function that logs messages with the given log level (e.g. ...)
 **Parameter**

 * **name**: name of the log level as it appears in the database, usually all-uppercase
-* **value** (optional): value of the log level. Default: `999`
+* **value** (optional): value of the log level. Default: `50`

 The **value** is used when determining whether a log entry meets the minimum log level that can be defined in various places. For a list of the built-in log levels and their values, see the section on logging with different log levels above.
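For example, assuming the method being documented here is `console.custom` (the method name is truncated in the hunk header above), a custom **AUDIT** level halfway between **WARN** and **ERROR** might look like this sketch:

```js
// Register a custom log level "AUDIT" with value 150 and get back
// a logging function for it.
var audit = console.custom("AUDIT", 150);
audit("user %s deleted document %s", "jdoe", "docs/123");
```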
@ -188,6 +190,24 @@ If **trace** is set to `true`, all log entries will be logged with a parsed stack trace...

 Because this results in every logging call creating a stack trace (which may have a significant performance impact), this option is disabled by default.

+!SUBSECTION Disabling logging to the ArangoDB console
+
+You can toggle whether logs should be written to the ArangoDB console.
+
+`console.setNativeLogging(nativeLogging)`
+
+If **nativeLogging** is set to `false`, log entries will not be logged to the ArangoDB console (which usually writes to the file system).
+
+!SUBSECTION Disabling logging to the database
+
+You can toggle whether logs should be written to the database.
+
+`console.setDatabaseLogging(databaseLogging)`
+
+If **databaseLogging** is set to `false`, log entries will not be logged to the internal `_foxxlog` collection.
+
+This is only useful if logging to the ArangoDB console is not also disabled.
+
 !SUBSECTION Enabling assertion errors

 You can toggle whether console assertions should throw if they fail.
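Presumably via `console.setAssertThrows` (the method itself is outside this excerpt, so the name is an assumption), e.g. this sketch:

```js
// Make failed assertions throw an AssertionError instead of
// merely logging the failure.
console.setAssertThrows(true);
console.assert(1 + 1 === 3, "math is broken"); // now throws
```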
@ -102,6 +102,7 @@
   * [Projections and filters](AqlExamples/ProjectionsAndFilters.md)
   * [Joins](AqlExamples/Join.md)
   * [Grouping](AqlExamples/Grouping.md)
+  * [Traversals](AqlExamples/CombiningGraphTraversals.md)
 * [Graphs](Graphs/README.md)
   * [General Graphs](GeneralGraphs/README.md)
     * [Graph Management](GeneralGraphs/Management.md)
@ -26,6 +26,13 @@ FOR doc IN `OUTBOUND`
   RETURN doc.`any`
 ```

+!SUBSECTION Changed behavior
+
+The AQL functions `NEAR` and `WITHIN` now have stricter validations
+for their input parameters `limit`, `radius` and `distance`. They may now throw
+exceptions when invalid parameters are passed that may not have led
+to exceptions in previous versions.
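To illustrate the stricter checks, a hedged sketch using the city graph from the examples above (whether a given invalid value throws depends on the validation, hence "may"):

```js
// Valid call: positive radius, so this behaves as before.
db._query(`FOR c IN WITHIN(germanCity, 50.7340, 7.0998, 400000)
             RETURN c._key`).toArray();

// A negative radius is an invalid parameter and may now throw
// instead of silently returning an empty or bogus result.
db._query(`FOR c IN WITHIN(germanCity, 50.7340, 7.0998, -1)
             RETURN c._key`).toArray();
```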
 !SUBSECTION Deadlock handling

@ -123,3 +130,10 @@ let response = request({

 * the built-in support for CoffeeScript source files is deprecated; it will raise a warning
   if you use it. Please pre-compile CoffeeScript source files.

+!SECTION Client tools
+
+arangodump will now fail by default when trying to dump edges that
+refer to already dropped collections. This can be circumvented by
+specifying the option `--force true` when invoking arangodump
@ -0,0 +1,32 @@
arangosh> var examples = require("org/arangodb/graph-examples/example-graph.js");
arangosh> var g = examples.loadGraph("routeplanner");
arangosh> var bonn=[50.7340, 7.0998];
arangosh> db._query(`FOR startCity IN
........>   WITHIN(germanCity, @lat, @long, @radius)
........>     RETURN startCity`,
........>   {lat: bonn[0], long: bonn[1], radius: 400000}
........> ).toArray()
[
  {
    "isCapital" : false,
    "population" : 1000000,
    "loc" : [
      50.9364,
      6.9528
    ],
    "_id" : "germanCity/Cologne",
    "_rev" : "24341039",
    "_key" : "Cologne"
  },
  {
    "isCapital" : false,
    "population" : 1000000,
    "loc" : [
      53.5653,
      10.0014
    ],
    "_id" : "germanCity/Hamburg",
    "_rev" : "24537647",
    "_key" : "Hamburg"
  }
]
@ -0,0 +1,82 @@
arangosh> db._query(`FOR startCity IN
........>   WITHIN(germanCity, @lat, @long, @radius)
........>     FOR v, e, p IN 1..1 OUTBOUND startCity
........>       GRAPH 'routeplanner'
........>       RETURN {startcity: startCity._key, traversedCity: v}`,
........>   {
........>     lat: bonn[0],
........>     long: bonn[1],
........>     radius: 400000
........>   } ).toArray()
[
  {
    "startcity" : "Cologne",
    "traversedCity" : {
      "isCapital" : false,
      "population" : 80000,
      "loc" : [
        45.76,
        4.84
      ],
      "_id" : "frenchCity/Lyon",
      "_rev" : "25061935",
      "_key" : "Lyon"
    }
  },
  {
    "startcity" : "Cologne",
    "traversedCity" : {
      "isCapital" : true,
      "population" : 4000000,
      "loc" : [
        48.8567,
        2.3508
      ],
      "_id" : "frenchCity/Paris",
      "_rev" : "25258543",
      "_key" : "Paris"
    }
  },
  {
    "startcity" : "Hamburg",
    "traversedCity" : {
      "isCapital" : true,
      "population" : 4000000,
      "loc" : [
        48.8567,
        2.3508
      ],
      "_id" : "frenchCity/Paris",
      "_rev" : "25258543",
      "_key" : "Paris"
    }
  },
  {
    "startcity" : "Hamburg",
    "traversedCity" : {
      "isCapital" : false,
      "population" : 80000,
      "loc" : [
        45.76,
        4.84
      ],
      "_id" : "frenchCity/Lyon",
      "_rev" : "25061935",
      "_key" : "Lyon"
    }
  },
  {
    "startcity" : "Hamburg",
    "traversedCity" : {
      "isCapital" : false,
      "population" : 1000000,
      "loc" : [
        50.9364,
        6.9528
      ],
      "_id" : "germanCity/Cologne",
      "_rev" : "24341039",
      "_key" : "Cologne"
    }
  }
]
@ -0,0 +1,77 @@
arangosh> db._query(`FOR startCity IN
........>   WITHIN(germanCity, @lat, @long, @radius)
........>     LET oneCity = (FOR v, e, p IN 1..1 OUTBOUND startCity
........>       GRAPH 'routeplanner' RETURN v)
........>     return {startCity: startCity._key, connectedCities: oneCity}`,
........>   {
........>     lat: bonn[0],
........>     long: bonn[1],
........>     radius: 400000
........>   } ).toArray();
[
  {
    "startCity" : "Cologne",
    "connectedCities" : [
      {
        "isCapital" : false,
        "population" : 80000,
        "loc" : [
          45.76,
          4.84
        ],
        "_id" : "frenchCity/Lyon",
        "_rev" : "25061935",
        "_key" : "Lyon"
      },
      {
        "isCapital" : true,
        "population" : 4000000,
        "loc" : [
          48.8567,
          2.3508
        ],
        "_id" : "frenchCity/Paris",
        "_rev" : "25258543",
        "_key" : "Paris"
      }
    ]
  },
  {
    "startCity" : "Hamburg",
    "connectedCities" : [
      {
        "isCapital" : true,
        "population" : 4000000,
        "loc" : [
          48.8567,
          2.3508
        ],
        "_id" : "frenchCity/Paris",
        "_rev" : "25258543",
        "_key" : "Paris"
      },
      {
        "isCapital" : false,
        "population" : 80000,
        "loc" : [
          45.76,
          4.84
        ],
        "_id" : "frenchCity/Lyon",
        "_rev" : "25061935",
        "_key" : "Lyon"
      },
      {
        "isCapital" : false,
        "population" : 1000000,
        "loc" : [
          50.9364,
          6.9528
        ],
        "_id" : "germanCity/Cologne",
        "_rev" : "24341039",
        "_key" : "Cologne"
      }
    ]
  }
]
@ -0,0 +1,2 @@
arangosh> examples.dropGraph("routeplanner");
true
@ -43,6 +43,16 @@ def getReference(name, source, verb):
         raise Exception("invalid reference: " + ref + " in " + fn)
     return ref

+removeDoubleLF = re.compile("\n\n")
+removeLF = re.compile("\n")
+
+def TrimThisParam(text, indent):
+    text = text.rstrip('\n').lstrip('\n')
+    text = removeDoubleLF.sub("\n", text)
+    if (indent > 0):
+        indent = (indent + 2) # align the text right of the list...
+    return removeLF.sub("\n" + ' ' * indent, text)
+
 def unwrapPostJson(reference, layer):
     global swagger
     rc = ''
@ -54,35 +64,29 @@ def unwrapPostJson(reference, layer):
     if '$ref' in thisParam:
         subStructRef = getReference(thisParam, reference, None)

-        rc += "<li><strong>" + param + "</strong>: "
-        rc += swagger['definitions'][subStructRef]['description'] + "<ul class=\"swagger-list\">"
+        rc += ' ' * layer + " - **" + param + "**:\n"
         rc += unwrapPostJson(subStructRef, layer + 1)
-        rc += "</li></ul>"

     elif thisParam['type'] == 'object':
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + brTrim(thisParam['description']) + "</li>"
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) + "\n"
     elif swagger['definitions'][reference]['properties'][param]['type'] == 'array':
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + brTrim(thisParam['description'])
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer)
         if 'type' in thisParam['items']:
-            rc += " of type " + thisParam['items']['type']#
+            rc += " of type " + thisParam['items']['type'] + "\n"
         else:
             if len(thisParam['items']) == 0:
-                rc += "anonymous json object"
+                rc += "anonymous json object\n"
             else:
                 try:
                     subStructRef = getReference(thisParam['items'], reference, None)
                 except:
                     print >>sys.stderr, "while analyzing: " + param
                     print >>sys.stderr, thisParam
-                rc += "\n<ul class=\"swagger-list\">"
-                rc += unwrapPostJson(subStructRef, layer + 1)
-                rc += "</ul>"
-        rc += '</li>'
+                rc += "\n" + unwrapPostJson(subStructRef, layer + 1)
     else:
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + thisParam['description'] + '</li>'
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(thisParam['description'], layer) + '\n'
     return rc


 def getRestBodyParam():
     rc = "\n**Body Parameters**\n"
     addText = ''
@ -93,13 +97,13 @@ def getRestBodyParam():
     if 'additionalProperties' in thisVerb['parameters'][nParam]['schema']:
         addText = "free style json body"
     else:
-        addText = "<ul class=\"swagger-list\">" + unwrapPostJson(
-            getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0) + "</ul>"
+        addText = unwrapPostJson(
+            getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0)
     rc += addText
     return rc

 def getRestReplyBodyParam(param):
-    rc = "\n**Reply Body**\n<ul>"
+    rc = "\n**Reply Body**\n"

     try:
         rc += unwrapPostJson(getReference(thisVerb['responses'][param]['schema'], route, verb), 0)

@ -107,7 +111,7 @@ def getRestReplyBodyParam(param):
         print >>sys.stderr,"failed to search " + param + " in: "
         print >>sys.stderr,json.dumps(thisVerb, indent=4, separators=(', ',': '), sort_keys=True)
         raise
-    return rc + "</ul>\n"
+    return rc + "\n"


 SIMPL_REPL_DICT = {
@ -289,10 +289,10 @@ def Typography(txt):
     txt = txt[0:-1]

 #    txt = BackTicks(txt)
-    txt = AsteriskBold(txt)
-    txt = AsteriskItalic(txt)
+#    txt = AsteriskBold(txt)
+#    txt = AsteriskItalic(txt)
 #    txt = FN(txt)
-    txt = LIT(txt)
+#    txt = LIT(txt)
 #    txt = FA(txt)
 #
 # no way to find out the correct link for Swagger,
@ -487,32 +487,8 @@ def generic_handler_desc(cargo, r, message, op, para, name):
             continue

-        if r.DESCRIPTION_LI.match(line):
-            line = "<li>" + line[2:]
-            inLI = True
-        elif inLI and r.DESCRIPTION_SP.match(line):
-            line = line[2:]
-        elif inLI and r.DESCRIPTION_BL.match(line):
-            line = ""
-        else:
-            inLI = False
-
-        if not inUL and inLI:
-            line = " <ul class=\"swagger-list\">" + line
-            inUL = True
-        elif inUL and r.EMPTY_LINE.match(line):
-            line = "</ul> " + line
-            inUL = False
-
-        elif inLI and r.EMPTY_LINE.match(line):
-            line = "</li> " + line
-            inUL = False
-
-        if not inLI and r.EMPTY_LINE.match(line):
-            line = "<br>"
-
-        para[name] += line + ' '
-        para[name] = removeTrailingBR.sub("", para[name])
+        line = Typography(line)
+        para[name] += line + '\n'

 def start_docublock(cargo, r=Regexen()):
@ -675,7 +651,7 @@ def restbodyparam(cargo, r=Regexen()):
     if restBodyParam == None:
         # https://github.com/swagger-api/swagger-ui/issues/1430
         # once this is solved we can skip this:
-        operation['description'] += "**A json post document with these Properties is required:**"
+        operation['description'] += "**A json post document with these Properties is required:**\n"
         restBodyParam = {
             'name': 'Json Post Body',
             'x-description-offset': len(swagger['paths'][httpPath][method]['description']),

@ -913,7 +889,7 @@ def restreplybody(cargo, r=Regexen()):
     if restReplyBodyParam == None:
         # https://github.com/swagger-api/swagger-ui/issues/1430
         # once this is solved we can skip this:
-        operation['description'] += "**A json document with these Properties is returned:**"
+        operation['description'] += "**A json document with these Properties is returned:**\n"
         swagger['paths'][httpPath][method]['responses'][currentReturnCode][
             'x-description-offset'] = len(swagger['paths'][httpPath][method]['description'])
         swagger['paths'][httpPath][method]['responses'][currentReturnCode]['schema'] = {
@ -1169,6 +1145,16 @@ def getReference(name, source, verb):
         raise Exception("invalid reference: " + ref + " in " + fn)
     return ref

+removeDoubleLF = re.compile("\n\n")
+removeLF = re.compile("\n")
+
+def TrimThisParam(text, indent):
+    text = text.rstrip('\n').lstrip('\n')
+    text = removeDoubleLF.sub("\n", text)
+    if (indent > 0):
+        indent = (indent + 2) # align the text right of the list...
+    return removeLF.sub("\n" + ' ' * indent, text)
+
 def unwrapPostJson(reference, layer):
     global swagger
     rc = ''
@ -1180,32 +1166,27 @@ def unwrapPostJson(reference, layer):
     if '$ref' in thisParam:
         subStructRef = getReference(thisParam, reference, None)

-        rc += "<li><strong>" + param + "</strong>: "
-        rc += swagger['definitions'][subStructRef]['description'] + "<ul class=\"swagger-list\">"
+        rc += ' ' * layer + " - **" + param + "**:\n"
         rc += unwrapPostJson(subStructRef, layer + 1)
-        rc += "</li></ul>"

     elif thisParam['type'] == 'object':
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + brTrim(thisParam['description']) + "</li>"
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) + "\n"
     elif swagger['definitions'][reference]['properties'][param]['type'] == 'array':
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + brTrim(thisParam['description'])
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer)
         if 'type' in thisParam['items']:
-            rc += " of type " + thisParam['items']['type']#
+            rc += " of type " + thisParam['items']['type'] + "\n"
         else:
             if len(thisParam['items']) == 0:
-                rc += "anonymous json object"
+                rc += "anonymous json object\n"
             else:
                 try:
                     subStructRef = getReference(thisParam['items'], reference, None)
                 except:
                     print >>sys.stderr, "while analyzing: " + param
                     print >>sys.stderr, thisParam
-                rc += "\n<ul class=\"swagger-list\">"
-                rc += unwrapPostJson(subStructRef, layer + 1)
-                rc += "</ul>"
-        rc += '</li>'
+                rc += "\n" + unwrapPostJson(subStructRef, layer + 1)
     else:
-        rc += ' ' * layer + "<li><strong>" + param + "</strong>: " + thisParam['description'] + '</li>'
+        rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(thisParam['description'], layer) + '\n'
     return rc
@ -1299,12 +1280,14 @@ for route in swagger['paths'].keys():
     if thisVerb['parameters'][nParam]['in'] == 'body':
         descOffset = thisVerb['parameters'][nParam]['x-description-offset']
         addText = ''
-        postText = thisVerb['description'][:descOffset]
+        postText = ''
+        paramDesc = thisVerb['description'][:descOffset]
+        if len(paramDesc) > 0:
+            postText += paramDesc
         if 'additionalProperties' in thisVerb['parameters'][nParam]['schema']:
-            addText = "free style json body"
+            addText = "\nfree style json body\n\n"
         else:
-            addText = "<ul class=\"swagger-list\">" + unwrapPostJson(
-                getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0) + "</ul>"
+            addText = "\n" + unwrapPostJson(getReference(thisVerb['parameters'][nParam]['schema'], route, verb),1) + "\n\n"

         postText += addText
         postText += thisVerb['description'][descOffset:]
@ -1315,30 +1298,34 @@ for route in swagger['paths'].keys():
     if 'responses' in thisVerb:
         for nRC in thisVerb['responses']:
             if 'x-description-offset' in thisVerb['responses'][nRC]:

                 descOffset = thisVerb['responses'][nRC]['x-description-offset']
                 #print descOffset
                 #print offsetPlus
                 descOffset += offsetPlus
-                addText = ''
+                addText = '\n##HTTP ' + nRC
                 #print thisVerb['responses'][nRC]['description']
                 postText = thisVerb['description'][:descOffset]
                 #print postText
+                replyDescription = TrimThisParam(thisVerb['responses'][nRC]['description'], 0)
+                if (len(replyDescription) > 0):
+                    addText += '\n' + replyDescription + '\n'
                 if 'additionalProperties' in thisVerb['responses'][nRC]['schema']:
-                    addText = "free style json body"
+                    addText += "\n free style json body\n"
                 else:
-                    addText = "<ul class=\"swagger-list\">" + unwrapPostJson(
-                        getReference(thisVerb['responses'][nRC]['schema'], route, verb),0) + "</ul>"
+                    addText += "\n" + unwrapPostJson(
+                        getReference(thisVerb['responses'][nRC]['schema'], route, verb),0) + '\n'
                 #print addText
                 postText += addText
-                postText += thisVerb['responses'][nRC]['description'][descOffset:]
+                postText += thisVerb['description'][descOffset:]
                 offsetPlus += len(addText)
                 thisVerb['description'] = postText

                 #print '-'*80
                 #print thisVerb['description']

     # Append the examples to the description:
     if 'x-examples' in thisVerb and len(thisVerb['x-examples']) > 0:
-        thisVerb['description'] += '<br>'
+        thisVerb['description'] += '\n'
         for nExample in range(0, len(thisVerb['x-examples'])):
             thisVerb['description'] += thisVerb['x-examples'][nExample]
         thisVerb['x-examples'] = []# todo unset!
@ -3032,6 +3032,7 @@ static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx,
                                      std::string const& collectionName,
                                      std::string const& identifier) {
+  std::vector<std::string> parts = triagens::basics::StringUtils::split(identifier, "/");

   TRI_doc_mptr_copy_t mptr;
   if (parts.size() == 1) {
     int res = trx->readSingle(collection, &mptr, parts[0]);

@ -3062,24 +3063,37 @@ static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx,
 }

 ////////////////////////////////////////////////////////////////////////////////
-/// @brief Helper function to get a document by it's _id
+/// @brief Helper function to get a document by its _id
+/// This function will lazy read-lock the collection.
 /// this function will not throw if the document or the collection cannot be
 /// found
 ////////////////////////////////////////////////////////////////////////////////

 static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx,
                                      CollectionNameResolver const* resolver,
-                                     std::string& identifier) {
+                                     std::string const& identifier) {
   std::vector<std::string> parts = triagens::basics::StringUtils::split(identifier, "/");

   if (parts.size() != 2) {
     return Json(Json::Null);
   }
   std::string collectionName = parts[0];
   TRI_transaction_collection_t* collection = nullptr;
   TRI_voc_cid_t cid = 0;
-  RegisterCollectionInTransaction(trx, collectionName, cid, collection);
+  try {
+    RegisterCollectionInTransaction(trx, collectionName, cid, collection);
+  }
+  catch (triagens::basics::Exception const& ex) {
+    // don't throw if collection is not found
+    if (ex.code() == TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) {
+      return Json(Json::Null);
+    }
+    throw ex;
+  }

   TRI_doc_mptr_copy_t mptr;
   int res = trx->readSingle(collection, &mptr, parts[1]);

   if (res != TRI_ERROR_NO_ERROR) {
     return Json(Json::Null);
   }

@ -3090,8 +3104,7 @@ static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx,
                      cid,
                      &mptr
   );
-};
-
 }

 ////////////////////////////////////////////////////////////////////////////////
 /// @brief function Document

@ -3147,15 +3160,32 @@ AqlValue Functions::Document (triagens::aql::Query* query,

   TRI_transaction_collection_t* collection = nullptr;
   TRI_voc_cid_t cid;
-  RegisterCollectionInTransaction(trx, collectionName, cid, collection);
+  bool notFound = false;
+
+  try {
+    RegisterCollectionInTransaction(trx, collectionName, cid, collection);
+  }
+  catch (triagens::basics::Exception const& ex) {
+    // don't throw if collection is not found
+    if (ex.code() != TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) {
+      throw ex;
+    }
+    notFound = true;
+  }

   Json id = ExtractFunctionParameter(trx, parameters, 1, false);
   if (id.isString()) {
+    if (notFound) {
+      return AqlValue(new Json(Json::Null));
+    }
     std::string identifier = triagens::basics::JsonHelper::getStringValue(id.json(), "");
     Json result = getDocumentByIdentifier(trx, resolver, collection, cid, collectionName, identifier);
     return AqlValue(new Json(TRI_UNKNOWN_MEM_ZONE, result.steal()));
   }
   else if (id.isArray()) {
+    if (notFound) {
+      return AqlValue(new Json(Json::Array));
+    }
     size_t const n = id.size();
     Json result(Json::Array, n);
     for (size_t i = 0; i < n; ++i) {
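The effect of the changes above is observable from AQL: `DOCUMENT` now returns `null` (or an empty array for the array variant) instead of raising an error when the collection in the identifier does not exist. A sketch in arangosh, with hypothetical collection names:

```js
// "users" exists, so this returns the document (or null for an unknown key).
db._query(`RETURN DOCUMENT("users/alice")`).toArray();

// "ghosts" does not exist; previously this threw, now it yields [ null ].
db._query(`RETURN DOCUMENT("ghosts/nobody")`).toArray();
```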
@ -35,6 +35,7 @@
 #include "Basics/WriteLocker.h"
 #include "Basics/json.h"
 #include "Basics/logging.h"
+#include "Basics/random.h"
 #include "Cluster/ServerState.h"
 #include "Rest/Endpoint.h"
 #include "Rest/HttpRequest.h"

@ -992,6 +993,24 @@ bool AgencyComm::increaseVersion (std::string const& key) {
   return result.successful();
 }

+////////////////////////////////////////////////////////////////////////////////
+/// @brief update a version number in the agency, retry until it works
+////////////////////////////////////////////////////////////////////////////////
+
+void AgencyComm::increaseVersionRepeated (std::string const& key) {
+  bool ok = false;
+  while (! ok) {
+    ok = increaseVersion(key);
+    if (ok) {
+      return;
+    }
+    uint32_t val = 300 + TRI_UInt32Random() % 400;
+    LOG_INFO("Could not increase %s in agency, retrying in %dms!",
+             key.c_str(), val);
+    usleep(val * 1000);
+  }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief creates a directory in the backend
 ////////////////////////////////////////////////////////////////////////////////
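The loop added above is a plain retry-with-random-backoff. The same idea, sketched in JavaScript for readers working on the arangosh side (the operation is a placeholder; `internal.wait` takes seconds):

```js
var internal = require("internal");

// Retry an idempotent operation until it succeeds, sleeping a random
// 300-700 ms between attempts, mirroring the C++ loop above.
function retryUntilSuccess(operation) {
  while (! operation()) {
    var waitMs = 300 + Math.floor(Math.random() * 400);
    internal.wait(waitMs / 1000);
  }
}
```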
@ -451,6 +451,12 @@ namespace triagens {

       bool increaseVersion (std::string const&);

+////////////////////////////////////////////////////////////////////////////////
+/// @brief update a version number in the agency, retry until it works
+////////////////////////////////////////////////////////////////////////////////
+
+      void increaseVersionRepeated (std::string const& key);
+
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief creates a directory in the backend
 ////////////////////////////////////////////////////////////////////////////////
@ -333,7 +333,7 @@ void ClusterInfo::flush () {
   loadCurrentCoordinators();
   loadPlannedDatabases();
   loadCurrentDatabases();
-  loadPlannedCollections(true);
+  loadPlannedCollections();
   loadCurrentCollections(true);
 }

@ -649,7 +649,7 @@ void ClusterInfo::loadCurrentDatabases () {

 static const std::string prefixPlannedCollections = "Plan/Collections";

-void ClusterInfo::loadPlannedCollections (bool acquireLock) {
+void ClusterInfo::loadPlannedCollections () {

   uint64_t storedVersion = _plannedCollectionsProt.version;
   MUTEX_LOCKER(_plannedCollectionsProt.mutex);

@ -661,19 +661,14 @@ void ClusterInfo::loadPlannedCollections () {
   // Now contact the agency:
   AgencyCommResult result;
   {
-    if (acquireLock) {
-      AgencyCommLocker locker("Plan", "READ");
-
-      if (locker.successful()) {
-        result = _agency.getValues(prefixPlannedCollections, true);
-      }
-      else {
-        LOG_ERROR("Error while locking %s", prefixPlannedCollections.c_str());
-        return;
-      }
-    }
-    else {
-      result = _agency.getValues(prefixPlannedCollections, true);
+    AgencyCommLocker locker("Plan", "READ");
+
+    if (locker.successful()) {
+      result = _agency.getValues(prefixPlannedCollections, true);
+    }
+    else {
+      LOG_ERROR("Error while locking %s", prefixPlannedCollections.c_str());
+      return;
     }
   }

@ -769,7 +764,7 @@ shared_ptr<CollectionInfo> ClusterInfo::getCollection
   int tries = 0;

   if (! _plannedCollectionsProt.isValid) {
-    loadPlannedCollections(true);
+    loadPlannedCollections();
     ++tries;
   }

@ -793,7 +788,7 @@ shared_ptr<CollectionInfo> ClusterInfo::getCollection
     }

     // must load collections outside the lock
-    loadPlannedCollections(true);
+    loadPlannedCollections();
   }

   return shared_ptr<CollectionInfo>(new CollectionInfo());

@ -849,7 +844,7 @@ const std::vector<shared_ptr<CollectionInfo> > ClusterInfo::getCollections
   std::vector<shared_ptr<CollectionInfo> > result;

   // always reload
-  loadPlannedCollections(true);
+  loadPlannedCollections();

   READ_LOCKER(_plannedCollectionsProt.lock);
   // look up database by id

@ -1188,7 +1183,7 @@ int ClusterInfo::dropDatabaseCoordinator (string const& name, string& errorMsg,

   // Load our own caches:
   loadPlannedDatabases();
-  loadPlannedCollections(true);
+  loadPlannedCollections();

   // Now wait for it to appear and be complete:
   res.clear();

@ -1261,50 +1256,43 @@ int ClusterInfo::createCollectionCoordinator (string const& databaseName,
   const double endTime = TRI_microtime() + realTimeout;
   const double interval = getPollInterval();
-  {
-    AgencyCommLocker locker("Plan", "WRITE");
-
-    if (! locker.successful()) {
-      return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN, errorMsg);
-    }
-
-    {
-      // check if a collection with the same name is already planned
-      loadPlannedCollections(false);
-
-      READ_LOCKER(_plannedCollectionsProt.lock);
-      AllCollections::const_iterator it = _plannedCollections.find(databaseName);
-      if (it != _plannedCollections.end()) {
-        const std::string name = JsonHelper::getStringValue(json, "name", "");
-
-        DatabaseCollections::const_iterator it2 = (*it).second.find(name);
-
-        if (it2 != (*it).second.end()) {
-          // collection already exists!
-          return TRI_ERROR_ARANGO_DUPLICATE_NAME;
-        }
-      }
-    }
-
-    if (! ac.exists("Plan/Databases/" + databaseName)) {
-      return setErrormsg(TRI_ERROR_ARANGO_DATABASE_NOT_FOUND, errorMsg);
-    }
-
-    if (ac.exists("Plan/Collections/" + databaseName + "/" + collectionID)) {
-      return setErrormsg(TRI_ERROR_CLUSTER_COLLECTION_ID_EXISTS, errorMsg);
-    }
-
-    AgencyCommResult result
-      = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID,
-                    json, 0.0);
-    if (!result.successful()) {
-      return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN,
-                         errorMsg);
-    }
-  }
+  // check if a collection with the same name is already planned
+  loadPlannedCollections();
+  {
+    READ_LOCKER(_plannedCollectionsProt.lock);
+    AllCollections::const_iterator it = _plannedCollections.find(databaseName);
+    if (it != _plannedCollections.end()) {
+      const std::string name = JsonHelper::getStringValue(json, "name", "");
+
+      DatabaseCollections::const_iterator it2 = (*it).second.find(name);
+
+      if (it2 != (*it).second.end()) {
+        // collection already exists!
+        return TRI_ERROR_ARANGO_DUPLICATE_NAME;
+      }
+    }
+  }
+
+  if (! ac.exists("Plan/Databases/" + databaseName)) {
+    return setErrormsg(TRI_ERROR_ARANGO_DATABASE_NOT_FOUND, errorMsg);
+  }
+
+  if (ac.exists("Plan/Collections/" + databaseName + "/" + collectionID)) {
+    return setErrormsg(TRI_ERROR_CLUSTER_COLLECTION_ID_EXISTS, errorMsg);
+  }
+
+  AgencyCommResult result
+    = ac.casValue("Plan/Collections/" + databaseName + "/" + collectionID,
+                  json, false, 0.0, 0.0);
+  if (!result.successful()) {
+    return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN,
+                       errorMsg);
+  }
+
+  ac.increaseVersionRepeated("Plan/Version");

   // Update our cache:
-  loadPlannedCollections(true);
+  loadPlannedCollections();

   // Now wait for it to appear and be complete:
   AgencyCommResult res = ac.getValues("Current/Version", false);

@ -1348,7 +1336,7 @@ int ClusterInfo::createCollectionCoordinator (string const& databaseName,
         errorMsg = "Error in creation of collection:" + tmpMsg;
         return TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION;
       }
-      loadPlannedCollections(true);
+      loadPlannedCollections();
       return setErrormsg(TRI_ERROR_NO_ERROR, errorMsg);
     }
   }

@ -1402,7 +1390,7 @@ int ClusterInfo::dropCollectionCoordinator (string const& databaseName,
   }

   // Update our own cache:
-  loadPlannedCollections(true);
+  loadPlannedCollections();

   // Now wait for it to appear and be complete:
   res.clear();

@ -1455,56 +1443,58 @@ int ClusterInfo::setCollectionPropertiesCoordinator (string const& databaseName,
   AgencyComm ac;
   AgencyCommResult res;

-  AgencyCommLocker locker("Plan", "WRITE");
-
-  if (! locker.successful()) {
-    return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN;
-  }
-
-  if (! ac.exists("Plan/Databases/" + databaseName)) {
-    return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND;
-  }
-
-  res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false);
-
-  if (! res.successful()) {
-    return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
-  }
-
-  res.parse("", false);
-  std::map<std::string, AgencyCommResultEntry>::const_iterator it = res._values.begin();
-
-  if (it == res._values.end()) {
-    return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
-  }
-
-  TRI_json_t* json = (*it).second._json;
-  if (json == nullptr) {
-    return TRI_ERROR_OUT_OF_MEMORY;
-  }
-
-  TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json);
-  if (copy == nullptr) {
-    return TRI_ERROR_OUT_OF_MEMORY;
-  }
-
-  TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact");
-  TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize");
-  TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync");
-  TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets");
-
-  TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_doCompact));
-  TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_maximalSize));
-  TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_waitForSync));
-  TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_indexBuckets));
-
-  res.clear();
-  res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0);
-
-  TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy);
+  {
+    AgencyCommLocker locker("Plan", "WRITE");
+
+    if (! locker.successful()) {
+      return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN;
+    }
+
+    if (! ac.exists("Plan/Databases/" + databaseName)) {
+      return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND;
+    }
+
+    res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false);
+
+    if (! res.successful()) {
+      return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
+    }
+
+    res.parse("", false);
+    std::map<std::string, AgencyCommResultEntry>::const_iterator it = res._values.begin();
+
+    if (it == res._values.end()) {
+      return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
+    }
+
+    TRI_json_t* json = (*it).second._json;
+    if (json == nullptr) {
+      return TRI_ERROR_OUT_OF_MEMORY;
+    }
+
+    TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json);
+    if (copy == nullptr) {
+      return TRI_ERROR_OUT_OF_MEMORY;
+    }
+
+    TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact");
+    TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize");
+    TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync");
+    TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets");
+
+    TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_doCompact));
+    TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_maximalSize));
+    TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_waitForSync));
+    TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_indexBuckets));
+
+    res.clear();
+    res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0);
+
+    TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy);
+  }

   if (res.successful()) {
-    loadPlannedCollections(false);
+    loadPlannedCollections();
     return TRI_ERROR_NO_ERROR;
   }

@ -1521,56 +1511,58 @@ int ClusterInfo::setCollectionStatusCoordinator (string const& databaseName,
   AgencyComm ac;
   AgencyCommResult res;

-  AgencyCommLocker locker("Plan", "WRITE");
-
-  if (! locker.successful()) {
-    return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN;
-  }
-
-  if (! ac.exists("Plan/Databases/" + databaseName)) {
-    return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND;
-  }
-
-  res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false);
-
-  if (! res.successful()) {
-    return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
-  }
-
-  res.parse("", false);
-  std::map<std::string, AgencyCommResultEntry>::const_iterator it = res._values.begin();
-
-  if (it == res._values.end()) {
-    return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
-  }
-
-  TRI_json_t* json = (*it).second._json;
-  if (json == nullptr) {
-    return TRI_ERROR_OUT_OF_MEMORY;
-  }
-
-  TRI_vocbase_col_status_e old = (TRI_vocbase_col_status_e) triagens::basics::JsonHelper::getNumericValue<int>(json, "status", (int) TRI_VOC_COL_STATUS_CORRUPTED);
-
-  if (old == status) {
-    // no status change
-    return TRI_ERROR_NO_ERROR;
-  }
-
-  TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json);
-  if (copy == nullptr) {
-    return TRI_ERROR_OUT_OF_MEMORY;
-  }
-
-  TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status");
-  TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, status));
-
-  res.clear();
-  res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0);
-
-  TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy);
+  {
+    AgencyCommLocker locker("Plan", "WRITE");
+
+    if (! locker.successful()) {
+      return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN;
+    }
+
+    if (! ac.exists("Plan/Databases/" + databaseName)) {
+      return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND;
+    }
+
+    res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false);
+
+    if (! res.successful()) {
+      return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
+    }
+
+    res.parse("", false);
+    std::map<std::string, AgencyCommResultEntry>::const_iterator it = res._values.begin();
+
+    if (it == res._values.end()) {
+      return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND;
+    }
+
+    TRI_json_t* json = (*it).second._json;
+    if (json == nullptr) {
+      return TRI_ERROR_OUT_OF_MEMORY;
+    }
+
+    TRI_vocbase_col_status_e old = (TRI_vocbase_col_status_e) triagens::basics::JsonHelper::getNumericValue<int>(json, "status", (int) TRI_VOC_COL_STATUS_CORRUPTED);
+
+    if (old == status) {
+      // no status change
+      return TRI_ERROR_NO_ERROR;
+    }
+
+    TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json);
+    if (copy == nullptr) {
+      return TRI_ERROR_OUT_OF_MEMORY;
+    }
+
+    TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status");
+    TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, status));
+
+    res.clear();
+    res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0);
+
+    TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy);
+  }

   if (res.successful()) {
-    loadPlannedCollections(false);
+    loadPlannedCollections();
     return TRI_ERROR_NO_ERROR;
   }

@ -1633,6 +1625,23 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName,

   string const idString = triagens::basics::StringUtils::itoa(iid);

+  string const key = "Plan/Collections/" + databaseName + "/" + collectionID;
+  AgencyCommResult previous = ac.getValues(key, false);
+  previous.parse("", false);
+  auto it = previous._values.begin();
+  TRI_json_t const* previousVal;
+  if (it == previous._values.end()) {
+    LOG_INFO("Entry for collection in Plan does not exist!");
+    previousVal = nullptr;
+  }
+  else {
+    previousVal = it->second._json;
+  }
+
+  loadPlannedCollections();
+  // It is possible that between the fetching of the planned collections
+  // and the write lock we acquire below something has changed. Therefore
+  // we first get the previous value and then do a compare and swap operation.
   {
     TRI_json_t* collectionJson = nullptr;
     AgencyCommLocker locker("Plan", "WRITE");
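The compare-and-swap approach the comment above describes, reduced to a generic JavaScript sketch (the `store` object and its `get`/`casValue` methods are hypothetical stand-ins for the agency API):

```js
// Optimistic update: read the current value, compute the new one, and
// only write if the stored value is still the one we read.
function casUpdate(store, key, modify) {
  var previous = store.get(key);
  var next = modify(previous);
  // casValue succeeds only if store[key] still equals `previous`;
  // on failure the caller can re-read and retry.
  return store.casValue(key, previous, next);
}
```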
@ -1642,7 +1651,6 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName,
}

{
loadPlannedCollections(false);

shared_ptr<CollectionInfo> c = getCollection(databaseName, collectionID);

@ -1741,9 +1749,13 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName,

TRI_PushBack3ArrayJson(TRI_UNKNOWN_MEM_ZONE, idx, TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, newIndex));

AgencyCommResult result = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID,
collectionJson,
0.0);
AgencyCommResult result;
if (previousVal != nullptr) {
result = ac.casValue(key, previousVal, collectionJson, 0.0, 0.0);
}
else { // only when there is no previous value
result = ac.setValue(key, collectionJson, 0.0);
}

TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, collectionJson);

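The comment above spells out the optimistic pattern: read the previous Plan value before taking the write lock, then write conditionally so that a concurrent change is detected instead of silently overwritten. Sketched in JavaScript with a hypothetical `agency` client (`casValue` mirrors AgencyComm::casValue):

```js
// Sketch of the fetch-then-compare-and-swap write used above.
// casValue succeeds only if the stored value still equals `previousVal`,
// so a concurrent modification makes the write fail instead of being lost.
function writeIndexPlan(agency, key, previousVal, collectionJson) {
  if (previousVal !== null) {
    return agency.casValue(key, previousVal, collectionJson);
  }
  // only when there is no previous value: fall back to a plain write
  return agency.set(key, collectionJson);
}
```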
@ -1755,7 +1767,7 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName,
}

// reload our own cache:
loadPlannedCollections(true);
loadPlannedCollections();

TRI_ASSERT(numberOfShards > 0);

@ -1856,6 +1868,17 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName,
int numberOfShards = 0;
string const idString = triagens::basics::StringUtils::itoa(iid);

string const key = "Plan/Collections/" + databaseName + "/" + collectionID;
AgencyCommResult previous = ac.getValues(key, false);
previous.parse("", false);
auto it = previous._values.begin();
TRI_ASSERT(it != previous._values.end());
TRI_json_t const* previousVal = it->second._json;

loadPlannedCollections();
// It is possible that between the fetching of the planned collections
// and the write lock we acquire below something has changed. Therefore
// we first get the previous value and then do a compare and swap operation.
{
AgencyCommLocker locker("Plan", "WRITE");

@ -1867,8 +1890,6 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName,
TRI_json_t const* indexes = nullptr;

{
loadPlannedCollections(false);

shared_ptr<CollectionInfo> c = getCollection(databaseName, collectionID);

READ_LOCKER(_plannedCollectionsProt.lock);

@ -1944,9 +1965,8 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName,
return setErrormsg(TRI_ERROR_ARANGO_INDEX_NOT_FOUND, errorMsg);
}

AgencyCommResult result = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID,
collectionJson,
0.0);
AgencyCommResult result = ac.casValue(key, previousVal, collectionJson,
0.0, 0.0);

TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, collectionJson);

@ -1957,7 +1977,7 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName,
}

// load our own cache:
loadPlannedCollections(true);
loadPlannedCollections();

TRI_ASSERT(numberOfShards > 0);

@ -2395,7 +2415,7 @@ int ClusterInfo::getResponsibleShard (CollectionID const& collectionID,
// from Plan, since they are immutable. Later we will have to switch
// this to Current, when we allow adding and removing shards.
if (! _plannedCollectionsProt.isValid) {
loadPlannedCollections(true);
loadPlannedCollections();
}

int tries = 0;

@ -2434,7 +2454,7 @@ int ClusterInfo::getResponsibleShard (CollectionID const& collectionID,
if (++tries >= 2) {
break;
}
loadPlannedCollections(true);
loadPlannedCollections();
}

if (! found) {

@ -808,7 +808,7 @@ namespace triagens {
/// Usually one does not have to call this directly.
////////////////////////////////////////////////////////////////////////////////

void loadPlannedCollections (bool);
void loadPlannedCollections ();

////////////////////////////////////////////////////////////////////////////////
/// @brief (re-)load the information about planned databases

@ -1256,13 +1256,13 @@ int getAllDocumentsOnCoordinator (
////////////////////////////////////////////////////////////////////////////////

int getAllEdgesOnCoordinator (
string const& dbname,
string const& collname,
string const& vertex,
std::string const& dbname,
std::string const& collname,
std::string const& vertex,
TRI_edge_direction_e const& direction,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,
string& contentType,
string& resultBody ) {
std::string& contentType,
std::string& resultBody) {
triagens::basics::Json result(triagens::basics::Json::Object);
std::vector<traverser::TraverserExpression*> expTmp;
int res = getFilteredEdgesOnCoordinator(dbname, collname, vertex, direction, expTmp, responseCode, contentType, result);

@ -1271,14 +1271,14 @@ int getAllEdgesOnCoordinator (
}

int getFilteredEdgesOnCoordinator (
string const& dbname,
string const& collname,
string const& vertex,
std::string const& dbname,
std::string const& collname,
std::string const& vertex,
TRI_edge_direction_e const& direction,
std::vector<traverser::TraverserExpression*> const& expressions,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,
string& contentType,
triagens::basics::Json& result ) {
std::string& contentType,
triagens::basics::Json& result) {
TRI_ASSERT(result.isObject());
TRI_ASSERT(result.members() == 0);

@ -1294,8 +1294,8 @@ int getFilteredEdgesOnCoordinator (

ClusterCommResult* res;

map<ShardID, ServerID> shards = collinfo->shardIds();
map<ShardID, ServerID>::iterator it;
std::map<ShardID, ServerID> shards = collinfo->shardIds();
std::map<ShardID, ServerID>::iterator it;
CoordTransactionID coordTransactionID = TRI_NewTickServer();
std::string queryParameters = "?vertex=" + StringUtils::urlEncode(vertex);
if (direction == TRI_EDGE_IN) {

@ -1315,7 +1315,7 @@ int getFilteredEdgesOnCoordinator (
reqBodyString->append(body.toString());
}
for (it = shards.begin(); it != shards.end(); ++it) {
map<string, string>* headers = new map<string, string>;
std::map<std::string, std::string>* headers = new std::map<std::string, std::string>;
res = cc->asyncRequest("", coordTransactionID, "shard:" + it->first,
triagens::rest::HttpRequest::HTTP_REQUEST_PUT,
"/_db/" + StringUtils::urlEncode(dbname) + "/_api/edges/" + it->first + queryParameters,

@ -1338,19 +1338,27 @@ int getFilteredEdgesOnCoordinator (
cc->drop( "", coordTransactionID, 0, "");
return TRI_ERROR_CLUSTER_TIMEOUT;
}
if (res->status == CL_COMM_ERROR || res->status == CL_COMM_DROPPED ||
res->answer_code == triagens::rest::HttpResponse::NOT_FOUND) {
if (res->status == CL_COMM_ERROR || res->status == CL_COMM_DROPPED) {
delete res;
cc->drop( "", coordTransactionID, 0, "");
return TRI_ERROR_INTERNAL;
}

if (res->status == CL_COMM_RECEIVED) {
}

std::unique_ptr<TRI_json_t> shardResult(TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, res->answer->body()));

if (shardResult == nullptr || ! TRI_IsObjectJson(shardResult.get())) {
delete res;
return TRI_ERROR_INTERNAL;
}

bool const isError = triagens::basics::JsonHelper::checkAndGetBooleanValue(shardResult.get(), "error");
if (isError) {
// shard returned an error
delete res;
return triagens::basics::JsonHelper::getNumericValue<int>(shardResult.get(), "errorNum", TRI_ERROR_INTERNAL);
}

auto docs = TRI_LookupObjectJson(shardResult.get(), "edges");

@ -1415,8 +1423,6 @@ int modifyDocumentOnCoordinator (
json.get(), headers, responseCode, resultHeaders, resultBody);
}


////////////////////////////////////////////////////////////////////////////////
/// @brief modify a document in a coordinator
////////////////////////////////////////////////////////////////////////////////

|
@ -66,6 +66,9 @@ namespace triagens {
for (auto& it : _vertices) {
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, it.second);
}
for (auto& it : _edges) {
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, it.second);
}
}

void setStartVertex (VertexId const& v) override;

@ -508,7 +508,7 @@ triagens::basics::Json RestCursorHandler::buildExtra (triagens::aql::QueryResult
/// error occurs during query processing, the server will respond with *HTTP 400*.
/// Again, the body of the response will contain details about the error.
///
/// A list of query errors can be found (../ArangoErrors/README.md) here.
/// A [list of query errors can be found here](../ErrorCodes/README.md).
///
///
/// @RESTRETURNCODE{404}

@ -50,7 +50,6 @@ RestEdgesHandler::RestEdgesHandler (HttpRequest* request)
: RestVocbaseBaseHandler(request) {
}


// -----------------------------------------------------------------------------
// --SECTION--                                                  Handler methods
// -----------------------------------------------------------------------------

@ -277,8 +276,10 @@ bool RestEdgesHandler::readEdges (std::vector<traverser::TraverserExpression*> c
generateError(responseCode, res);
return false;
}

resultDocument.set("error", triagens::basics::Json(false));
resultDocument.set("code", triagens::basics::Json(200));
generateResult(resultDocument.json());
return true;
}

@ -374,7 +375,6 @@ bool RestEdgesHandler::readEdges (std::vector<traverser::TraverserExpression*> c
return true;
}


////////////////////////////////////////////////////////////////////////////////
/// Internal function for optimized edge retrieval.
/// Allows sending a TraverserExpression for filtering in the body

@ -405,7 +405,7 @@ bool RestEdgesHandler::readFilteredEdges () {
if (! body.isArray()) {
generateError(HttpResponse::BAD,
TRI_ERROR_HTTP_BAD_PARAMETER,
"Expected a list of traverser expressions as body parameter");
"Expected an array of traverser expressions as body parameter");
return false;
}

@ -420,3 +420,4 @@ bool RestEdgesHandler::readFilteredEdges () {
}
return readEdges(expressions);
}

|
@ -1130,7 +1130,7 @@ static void CreateCollectionCoordinator (const v8::FunctionCallbackInfo<v8::Value
if (myerrno != TRI_ERROR_NO_ERROR) {
TRI_V8_THROW_EXCEPTION_MESSAGE(myerrno, errorMsg);
}
ci->loadPlannedCollections(true);
ci->loadPlannedCollections();

shared_ptr<CollectionInfo> const& c = ci->getCollection(databaseName, cid);
TRI_vocbase_col_t* newcoll = CoordinatorCollection(vocbase, *c);

@ -337,6 +337,7 @@ exports.infoLines = function () {
////////////////////////////////////////////////////////////////////////////////

exports.log = exports.info;
exports._log = log;

////////////////////////////////////////////////////////////////////////////////
/// @brief logLines

@ -337,6 +337,7 @@ exports.infoLines = function () {
////////////////////////////////////////////////////////////////////////////////

exports.log = exports.info;
exports._log = log;

////////////////////////////////////////////////////////////////////////////////
/// @brief logLines

@ -350,11 +350,9 @@ else {

exports.safeJoin = function () {
var args = Array.prototype.slice.call(arguments);
var path = safeJoin(args.shift(), args.shift());
while (args.length) {
path = safeJoin(path, args.shift());
}
return path;
return args.reduce(function (base, relative) {
return safeJoin(base, relative);
}, args.shift());
};
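The rewrite replaces the manual while-loop with a left fold: `reduce` starts from the first argument and joins each remaining argument pairwise, which is exactly what the loop did. For instance:

```js
// Both the old and the new implementation perform a pairwise left fold:
// safeJoin('a', 'b', 'c') === safeJoin(safeJoin('a', 'b'), 'c')
```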

////////////////////////////////////////////////////////////////////////////////
|
@ -81,11 +81,13 @@ var createRoutePlannerGraph = function() {
);

var g = Graph._create("routeplanner", edgeDefinition);
var berlin = g.germanCity.save({_key: "Berlin", population : 3000000, isCapital : true});
var cologne = g.germanCity.save({_key: "Cologne", population : 1000000, isCapital : false});
var hamburg = g.germanCity.save({_key: "Hamburg", population : 1000000, isCapital : false});
var lyon = g.frenchCity.save({_key: "Lyon", population : 80000, isCapital : false});
var paris = g.frenchCity.save({_key: "Paris", population : 4000000, isCapital : true});
var berlin = g.germanCity.save({_key: "Berlin", population : 3000000, isCapital : true, loc: [52.5167, 13.3833]});
var cologne = g.germanCity.save({_key: "Cologne", population : 1000000, isCapital : false, loc: [50.9364, 6.9528]});
var hamburg = g.germanCity.save({_key: "Hamburg", population : 1000000, isCapital : false, loc: [53.5653, 10.0014]});
var lyon = g.frenchCity.save({_key: "Lyon", population : 80000, isCapital : false, loc: [45.7600, 4.8400]});
var paris = g.frenchCity.save({_key: "Paris", population : 4000000, isCapital : true, loc: [48.8567, 2.3508]});
g.germanCity.ensureGeoIndex("loc");
g.frenchCity.ensureGeoIndex("loc");
g.germanHighway.save(berlin._id, cologne._id, {distance: 850});
g.germanHighway.save(berlin._id, hamburg._id, {distance: 400});
g.germanHighway.save(hamburg._id, cologne._id, {distance: 500});
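With the `loc` attributes and geo indexes added above, the example graph can now serve geo queries. An illustrative use (not part of the commit; it only relies on the collection and coordinates defined above):

```js
// Illustrative only: fetch the German cities closest to Berlin's
// coordinates, using the geo index created on the 'loc' attribute.
var result = db._query(
  'FOR city IN NEAR(germanCity, 52.5167, 13.3833, 3) RETURN city._key'
).toArray();
// e.g. [ "Berlin", "Hamburg", "Cologne" ]
```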
|
@ -693,20 +693,27 @@ function INDEX (collection, indexTypes) {
/// @brief get access to a collection
////////////////////////////////////////////////////////////////////////////////

function COLLECTION (name) {
function COLLECTION (name, func) {
'use strict';

if (typeof name !== 'string') {
THROW(null, INTERNAL.errors.ERROR_INTERNAL);
THROW(func, INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, func);
}

var c;
if (name.substring(0, 1) === '_') {
// system collections need to be accessed slightly differently as they
// are not returned by the propertyGetter of db
return INTERNAL.db._collection(name);
c = INTERNAL.db._collection(name);
}
else {
c = INTERNAL.db[name];
}

return INTERNAL.db[name];
if (c === null || c === undefined) {
THROW(func, INTERNAL.errors.ERROR_ARANGO_COLLECTION_NOT_FOUND, String(name));
}
return c;
}
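Passing the calling AQL function's name into COLLECTION lets the thrown error name the function that failed, and a non-string collection name now raises a query-level type mismatch instead of a generic internal error. Illustrative calls (behavior per the code above and the geo test suite near the end of this diff):

```js
// Illustrative behavior of the revised helper:
COLLECTION('nosuchcollection', 'WITHIN');
// -> throws ERROR_ARANGO_COLLECTION_NOT_FOUND, attributed to WITHIN
COLLECTION(1234, 'WITHIN');
// -> throws ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH
//    (previously a generic ERROR_INTERNAL with no function name)
```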

////////////////////////////////////////////////////////////////////////////////
@ -1311,7 +1318,7 @@ function AQL_DOCUMENT (collection, id) {
}

if (TYPEWEIGHT(id) === TYPEWEIGHT_ARRAY) {
var c = COLLECTION(collection);
var c = COLLECTION(collection, "DOCUMENT");

var result = [ ], i;
for (i = 0; i < id.length; ++i) {

@ -1325,7 +1332,7 @@ function AQL_DOCUMENT (collection, id) {
}

try {
return COLLECTION(collection).document(id);
return COLLECTION(collection, "DOCUMENT").document(id);
}
catch (e2) {
return null;

@ -1336,16 +1343,16 @@ function AQL_DOCUMENT (collection, id) {
/// @brief get all documents from the specified collection
////////////////////////////////////////////////////////////////////////////////

function GET_DOCUMENTS (collection) {
function GET_DOCUMENTS (collection, func) {
'use strict';

WARN(null, INTERNAL.errors.ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION, AQL_TO_STRING(collection));

if (isCoordinator) {
return COLLECTION(collection).all().toArray();
return COLLECTION(collection, func).all().toArray();
}

return COLLECTION(collection).ALL(0, null).documents;
return COLLECTION(collection, func).ALL(0, null).documents;
}

////////////////////////////////////////////////////////////////////////////////

@ -3928,27 +3935,30 @@ function AQL_NEAR (collection, latitude, longitude, limit, distanceAttribute) {
limit = 100;
}
else {
if (TYPEWEIGHT(limit) !== TYPEWEIGHT_NUMBER) {
THROW("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
}
limit = AQL_TO_NUMBER(limit);
}

var weight = TYPEWEIGHT(distanceAttribute);
if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_STRING) {
WARN("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
THROW("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
}

if (isCoordinator) {
var query = COLLECTION(collection).near(latitude, longitude);
var query = COLLECTION(collection, "NEAR").near(latitude, longitude);
query._distance = distanceAttribute;
return query.limit(limit).toArray();
}

var idx = INDEX(COLLECTION(collection), [ "geo1", "geo2" ]);
var idx = INDEX(COLLECTION(collection, "NEAR"), [ "geo1", "geo2" ]);

if (idx === null) {
THROW("NEAR", INTERNAL.errors.ERROR_QUERY_GEO_INDEX_MISSING, collection);
}

var result = COLLECTION(collection).NEAR(idx.id, latitude, longitude, limit);
var result = COLLECTION(collection, "NEAR").NEAR(idx.id, latitude, longitude, limit);

if (distanceAttribute === null || distanceAttribute === undefined) {
return result.documents;

@ -3976,22 +3986,28 @@ function AQL_WITHIN (collection, latitude, longitude, radius, distanceAttribute)

var weight = TYPEWEIGHT(distanceAttribute);
if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_STRING) {
WARN("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
THROW("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
}

weight = TYPEWEIGHT(radius);
if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_NUMBER) {
THROW("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH);
}
radius = AQL_TO_NUMBER(radius);

if (isCoordinator) {
var query = COLLECTION(collection).within(latitude, longitude, radius);
var query = COLLECTION(collection, "WITHIN").within(latitude, longitude, radius);
query._distance = distanceAttribute;
return query.toArray();
}

var idx = INDEX(COLLECTION(collection), [ "geo1", "geo2" ]);
var idx = INDEX(COLLECTION(collection, "WITHIN"), [ "geo1", "geo2" ]);

if (idx === null) {
THROW("WITHIN", INTERNAL.errors.ERROR_QUERY_GEO_INDEX_MISSING, collection);
}

var result = COLLECTION(collection).WITHIN(idx.id, latitude, longitude, radius);
var result = COLLECTION(collection, "WITHIN").WITHIN(idx.id, latitude, longitude, radius);

if (distanceAttribute === null || distanceAttribute === undefined) {
return result.documents;
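With the WARNs replaced by THROWs and the new radius check, invalid `limit`, `radius`, and `distanceAttribute` values passed to NEAR and WITHIN are now hard query errors rather than warnings. For example (illustrative collection name; compare the geo test suite near the end of this diff):

```js
// Both queries now fail with ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH
// instead of merely emitting a warning:
db._query('RETURN WITHIN("locations", 0, 0, "foo")'); // non-numeric radius
db._query('RETURN NEAR("locations", 0, 0, true)');    // non-numeric limit
```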
@ -4023,7 +4039,12 @@ function AQL_WITHIN_RECTANGLE (collection, latitude1, longitude1, latitude2, lon
return null;
}

return COLLECTION(collection).withinRectangle(latitude1, longitude1, latitude2, longitude2).toArray();
return COLLECTION(collection, "WITHIN_RECTANGLE").withinRectangle(
latitude1,
longitude1,
latitude2,
longitude2
).toArray();
}

////////////////////////////////////////////////////////////////////////////////

@ -4105,7 +4126,7 @@ function AQL_IS_IN_POLYGON (points, latitude, longitude) {
function AQL_FULLTEXT (collection, attribute, query, limit) {
'use strict';

var idx = INDEX_FULLTEXT(COLLECTION(collection), attribute);
var idx = INDEX_FULLTEXT(COLLECTION(collection, "FULLTEXT"), attribute);

if (idx === null) {
THROW("FULLTEXT", INTERNAL.errors.ERROR_QUERY_FULLTEXT_INDEX_MISSING, collection);

@ -4113,12 +4134,12 @@ function AQL_FULLTEXT (collection, attribute, query, limit) {

if (isCoordinator) {
if (limit !== undefined && limit !== null && limit > 0) {
return COLLECTION(collection).fulltext(attribute, query, idx).limit(limit).toArray();
return COLLECTION(collection, "FULLTEXT").fulltext(attribute, query, idx).limit(limit).toArray();
}
return COLLECTION(collection).fulltext(attribute, query, idx).toArray();
return COLLECTION(collection, "FULLTEXT").fulltext(attribute, query, idx).toArray();
}

return COLLECTION(collection).FULLTEXT(idx, query, limit).documents;
return COLLECTION(collection, "FULLTEXT").FULLTEXT(idx, query, limit).documents;
}

// -----------------------------------------------------------------------------

@ -5486,7 +5507,7 @@ function AQL_PATHS (vertices, edgeCollection, direction, options) {
}

var searchAttributes = {
edgeCollection : COLLECTION(edgeCollection),
edgeCollection : COLLECTION(edgeCollection, "PATHS"),
minLength : minLength,
maxLength : maxLength,
direction : searchDirection,

@ -5612,7 +5633,7 @@ function AQL_GRAPH_PATHS (graphName, options) {
return null;
}
if (edgeCollections.indexOf(def.collection) === -1) {
edgeCollections.push(COLLECTION(def.collection));
edgeCollections.push(COLLECTION(def.collection, "GRAPH_PATHS"));
}

});

@ -5633,7 +5654,7 @@ function AQL_GRAPH_PATHS (graphName, options) {
followCycles : followCycles
};

var vertices = GET_DOCUMENTS(startCollection);
var vertices = GET_DOCUMENTS(startCollection, "GRAPH_PATHS");
var n = vertices.length, i, j;
for (i = 0; i < n; ++i) {
var vertex = vertices[i];

@ -6018,11 +6039,11 @@ function FILTER_RESTRICTION (list, restrictionList) {
/// @brief get all document _ids matching the given examples
////////////////////////////////////////////////////////////////////////////////

function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) {
function DOCUMENT_IDS_BY_EXAMPLE (func, collectionList, example) {
var res = [ ];
if (example === "null" || example === null || ! example) {
collectionList.forEach(function (c) {
res = res.concat(COLLECTION(c).toArray().map(function(t) { return t._id; }));
res = res.concat(COLLECTION(c, func).toArray().map(function(t) { return t._id; }));
});
return res;
}

@ -6045,7 +6066,7 @@ function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) {
});
collectionList.forEach(function (c) {
tmp.forEach(function (e) {
res = res.concat(COLLECTION(c).byExample(e).toArray().map(function(t) {
res = res.concat(COLLECTION(c, func).byExample(e).toArray().map(function(t) {
return t._id;
}));
});

@ -6057,11 +6078,11 @@ function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) {
/// @brief getAllDocsByExample
////////////////////////////////////////////////////////////////////////////////

function DOCUMENTS_BY_EXAMPLE (collectionList, example) {
function DOCUMENTS_BY_EXAMPLE (func, collectionList, example) {
var res = [ ];
if (example === "null" || example === null || ! example) {
collectionList.forEach(function (c) {
res = res.concat(COLLECTION(c).toArray());
res = res.concat(COLLECTION(c, func).toArray());
});
return res;
}

@ -6082,7 +6103,7 @@ function DOCUMENTS_BY_EXAMPLE (collectionList, example) {
});
collectionList.forEach(function (c) {
tmp.forEach(function (e) {
res = res.concat(COLLECTION(c).byExample(e).toArray());
res = res.concat(COLLECTION(c, func).byExample(e).toArray());
});
});
return res;

@ -6152,7 +6173,7 @@ function RESOLVE_GRAPH_TO_FROM_VERTICES (graphname, options, funcname) {
if (options.includeOrphans) {
collections.fromCollections = collections.fromCollections.concat(collections.orphanCollections);
}
return DOCUMENTS_BY_EXAMPLE(
return DOCUMENTS_BY_EXAMPLE(funcname,
collections.fromCollections.filter(removeDuplicates), options.fromVertexExample
);
}

@ -6168,7 +6189,7 @@ function RESOLVE_GRAPH_TO_TO_VERTICES (graphname, options, funcname) {
return self.indexOf(elem) === pos;
};

return DOCUMENTS_BY_EXAMPLE(
return DOCUMENTS_BY_EXAMPLE(funcname,
collections.toCollection.filter(removeDuplicates), options.toVertexExample
);
}

@ -6188,7 +6209,7 @@ function RESOLVE_GRAPH_START_VERTICES (graphName, options, funcname) {
var removeDuplicates = function(elem, pos, self) {
return self.indexOf(elem) === pos;
};
return DOCUMENTS_BY_EXAMPLE(
return DOCUMENTS_BY_EXAMPLE(funcname,
collections.fromCollections.filter(removeDuplicates), options.fromVertexExample
);
}

@ -6211,13 +6232,13 @@ function RESOLVE_GRAPH_TO_DOCUMENTS (graphname, options, funcname) {
};

var result = {
fromVertices : DOCUMENTS_BY_EXAMPLE(
fromVertices : DOCUMENTS_BY_EXAMPLE(funcname,
collections.fromCollections.filter(removeDuplicates), options.fromVertexExample
),
toVertices : DOCUMENTS_BY_EXAMPLE(
toVertices : DOCUMENTS_BY_EXAMPLE(funcname,
collections.toCollection.filter(removeDuplicates), options.toVertexExample
),
edges : DOCUMENTS_BY_EXAMPLE(
edges : DOCUMENTS_BY_EXAMPLE(funcname,
collections.edgeCollections.filter(removeDuplicates), options.edgeExamples
),
edgeCollections : collections.edgeCollections,

@ -6367,7 +6388,7 @@ function AQL_SHORTEST_PATH (vertexCollection,
) {
params = SHORTEST_PATH_PARAMS(params);
var a = TRAVERSAL_FUNC("SHORTEST_PATH",
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)),
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "SHORTEST_PATH")),
TO_ID(startVertex, vertexCollection),
TO_ID(endVertex, vertexCollection),
direction,

@ -6900,14 +6921,17 @@ function AQL_GRAPH_SHORTEST_PATH (graphName,
let startVertices;
if (options.hasOwnProperty("startVertexCollectionRestriction")
&& Array.isArray(options.startVertexCollectionRestriction)) {
startVertices = DOCUMENT_IDS_BY_EXAMPLE(options.startVertexCollectionRestriction, startVertexExample);
startVertices = DOCUMENT_IDS_BY_EXAMPLE(
"GRAPH_SHORTEST_PATH", options.startVertexCollectionRestriction, startVertexExample);
}
else if (options.hasOwnProperty("startVertexCollectionRestriction")
&& typeof options.startVertexCollectionRestriction === 'string') {
startVertices = DOCUMENT_IDS_BY_EXAMPLE([ options.startVertexCollectionRestriction ], startVertexExample);
startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH",
[ options.startVertexCollectionRestriction ], startVertexExample);
}
else {
startVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, startVertexExample);
startVertices = DOCUMENT_IDS_BY_EXAMPLE(
"GRAPH_SHORTEST_PATH", vertexCollections, startVertexExample);
}
if (startVertices.length === 0) {
return [];

@ -6916,14 +6940,17 @@ function AQL_GRAPH_SHORTEST_PATH (graphName,
let endVertices;
if (options.hasOwnProperty("endVertexCollectionRestriction")
&& Array.isArray(options.endVertexCollectionRestriction)) {
endVertices = DOCUMENT_IDS_BY_EXAMPLE(options.endVertexCollectionRestriction, endVertexExample);
endVertices = DOCUMENT_IDS_BY_EXAMPLE(
"GRAPH_SHORTEST_PATH", options.endVertexCollectionRestriction, endVertexExample);
}
else if (options.hasOwnProperty("endVertexCollectionRestriction")
&& typeof options.endVertexCollectionRestriction === 'string') {
endVertices = DOCUMENT_IDS_BY_EXAMPLE([ options.endVertexCollectionRestriction ], endVertexExample);
endVertices = DOCUMENT_IDS_BY_EXAMPLE(
"GRAPH_SHORTEST_PATH", [ options.endVertexCollectionRestriction ], endVertexExample);
}
else {
endVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, endVertexExample);
endVertices = DOCUMENT_IDS_BY_EXAMPLE(
"GRAPH_SHORTEST_PATH", vertexCollections, endVertexExample);
}
if (endVertices.length === 0) {
return [];

@ -6971,7 +6998,7 @@ function AQL_TRAVERSAL (vertexCollection,
params = TRAVERSAL_PARAMS(params);

return TRAVERSAL_FUNC("TRAVERSAL",
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)),
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "TRAVERSAL")),
TO_ID(startVertex, vertexCollection),
undefined,
direction,

@ -7148,7 +7175,7 @@ function AQL_TRAVERSAL_TREE (vertexCollection,
}

var result = TRAVERSAL_FUNC("TRAVERSAL_TREE",
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)),
TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "TRAVERSAL_TREE")),
TO_ID(startVertex, vertexCollection),
undefined,
direction,

@ -7327,7 +7354,7 @@ function AQL_EDGES (edgeCollection,
options) {
'use strict';

var c = COLLECTION(edgeCollection), result;
var c = COLLECTION(edgeCollection, "EDGES"), result;

// validate arguments
if (direction === "outbound") {

@ -7704,7 +7731,7 @@ function AQL_GRAPH_NEIGHBORS (graphName,
}
}
let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();});
let startVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertexExample);
let startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_NEIGHBORS", vertexCollections, vertexExample);
if (startVertices.length === 0) {
return [];
}

@ -8044,13 +8071,13 @@ function AQL_GRAPH_COMMON_NEIGHBORS (graphName,

let graph = graphModule._graph(graphName);
let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();});
let vertices1 = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertex1Examples);
let vertices1 = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_COMMON_NEIGHBORS", vertexCollections, vertex1Examples);
let vertices2;
if (vertex1Examples === vertex2Examples) {
vertices2 = vertices1;
}
else {
vertices2 = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertex2Examples);
vertices2 = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_COMMON_NEIGHBORS", vertexCollections, vertex2Examples);
}
// Use an ES6 Map. Higher performance than Object.
let tmpNeighborsLeft = new Map();

@ -8846,7 +8873,7 @@ function AQL_GRAPH_ABSOLUTE_BETWEENNESS (graphName, options) {
options.includeData = false;
let graph = graphModule._graph(graphName);
let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();});
let vertexIds = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, {});
let vertexIds = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_ABSOLUTE_BETWEENNESS", vertexCollections, {});
let result = {};
let distanceMap = AQL_GRAPH_SHORTEST_PATH(graphName, vertexIds , vertexIds, options);
for (let k = 0; k < vertexIds.length; k++) {

@ -31,11 +31,38 @@
var qb = require('aqb');
var util = require('util');
var extend = require('underscore').extend;
var arangoConsole = require('console');
var ErrorStackParser = require('error-stack-parser');
var AssertionError = require('assert').AssertionError;
var exists = require('org/arangodb/is').existy;
var db = require('org/arangodb').db;

const NATIVE_LOG_LEVELS = ['debug', 'info', 'warn', 'error'];

function nativeLogger(level, levelNum, mount) {
let logLevel = String(level).toLowerCase();
if (logLevel === 'trace' && levelNum === -200) {
logLevel = 'info'; // require('console').trace also uses INFO level
}
if (NATIVE_LOG_LEVELS.indexOf(logLevel) !== -1) {
return function (message) {
arangoConsole._log(logLevel, `${mount} ${message}`);
};
}
if (levelNum >= 200) {
logLevel = 'error';
} else if (levelNum >= 100) {
logLevel = 'warn';
} else if (levelNum <= -100) {
logLevel = 'debug';
} else {
logLevel = 'info';
}
return function (message) {
arangoConsole._log(logLevel, `(${level}) ${mount} ${message}`);
};
}

function ConsoleLogs(console) {
this._console = console;
this.defaultMaxAge = 2 * 60 * 60 * 1000;

@ -131,8 +158,10 @@ function Console(mount, tracing) {
this._mount = mount;
this._timers = Object.create(null);
this._tracing = Boolean(tracing);
this._nativeLogging = true;
this._databaseLogging = true;
this._logLevel = -999;
this._logLevels = {TRACE: -2};
this._logLevels = {TRACE: -200};
this._assertThrows = false;
this.logs = new ConsoleLogs(this);

@ -142,10 +171,10 @@ function Console(mount, tracing) {
}
}.bind(this));

this.debug = this.custom('DEBUG', -1);
this.debug = this.custom('DEBUG', -100);
this.info = this.custom('INFO', 0);
this.warn = this.custom('WARN', 1);
this.error = this.custom('ERROR', 2);
this.warn = this.custom('WARN', 100);
this.error = this.custom('ERROR', 200);

this.assert.level = 'ERROR';
this.dir.level = 'INFO';

@ -170,14 +199,28 @@ extend(Console.prototype, {
level: level,
levelNum: this._logLevels[level],
time: Date.now(),
message: message
message: String(message)
};

let logLine;

if (this._nativeLogging) {
logLine = nativeLogger(level, doc.levelNum, doc.mount);
doc.message.split('\n').forEach(logLine);
}

if (this._tracing) {
var e = new Error();
let e = new Error();
Error.captureStackTrace(e, callee || this._log);
e.stack = e.stack.replace(/\n+$/, '');
doc.stack = ErrorStackParser.parse(e).slice(1);
if (this._nativeLogging) {
e.stack.split('\n').slice(2).forEach(logLine);
}
}

if (!this._databaseLogging) {
return;
}

if (!db._foxxlog) {

@ -240,7 +283,7 @@ extend(Console.prototype, {
custom: function (level, weight) {
level = String(level);
weight = Number(weight);
weight = weight === weight ? weight : 999;
weight = weight === weight ? weight : 50;
this._logLevels[level] = weight;
var logWithLevel = function() {
this._log(level, util.format.apply(null, arguments), logWithLevel);
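The level weights are now spaced in steps of 100 (TRACE -200, DEBUG -100, INFO 0, WARN 100, ERROR 200), and custom levels with a non-numeric weight default to 50 instead of 999, landing in the INFO bucket. A sketch of how `nativeLogger` above routes a custom level (the `console` instance here stands for a Foxx app's console object):

```js
// Sketch: routing of a custom log level through nativeLogger.
var audit = console.custom('AUDIT', 150); // registers weight 150
audit('something notable');
// 'AUDIT' is not a native level, and 150 >= 100 but < 200, so
// nativeLogger emits it as 'warn': "(AUDIT) <mount> something notable"
```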
@ -264,6 +307,16 @@ extend(Console.prototype, {
return this._tracing;
},

setNativeLogging: function (nativeLogging) {
this._nativeLogging = Boolean(nativeLogging);
return this._nativeLogging;
},

setDatabaseLogging: function (databaseLogging) {
this._databaseLogging = Boolean(databaseLogging);
return this._databaseLogging;
},

setAssertThrows: function (assertThrows) {
this._assertThrows = Boolean(assertThrows);
return this._assertThrows;

@ -346,6 +346,7 @@ class FoxxService {
filename = path.resolve(this.main.context.__dirname, filename);

var module = new Module(filename, this.main);
module.context.console = this.main.context.console;
module.context.applicationContext = _.extend(
new AppContext(this.main.context.applicationContext._service),
this.main.context.applicationContext,

@ -229,24 +229,28 @@ function ahuacatlGeoTestSuite () {
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN NEAR(\"" + locationsNon.name() + "\", 0, 0, 10, true)");
},

////////////////////////////////////////////////////////////////////////////////
/// @brief test invalid WITHIN arguments count
////////////////////////////////////////////////////////////////////////////////

testInvalidWithinArgument : function () {
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, \"foo\")");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, true)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, 0, true)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, 0, [ ])");
},

////////////////////////////////////////////////////////////////////////////////
/// @brief test invalid collection parameter
////////////////////////////////////////////////////////////////////////////////

testInvalidCollectionArgument : function () {
var cluster = require("org/arangodb/cluster");

assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(1234, 0, 0, 10)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(false, 0, 0, 10)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(true, 0, 0, 10)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN([ ], 0, 0, 10)");
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN({ }, 0, 0, 10)");
if (cluster.isCluster()) {
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" });
}
else {
assertQueryError(errors.ERROR_ARANGO_COLLECTION_NOT_FOUND.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" });
}
assertQueryError(errors.ERROR_ARANGO_COLLECTION_NOT_FOUND.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" });
assertQueryError(errors.ERROR_QUERY_BIND_PARAMETER_MISSING.code, "RETURN WITHIN(@name, 0, 0, 10)");
}

@ -83,7 +83,8 @@ function AgencySuite () {

testVersion : function () {
var agencyVersion = JSON.parse(agency.version());
assertEqual(agencyVersion.internalVersion, "2");
assertEqual(agencyVersion.etcdserver, "2.2.2");
assertEqual(agencyVersion.etcdcluster, "2.2.0");
},

////////////////////////////////////////////////////////////////////////////////

@ -213,6 +213,7 @@ HttpResponse::HttpResponseCode HttpResponse::responseCode (int code) {
case TRI_ERROR_ARANGO_DOCUMENT_KEY_BAD:
case TRI_ERROR_ARANGO_DOCUMENT_KEY_UNEXPECTED:
case TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID:
case TRI_ERROR_ARANGO_DOCUMENT_HANDLE_BAD:
case TRI_ERROR_CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES:
case TRI_ERROR_CLUSTER_MUST_NOT_SPECIFY_KEY:
case TRI_ERROR_TYPE_ERROR: