1
0
Fork 0

Merge branch 'devel' of github.com:arangodb/arangodb into devel

This commit is contained in:
Michael Hackstein 2016-05-03 09:23:22 +02:00
commit adb7c2cee2
73 changed files with 1058 additions and 886 deletions

View File

@ -48,7 +48,7 @@ class ArrayIterator {
throw Exception(Exception::InvalidValueType, "Expecting Array slice");
}
if (slice.length() > 0) {
if (_size > 0) {
auto h = slice.head();
if (h == 0x13) {
_current = slice.at(0).start();
@ -138,6 +138,19 @@ class ArrayIterator {
inline bool isFirst() const throw() { return (_position == 0); }
inline bool isLast() const throw() { return (_position + 1 >= _size); }
// Rewind the iterator to the first element of the array.
// For an empty array, _current stays nullptr. For a compact array
// (head byte 0x13) the start of the first element is resolved via
// at(0); otherwise the element data offset is only computed up front
// when allowRandomIteration is set.
// NOTE(review): presumably sequential-only iteration resolves elements
// lazily when allowRandomIteration is false — confirm with callers.
inline void reset(bool allowRandomIteration) {
_position = 0;
_current = nullptr;
if (_size > 0) {
auto h = _slice.head();
if (h == 0x13) {
_current = _slice.at(0).start();
} else if (allowRandomIteration) {
_current = _slice.begin() + _slice.findDataOffset(h);
}
}
}
private:
Slice _slice;
@ -162,7 +175,7 @@ class ObjectIterator {
throw Exception(Exception::InvalidValueType, "Expecting Object slice");
}
if (slice.length() > 0) {
if (_size > 0) {
auto h = slice.head();
if (h == 0x14) {
_current = slice.keyAt(0, false).start();

View File

@ -42,12 +42,17 @@
#include "velocypack/Value.h"
#include "velocypack/ValueType.h"
#ifndef VELOCYPACK_HASH
// forward for XXH64 function declared elsewhere
extern "C" unsigned long long XXH64(void const*, size_t, unsigned long long);
#define VELOCYPACK_HASH(mem, size, seed) XXH64(mem, size, seed)
#endif
namespace arangodb {
namespace velocypack {
// forward for fasthash64 function declared elsewhere
uint64_t fasthash64(void const*, size_t, uint64_t);
class SliceScope;
class Slice {
@ -140,7 +145,7 @@ class Slice {
// hashes the binary representation of a value
inline uint64_t hash(uint64_t seed = 0xdeadbeef) const {
return fasthash64(start(), checkOverflow(byteSize()), seed);
return VELOCYPACK_HASH(start(), checkOverflow(byteSize()), seed);
}
// hashes the value, normalizing different representations of
@ -148,6 +153,12 @@ class Slice {
// hash values than the binary hash() function
uint64_t normalizedHash(uint64_t seed = 0xdeadbeef) const;
// hashes the binary representation of a String slice. No check
// is done if the Slice value is actually of type String
inline uint64_t hashString(uint64_t seed = 0xdeadbeef) const throw() {
return VELOCYPACK_HASH(start(), stringSliceLength(), seed);
}
// check if slice is of the specified type
inline bool isType(ValueType t) const throw() { return TypeMap[*_start] == t; }
@ -640,7 +651,11 @@ class Slice {
}
VELOCYPACK_ASSERT(h <= 0x12);
return readInteger<ValueLength>(_start + 1, WidthMap[h]);
if (h <= 0x14) {
return readInteger<ValueLength>(_start + 1, WidthMap[h]);
}
// fallthrough to exception
break;
}
case ValueType::String: {
@ -759,6 +774,19 @@ class Slice {
std::string hexType() const;
private:
// get the total byte size for a String slice, including the head byte.
// no check is done if the type of the slice is actually String
ValueLength stringSliceLength() const throw() {
// check if the type has a fixed length first
auto const h = head();
if (h == 0xbf) {
// long UTF-8 String: 1 head byte + 8-byte length field + payload length
return static_cast<ValueLength>(
1 + 8 + readInteger<ValueLength>(_start + 1, 8));
}
// short String: head byte encodes the payload length as (h - 0x40);
// add 1 for the head byte itself
return static_cast<ValueLength>(1 + h - 0x40);
}
// return the value for a UInt object, without checks
// returns 0 for invalid values/types
uint64_t getUIntUnchecked() const;

View File

@ -3,10 +3,10 @@
#ifndef VELOCYPACK_VERSION_NUMBER_H
#define VELOCYPACK_VERSION_NUMBER_H 1
#define VELOCYPACK_VERSION "0.1.15"
#define VELOCYPACK_VERSION "0.1.30"
#define VELOCYPACK_VERSION_MAJOR 0
#define VELOCYPACK_VERSION_MINOR 1
#define VELOCYPACK_VERSION_PATCH 15
#define VELOCYPACK_VERSION_PATCH 30
#endif
#endif

View File

@ -417,12 +417,12 @@ uint64_t Slice::normalizedHash(uint64_t seed) const {
if (isNumber()) {
// upcast integer values to double
double v = getNumericValue<double>();
value = fasthash64(&v, sizeof(v), seed);
value = VELOCYPACK_HASH(&v, sizeof(v), seed);
} else if (isArray()) {
// normalize arrays by hashing array length and iterating
// over all array members
uint64_t const n = length() ^ 0xba5bedf00d;
value = fasthash64(&n, sizeof(n), seed);
value = VELOCYPACK_HASH(&n, sizeof(n), seed);
for (auto const& it : ArrayIterator(*this)) {
value ^= it.normalizedHash(value);
}
@ -430,7 +430,7 @@ uint64_t Slice::normalizedHash(uint64_t seed) const {
// normalize objects by hashing object length and iterating
// over all object members
uint64_t const n = length() ^ 0xf00ba44ba5;
uint64_t seed2 = fasthash64(&n, sizeof(n), seed);
uint64_t seed2 = VELOCYPACK_HASH(&n, sizeof(n), seed);
value = seed2;
for (auto const& it : ObjectIterator(*this)) {
value ^= it.key.normalizedHash(seed2);

View File

@ -63,7 +63,7 @@ Alternatively we could use a `LET` statement with a subquery to group the traver
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock COMBINING_GRAPH_03_combine_let
Finaly we clean up again:
Finally, we clean up again:
@startDocuBlockInline COMBINING_GRAPH_04_cleanup
@EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_04_cleanup}

View File

@ -94,7 +94,7 @@ to undefined behavior and should be avoided.
!SECTION Enforcing strict mode
By default, any user function code will be executed in *sloppy mode*, not
*strict* or *strong mode*. In order to make a user function being run in strict
*strict* or *strong mode*. In order to make a user function run in strict
mode, use `"use strict"` explicitly inside the user function, e.g.:
```js

View File

@ -14,7 +14,7 @@ AQL supports the following functions to operate on array values:
*Examples*
FLATTEN([ 1, 2, [ 3, 4 ], 5, [ 6, 7 ], [ 8, [ 9, 10 ] ])
FLATTEN([ 1, 2, [ 3, 4 ], 5, [ 6, 7 ], [ 8, [ 9, 10 ] ] ])
will produce:
@ -22,59 +22,12 @@ AQL supports the following functions to operate on array values:
To fully flatten the array, use a *depth* of 2:
FLATTEN([ 1, 2, [ 3, 4 ], 5, [ 6, 7 ], [ 8, [ 9, 10 ] ], 2)
FLATTEN([ 1, 2, [ 3, 4 ], 5, [ 6, 7 ], [ 8, [ 9, 10 ] ] ], 2)
This will produce:
[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]
- *MIN(array)*: Returns the smallest element of *array*. *null* values
are ignored. If the array is empty or only *null* values are contained in the array, the
function will return *null*.
- *MAX(array)*: Returns the greatest element of *array*. *null* values
are ignored. If the array is empty or only *null* values are contained in the array, the
function will return *null*.
- *AVERAGE(array)*: Returns the average (arithmetic mean) of the values in *array*.
This requires the elements in *array* to be numbers. *null* values are ignored.
If the array is empty or only *null* values are contained in the array, the function
will return *null*.
- *SUM(array)*: Returns the sum of the values in *array*. This
requires the elements in *array* to be numbers. *null* values are ignored.
- *MEDIAN(array)*: Returns the median value of the values in *array*. This
requires the elements in *array* to be numbers. *null* values are ignored. If the
array is empty or only *null* values are contained in the array, the function will return
*null*.
- *PERCENTILE(array, n, method)*: Returns the *n*th percentile of the values in *array*.
This requires the elements in *array* to be numbers. *null* values are ignored. *n* must
be between 0 (excluded) and 100 (included). *method* can be *rank* or *interpolation*.
The function will return null if the array is empty or only *null* values are contained
in it or the percentile cannot be calculated.
- *VARIANCE_POPULATION(array)*: Returns the population variance of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *VARIANCE_SAMPLE(array)*: Returns the sample variance of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *STDDEV_POPULATION(array)*: Returns the population standard deviation of the
values in *array*. This requires the elements in *array* to be numbers. *null*
values are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *STDDEV_SAMPLE(array)*: Returns the sample standard deviation of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *REVERSE(array)*: Returns the elements in *array* in reversed order.
- *FIRST(array)*: Returns the first element in *array* or *null* if the
@ -255,6 +208,13 @@ AQL supports the following functions to operate on array values:
REMOVE_NTH([ "a", "b", "c", "d", "e" ], -2)
Apart from these functions, AQL also offers several language constructs:
Also see the [numeric functions](NumericFunctions.md) for more functions that work on
number arrays. Apart from that, AQL also offers several language constructs:
- [array operators](ArrayOperators.md) for array expansion and contraction,
- operations for array manipulations like [FOR](../AqlOperations/For.md), [SORT](../AqlOperations/Sort.md), [LIMIT](../AqlOperations/Limit.md), [COLLECT](../AqlOperations/Collect.md).
- [array comparison operators](Operators.md#array-comparison-operators) to compare
each element in an array to a value or the elements of another array,
- operations for array manipulations like [FOR](../Operations/For.md),
[SORT](../Operations/Sort.md), [LIMIT](../Operations/Limit.md),
as well as grouping with [COLLECT](../Operations/Collect.md),
which also offers efficient aggregation.

View File

@ -364,7 +364,7 @@ FOR user IN users
```js
DATE_FORMAT(DATE_NOW(), "%q/%yyyy") // quarter and year (e.g. "3/2015")
DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss.%fff") // e.g. "18.09.2015 15:30:49.374"
DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss,%fff") // e.g. "18.09.2015 15:30:49,374"
DATE_FORMAT("1969", "Summer of '%yy") // "Summer of '69"
DATE_FORMAT("2016", "%%l = %l") // "%l = 1" (2016 is a leap year)
DATE_FORMAT("2016-03-01", "%xxx%") // "063", trailing % ignored

View File

@ -9,6 +9,14 @@ supported:
- *ROUND(value)*: Returns the integer closest to *value*
Rounding towards zero, also known as `trunc()` in C/C++, can be achieved with
a combination of the [ternary operator](../Operators.md#ternary-operator),
`CEIL()` and `FLOOR()`:
```
LET rounded = value >= 0 ? FLOOR(value) : CEIL(value)
```
- *ABS(value)*: Returns the absolute part of *value*
- *SQRT(value)*: Returns the square root of *value*
@ -16,3 +24,50 @@ supported:
- *POW(base, exp)*: Returns the *base* to the exponent *exp*
- *RAND()*: Returns a pseudo-random number between 0 and 1
- *MIN(array)*: Returns the smallest element of *array*. *null* values
are ignored. If the array is empty or only *null* values are contained in the array, the
function will return *null*.
- *MAX(array)*: Returns the greatest element of *array*. *null* values
are ignored. If the array is empty or only *null* values are contained in the array, the
function will return *null*.
- *AVERAGE(array)*: Returns the average (arithmetic mean) of the values in *array*.
This requires the elements in *array* to be numbers. *null* values are ignored.
If the array is empty or only *null* values are contained in the array, the function
will return *null*.
- *SUM(array)*: Returns the sum of the values in *array*. This
requires the elements in *array* to be numbers. *null* values are ignored.
- *MEDIAN(array)*: Returns the median value of the values in *array*. This
requires the elements in *array* to be numbers. *null* values are ignored. If the
array is empty or only *null* values are contained in the array, the function will return
*null*.
- *PERCENTILE(array, n, method)*: Returns the *n*th percentile of the values in *array*.
This requires the elements in *array* to be numbers. *null* values are ignored. *n* must
be between 0 (excluded) and 100 (included). *method* can be *rank* or *interpolation*.
The function will return null if the array is empty or only *null* values are contained
in it or the percentile cannot be calculated.
- *VARIANCE_POPULATION(array)*: Returns the population variance of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *VARIANCE_SAMPLE(array)*: Returns the sample variance of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *STDDEV_POPULATION(array)*: Returns the population standard deviation of the
values in *array*. This requires the elements in *array* to be numbers. *null*
values are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.
- *STDDEV_SAMPLE(array)*: Returns the sample standard deviation of the values in
*array*. This requires the elements in *array* to be numbers. *null* values
are ignored. If the array is empty or only *null* values are contained in the array,
the function will return *null*.

View File

@ -11,6 +11,9 @@ For string processing, AQL offers the following functions:
/* "foobarbaz" */
CONCAT([ 'foo', 'bar', 'baz' ])
/* "hello!" */
CONCAT([ 'he', ['ll'] ], 'o!')
- *CONCAT_SEPARATOR(separator, value1, value2, ... valueN)*:
Concatenate the strings passed as arguments *value1* to *valueN* using the
@ -23,19 +26,24 @@ For string processing, AQL offers the following functions:
/* "foo, bar, baz" */
CONCAT_SEPARATOR(', ', [ 'foo', 'bar', 'baz' ])
- *CHAR_LENGTH(value)*: Return the number of characters in *value*. This is
a synonym for *LENGTH(value)*
/* "1-2-3-4-5" */
CONCAT_SEPARATOR('-', [1, 2, 3, null], [4, null, 5])
- *LOWER(value)*: Lower-case *value*
- *CHAR_LENGTH(value)*: Return the number of characters in *value* (not byte length).
This is a synonym for *LENGTH(value)*.
- *UPPER(value)*: Upper-case *value*
- *LOWER(value)*: Convert upper-case letters in *value* to their lower-case
counterparts. All other characters are returned unchanged.
- *UPPER(value)*: Convert lower-case letters in *value* to their upper-case
counterparts. All other characters are returned unchanged.
- *SUBSTITUTE(value, search, replace, limit)*: Replaces search values in the string
*value*. If *search* is a string, all occurrences of *search* will be replaced in
*value*. If *search* is a list, each occurrence of a value contained in *search*
will be replaced by the corresponding list item in *replace*. If *replace* has less
list items than *search*, occurrences of unmapped *search* items will be replaced
by the empty string. The number of replacements can optionally be limited using the
by an empty string. The number of replacements can optionally be limited using the
*limit* parameter. If the *limit* is reached, no further occurrences of the search
values will be replaced.
@ -43,7 +51,7 @@ For string processing, AQL offers the following functions:
SUBSTITUTE("the quick brown foxx", "quick", "lazy")
/* "the slow brown dog" */
SUBSTITUTE("the quick brown foxx", [ "quick", "foxx" ], [ "slow", "dog" ])
SUBSTITUTE("the quick brown foxx", [ "quick", "foxx" ], [ "slow", "dog" ])
/* "A VOID! brown " */
SUBSTITUTE("the quick brown foxx", [ "the", "quick", "foxx" ], [ "A", "VOID!" ])
@ -81,14 +89,14 @@ For string processing, AQL offers the following functions:
- *TRIM(value, chars)*: Returns the string *value* with whitespace stripped
from the start and end. The optional *chars* parameter can be used to override the
characters that should be removed from the string. It defaults to `\r\n\t `
(i.e. `0x0d`, `0x0a`, `0x09` and `0x20`).
characters that should be removed from the string. It defaults to `\r\n \t`
(i.e. `0x0d`, `0x0a`, `0x20` and `0x09`).
/* "foobar" */
TRIM(" foobar\t \r\n ")
TRIM(" foobar\t \r\n ")
/* "foo;bar;baz" */
TRIM(";foo;bar;baz, ", ",; ")
TRIM(";foo;bar;baz, ", ",; ")
- *LTRIM(value, chars)*: Returns the string *value* with whitespace stripped
from the start only. The optional *chars* parameter can be used to override the

View File

@ -12,15 +12,29 @@ This helps avoiding unexpected results. All type casts have to be performed by
invoking a type cast function. AQL offers several type cast functions for this
task. Each of these functions takes an operand of any data type and returns
a result value of type corresponding to the function name (e.g. *TO_NUMBER()*
will return a number value):
will return a numeric value):
- *TO_BOOL(value)*: Takes an input *value* of any type and converts it
into the appropriate boolean value as follows:
- *null* is converted to *false*.
- Numbers are converted to *true* if they are unequal to 0, and to *false* otherwise.
- Numbers are converted to *true*, except for 0, which is converted to *false*.
- Strings are converted to *true* if they are non-empty, and to *false* otherwise.
- Arrays are always converted to *true*.
- Objects / documents are always converted to *true*.
It's also possible to use double negation to cast to boolean:
```js
!!1 // true
!!0 // false
!!-0.0 // false
not not 1 // true
!!'non-empty string' // true
!!'' // false
```
`TO_BOOL()` is preferred however, because it states the intention clearer.
- *TO_NUMBER(value)*: Takes an input *value* of any type and converts it
into a numeric value as follows:
@ -35,16 +49,34 @@ will return a number value):
result of `TO_NUMBER()` for its sole member. An array with two or more members is
converted to *null*.
- An object / document is converted to *null*.
A unary plus will also try to cast to a number, but `TO_NUMBER()` is the preferred way:
```js
+'5' // 5
+[8] // 8
+[8,9] // null
+{} // null
```
A unary minus works likewise, except that a numeric value is also negated:
```js
-'5' // -5
-[8] // -8
-[8,9] // null
-{} // null
```
- *TO_STRING(value)*: Takes an input *value* of any type and converts it
into a string value as follows:
- *null* is converted to the string *"null"*
- *false* is converted to the string *"false"*, *true* to the string *"true"*
- Numbers are converted to their string representations.
- Numbers are converted to their string representations. This can also be a
scientific notation: `TO_STRING(0.0000002) // "2e-7"`
- An empty array is converted to the empty string. An array with one member is converted
to the result of `TO_STRING()` for its sole member. An array with two or more members
is converted to a comma-separated array with the string representation of its members.
is converted to a comma-separated array with the string representation of its members:
`TO_STRING([1,2,3]) // "1,2,3"`
- An object / document is converted to the string *[object Object]*.
`CONCAT(value)` behaves identically if only a single parameter is passed.
- *TO_ARRAY(value)*: Takes an input *value* of any type and converts it
into an array value as follows:
@ -52,7 +84,12 @@ will return a number value):
- Boolean values, numbers and strings are converted to an array containing the original
value as its single element.
- Arrays keep their original value.
- Objects / documents are converted to an array containing their attribute values as array elements
- Objects / documents are converted to an array containing their attribute **values**
as array elements:
```js
TO_ARRAY({foo: 1, bar: 2, baz: [3, 4, 5]}) // [1, 2, [3, 4, 5]]
```
- *TO_LIST(value)*: This is an alias for *TO_ARRAY*.

View File

@ -75,19 +75,24 @@ of an array operator is an array.
Examples:
```
[ 1, 2, 3 ] ALL IN [ 2, 3, 4 ] // false
[ 1, 2, 3 ] ALL IN [ 1, 2, 3 ] // true
[ 1, 2, 3 ] NONE IN [ 3 ] // false
[ 1, 2, 3 ] NONE IN [ 23, 42 ] // true
[ 1, 2, 3 ] ANY IN [ 4, 5, 6 ] // false
[ 1, 2, 3 ] ANY IN [ 1, 42 ] // true
[ 1, 2, 3 ] ANY == 2 // true
[ 1, 2, 3 ] ANY == 4 // false
[ 1, 2, 3 ] ANY > 0 // true
[ 1, 2, 3 ] ANY IN [ 4, 5, 6 ] // false
[ 1, 2, 3 ] ANY IN [ 1, 42 ] // true
[ 1, 2, 3 ] ALL IN [ 2, 3, 4 ] // false
[ 1, 2, 3 ] ALL IN [ 1, 2, 3 ] // true
[ 1, 2, 3 ] ALL > 2 // false
[ 1, 2, 3 ] ALL > 0 // true
[ 1, 2, 3 ] NONE IN [ 3 ] // false
[ 1, 2, 3 ] NONE IN [ 23, 42 ] // true
[ 1, 2, 3 ] ANY <= 1 // true
[ 1, 2, 3 ] NONE < 99 // false
[ 1, 2, 3 ] NONE > 10 // true
[ 1, 2, 3 ] ALL > 2 // false
[ 1, 2, 3 ] ALL > 0 // true
[ 1, 2, 3 ] ALL >= 3 // false
["foo", "bar"] ALL != "moo" // true
["foo", "bar"] NONE == "bar" // false
["foo", "bar"] ANY == "foo" // true
```
!SUBSUBSECTION Logical operators
@ -177,6 +182,12 @@ AQL supports the following arithmetic operators:
The unary plus and unary minus are supported as well.
For exponentiation, there is a [numeric function](Functions/Numeric.md#pow) `POW()`.
For string concatenation, you must use the [string function](Functions/String.md#concat)
`CONCAT()`. Combining two strings with a plus operator (`"foo" + "bar"`) will not work!
Also see [Common Errors](CommonErrors.md)
Some example arithmetic operations:
1 + 1
@ -189,7 +200,7 @@ Some example arithmetic operations:
The arithmetic operators accept operands of any type. This behavior has changed in
ArangoDB 2.3. Passing non-numeric values to an arithmetic operator is now allowed.
Any-non numeric operands will be casted to numbers implicitly by the operator,
Any non-numeric operands will be casted to numbers implicitly by the operator,
without making the query abort.
The *conversion to a numeric value* works as follows:

View File

@ -19,17 +19,17 @@ Here is an example document:
{
"_id" : "myusers/3456789",
"_key" : "3456789",
"_rev" : "3456789",
"firstName" : "Hugo",
"lastName" : "Schlonz",
"_rev" : "14253647",
"firstName" : "John",
"lastName" : "Doe",
"address" : {
"street" : "Street of Happiness",
"city" : "Heretown"
"street" : "Road To Nowhere 1",
"city" : "Gotham"
},
"hobbies" : [
"swimming",
"biking",
"programming"
{name: "swimming", howFavorite: 10},
{name: "biking", howFavorite: 6},
{name: "programming", howFavorite: 4}
]
}
```

View File

@ -13,10 +13,10 @@
<li>
<a href="BASE_PATH/Users/index.html">Manual</a>
</li>
<li class="active-tab">
<li>
<a href="BASE_PATH/AQL/index.html">AQL</a>
</li>
<li>
<li class="active-tab">
<a href="BASE_PATH/HTTP/index.html">HTTP</a>
</li>
<li>

View File

@ -0,0 +1 @@
!CHAPTER Deprecated

View File

@ -0,0 +1 @@
!CHAPTER Concepts

View File

@ -13,21 +13,21 @@ Any transaction only ever sees a single revision of a document.
For example:
```js
{
"firstName" : "Hugo",
"lastName" : "Schlonz",
"address" : {
"city" : "Hier",
"street" : "Strasse 1"
},
"hobbies" : [
"swimming",
"biking",
"programming"
],
"_id" : "demo/schlonz",
"_rev" : "13728680",
"_key" : "schlonz"
{
"_id" : "myusers/3456789",
"_key" : "3456789",
"_rev" : "14253647",
"firstName" : "John",
"lastName" : "Doe",
"address" : {
"street" : "Road To Nowhere 1",
"city" : "Gotham"
},
"hobbies" : [
{name: "swimming", howFavorite: 10},
{name: "biking", howFavorite: 6},
{name: "programming", howFavorite: 4}
]
}
```

View File

@ -1 +1 @@
!CHAPTER Graphs, Vertices and Edges
!CHAPTER Graphs, Vertices & Edges

View File

@ -0,0 +1,117 @@
!CHAPTER Coming from SQL
!SUBSECTION How do browse vectors translate into document queries?
In traditional SQL you may either fetch all columns of a table row by row, using
`SELECT * FROM table`, or select a subset of the columns. The list of table
columns to fetch is commonly called *column list* or *browse vector*:
```sql
SELECT columnA, columnB, columnZ FROM table
```
Since documents aren't two-dimensional, and you don't want to be limited to
returning two-dimensional lists either, the requirements for a query language are higher.
AQL is thus a little bit more complex than plain SQL at first, but offers much
more flexibility in the long run. It lets you handle arbitrarily structured
documents in convenient ways, largely modeled on the syntax used in JavaScript.
!SUBSUBSECTION Composing the documents to be returned
The AQL `RETURN` statement returns one item per document it is handed. You can
return the whole document, or just parts of it. Given that *oneDocument* is
a document (retrieved like `LET oneDocument = DOCUMENT("myusers/3456789")`
for instance), it can be returned as-is like this:
```js
RETURN oneDocument
```
```json
[
{
"_id" : "myusers/3456789",
"_key" : "3456789",
"_rev" : "14253647",
"firstName" : "John",
"lastName" : "Doe",
"address" : {
"city" : "Gotham",
"street" : "Road To Nowhere 1"
},
"hobbies" : [
{name: "swimming", howFavourite: 10},
{name: "biking", howFavourite: 6},
{name: "programming", howFavourite: 4}
]
}
]
```
Return the hobbies sub-structure only:
```js
RETURN oneDocument.hobbies
```
```json
[
[
{name: "swimming", howFavourite: 10},
{name: "biking", howFavourite: 6},
{name: "programming", howFavourite: 4}
]
]
```
Return the hobbies and the address:
```js
RETURN {
hobbies: oneDocument.hobbies,
address: oneDocument.address
}
```
```json
[
{
hobbies: [
{name: "swimming", howFavourite: 10},
{name: "biking", howFavourite: 6},
{name: "programming", howFavourite: 4}
],
address: {
"city" : "Gotham",
"street" : "Road To Nowhere 1"
}
}
]
```
Return the first hobby only:
```js
RETURN oneDocument.hobbies[0].name
```
```json
[
"swimming"
]
```
Return a list of all hobby strings:
```js
RETURN { hobbies: oneDocument.hobbies[*].name }
```
```json
[
{hobbies: ["swimming", "biking", "programming"] }
]
```
More complex [array](../../AQL/Functions/Array.md) and
[object manipulations](../../AQL/Functions/Document.md) can be done using
AQL functions and [operators](../../AQL/Operators.md).

View File

@ -1,60 +1,31 @@
!CHAPTER First Steps in ArangoDB
For installation instructions, please refer to the
[Installation Manual](../Installing/README.md).
As you know from the introduction ArangoDB is a multi-model open-source
Database. You can see the Key features below or look directly at the programs in
the ArangoDB package.
Key features include:
* *Schema-free schemata*: Lets you combine the space efficiency of MySQL with
the performance power of NoSQL
* *Application server*: Use ArangoDB as an application server and fuse your
application and database together for maximal throughput
* *JavaScript for all*: No language zoo, you can use one language from your
browser to your back-end
* *Flexible data modeling*: Model your data as combinations of key-value pairs,
documents or graphs - perfect for social relations
* *Free index choice*: Use the correct index for your problem, may it be a skip
list or a fulltext search
* *Configurable durability*: Let the application decide if it needs more
durability or more performance
* *No-nonsense storage*: ArangoDB uses all of the power of modern storage
hardware, like SSD and large caches
* *Powerful query language* (AQL) to retrieve and modify data
* *Transactions*: Run queries on multiple documents or collections with
optional transactional consistency and isolation
* *Replication*: Set up the database in a master-slave configuration
* It is open source (*Apache License 2.0*)
For more in-depth information:
* Read more on the
[Design Goals](https://www.arangodb.com/2012/03/07/avocadodbs-design-objectives)
of ArangoDB
* [Watch the video](http://vimeo.com/36411892): Martin Schönert,
architect of ArangoDB, gives an introduction of what the ArangoDB project
is about
* Or give it a [try](https://www.arangodb.com/tryitout)
!CHAPTER Getting started
!SECTION ArangoDB programs
The ArangoDB package comes with the following programs:
* _arangod_: The ArangoDB database daemon. This server program is
* `arangod`: The ArangoDB database daemon. This server program is
intended to run as a daemon process and to serve the various clients
connection to the server via TCP / HTTP. See [Details about the ArangoDB Server](../FirstSteps/Arangod.md)
* _arangosh_: The ArangoDB shell. A client that implements a
connection to the server via TCP / HTTP.
See [Details about the ArangoDB Server](../GettingStarted/Arangod.md)
* `arangosh`: The ArangoDB shell. A client that implements a
read-eval-print loop (REPL) and provides functions to access and
administrate the ArangoDB server. See [Details about the ArangoDB Shell](../FirstSteps/Arangosh.md).
* _arangoimp_: A bulk importer for the ArangoDB server.
See [Details about Arangoimp](../HttpBulkImports/Arangoimp.md).
* _arangodump_: A tool to create backups of an ArangoDB database. See
[Details about Arangodump](../HttpBulkImports/Arangodump.md).
* _arangorestore_: A tool to reload data from a backup into an ArangoDB database.
See [Details about Arangorestore](../HttpBulkImports/Arangorestore.md)
* _arango-dfdb_: A datafile debugger for ArangoDB. It is intended to be
used primarily during development of ArangoDB
administrate the ArangoDB server.
See [Details about the ArangoDB Shell](../Administration/Arangosh/README.md)
* `arangoimp`: A bulk importer for the ArangoDB server.
See [Details about Arangoimp](../Administration/Arangoimp.md)
* `arangodump`: A tool to create backups of an ArangoDB database. See
[Details about Arangodump](../Administration/Arangodump.md)
* `arangorestore`: A tool to reload data from a backup into an ArangoDB database.
See [Details about Arangorestore](../Administration/Arangorestore.md)
* `arango-dfdb`: A datafile debugger for ArangoDB. It is intended to be
used primarily during development of ArangoDB.
See [Details about Arango-DFDB](../Troubleshooting/ArangoDfdb.md)
* `arangobench`: A benchmark and test tool. It can be used for performance and
server function testing. See [Details about Arangobench](../Troubleshooting/Arangobench.md)

View File

@ -10,10 +10,10 @@
<gcse:searchbox-only></gcse:searchbox-only>
</div>
<ul id="navmenu">
<li>
<li class="active-tab">
<a href="BASE_PATH/Users/index.html">Manual</a>
</li>
<li class="active-tab">
<li>
<a href="BASE_PATH/AQL/index.html">AQL</a>
</li>
<li>

View File

@ -1,4 +1,4 @@
!SECTION How ArangoDB uses Indexes
!SECTION Index Utilization
In most cases ArangoDB will use a single index per collection in a given query. AQL queries can
use more than one index per collection when multiple FILTER conditions are combined with a

View File

@ -22,7 +22,7 @@ Key features include:
* It is **open source** (Apache License 2.0)
In this documentation you can inform yourself about all the functions, features and programs ArangoDB provides for you.
Features are ilustrated with interactive usage examples; you can cut'n'paste them into [arangosh](Arangosh/README.md) to try them out.
Features are illustrated with interactive usage examples; you can cut'n'paste them into [arangosh](Arangosh/README.md) to try them out.
The http REST-API is demonstrated with cut'n'paste recipes intended to be used with [cURL](http://curl.haxx.se).
Drivers may provide their own examples based on these .js based examples to improve understandability for their respective users.
I.e. for the [java driver](https://github.com/arangodb/arangodb-java-driver#learn-more) some of the samples are re-implemented.

View File

@ -0,0 +1 @@
!CHAPTER Release Notes

View File

@ -16,7 +16,7 @@
* [Using the Web Interface](GettingStarted/WebInterface.md)
* [Your first queries](GettingStarted/AqlQueries.md)
* [Coming from SQL](GettingStarted/ComingFromSql.md)
* [Coming from MongoDB](GettingStarted/ComingFromMongoDb.md)
#* [Coming from MongoDB](GettingStarted/ComingFromMongoDb.md)
#
* [Scalability](Scalability/README.md)
* [Joins](Scalability/Joins.md)
@ -51,8 +51,8 @@
#
* [Indexing](Indexing/README.md)
* [Index Basics](Indexing/IndexBasics.md)
* [Which Index to use when](Indexing/WhichIndex.md)
* [How ArangoDB uses indexes](Indexing/HowArangoDBUsesIndexes.md)
* [Which index to use when](Indexing/WhichIndex.md)
* [Index Utilization](Indexing/IndexUtilization.md)
* [Working with Indexes](Indexing/WorkingWithIndexes.md)
* [Hash Indexes](Indexing/Hash.md)
* [Skiplists](Indexing/Skiplist.md)
@ -71,7 +71,7 @@
* [Working with Edges](Graphs/Edges/README.md)
#
# Will be replaced by new FOTF docs
* [Foxx microservices](Foxx/README.md)
* [Foxx Microservices](Foxx/README.md)
* [In a nutshell](Foxx/Nutshell/README.md)
* [Install](Foxx/Install/README.md)
* [Github](Foxx/Install/Github.md)
@ -120,6 +120,7 @@
* [Arangodump](Administration/Arangodump.md)
* [Arangorestore](Administration/Arangorestore.md)
* [Upgrading](Administration/Upgrading/README.md)
* [Upgrading to 3.0](Administration/Upgrading/Upgrading30.md)
* [Upgrading to 2.8](Administration/Upgrading/Upgrading28.md)
* [Upgrading to 2.6](Administration/Upgrading/Upgrading26.md)
* [Upgrading to 2.5](Administration/Upgrading/Upgrading25.md)
@ -151,7 +152,7 @@
* [Troubleshooting](Troubleshooting/README.md)
* [Emergency Console](Troubleshooting/EmergencyConsole.md)
* [Datafile Debugger](Troubleshooting/DatafileDebugger.md)
* [Arangob](Troubleshooting/Arangob.md)
* [Arangobench](Troubleshooting/Arangobench.md)
#
* [Architecture](Architecture/README.md)
* [Write-ahead log](Architecture/WriteAheadLog.md)
@ -167,6 +168,7 @@
* [Whats New in 2.2](ReleaseNotes/NewFeatures22.md)
* [Whats New in 2.1](ReleaseNotes/NewFeatures21.md)
# Place next to Upgrading instead?
* [Incompatible changes in 3.0](ReleaseNotes/UpgradingChanges30.md)
* [Incompatible changes in 2.8](ReleaseNotes/UpgradingChanges28.md)
* [Incompatible changes in 2.7](ReleaseNotes/UpgradingChanges27.md)
* [Incompatible changes in 2.6](ReleaseNotes/UpgradingChanges26.md)
@ -176,15 +178,15 @@
#
* [Appendix](Appendix/README.md)
* [JavaScript Modules](Appendix/JavaScriptModules/README.md)
* ["console"](Appendix/Modules/Console.md)
* ["fs"](Appendix/Modules/FS.md)
* ["process"](Appendix/Modules/Process.md)
* ["request"](Appendix/Modules/Request.md)
* ["actions"](Appendix/Modules/Actions.md)
* ["queries"](Appendix/Modules/Queries.md)
* ["planner"](Appendix/Modules/Planner.md)
* [Write-ahead log](Appendix/Modules/WAL.md)
* [Task Management](Appendix/Modules/Tasks.md)
* ["console"](Appendix/JavaScriptModules/Console.md)
* ["fs"](Appendix/JavaScriptModules/FS.md)
* ["process"](Appendix/JavaScriptModules/Process.md)
* ["request"](Appendix/JavaScriptModules/Request.md)
* ["actions"](Appendix/JavaScriptModules/Actions.md)
* ["queries"](Appendix/JavaScriptModules/Queries.md)
* ["planner"](Appendix/JavaScriptModules/Planner.md)
* [Write-ahead log](Appendix/JavaScriptModules/WAL.md)
* [Task Management](Appendix/JavaScriptModules/Tasks.md)
* [Deprecated](Appendix/Deprecated/README.md)
* [Simple Queries](Appendix/Deprecated/SimpleQueries/README.md)
* [Sequential Access](Appendix/Deprecated/SimpleQueries/Access.md)
@ -197,5 +199,5 @@
* [Json Objects](Appendix/Deprecated/Actions/JsonExample.md)
* [Modifying](Appendix/Deprecated/Actions/Modifying.md)
# Link to here from arangosh, actions, foxx, transactions
* [Error codes and meanings](Appendix/ErrorCodes/README.md)
* [Error codes and meanings](Appendix/ErrorCodes.md)
* [Glossary](Appendix/Glossary.md)

View File

@ -6,7 +6,12 @@ import inspect
validExtensions = (".cpp", ".h", ".js", ".mdpp", ".md")
# specify the paths in which docublocks are searched. note that js/apps/* must not be included because it contains js/apps/system/
# and that path also contains copies of some files present in js/ anyway.
searchPaths = ["Documentation/Books/Users/", "Documentation/DocuBlocks"]
searchPaths = [
"Documentation/Books/Users/",
"Documentation/Books/AQL/",
"Documentation/Books/HTTP/",
"Documentation/DocuBlocks/"
]
fullSuccess = True
def file_content(filepath):

View File

@ -4,9 +4,11 @@ set -e
echo
echo "$0: loading precompiled libraries"
V8_VERSION=`/bin/ls 3rdParty/V8/|grep V8 |sed "s;V8-;;"`
wget \
-O 3rdParty.tar.gz \
"https://www.arangodb.com/support-files/travisCI/precompiled-libraries-4.9.391.tar.gz"
"https://www.arangodb.com/support-files/travisCI/precompiled-libraries-${V8_VERSION}.tar.gz"
tar xzf 3rdParty.tar.gz

View File

@ -244,47 +244,6 @@ BOOST_AUTO_TEST_CASE (tst_absolute_paths) {
#endif
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test slurp file
////////////////////////////////////////////////////////////////////////////////
BOOST_AUTO_TEST_CASE (tst_slurp) {
size_t length;
char* filename;
char* result;
filename = TRI_Concatenate2File(_directory.c_str(), "files-unittest.tmp");
// remove file if it exists
TRI_UnlinkFile(filename);
// non-existing file
result = TRI_SlurpFile(TRI_CORE_MEM_ZONE, filename, &length);
BOOST_CHECK_EQUAL((char*) 0, result);
TRI_json_t* json = TRI_JsonString(TRI_CORE_MEM_ZONE, "{ \"this\" : true, \"is\" : [ \"a\", \"test\" ] }");
bool ok = TRI_SaveJson(filename, json, false);
BOOST_CHECK_EQUAL(true, ok);
// file exists now
result = TRI_SlurpFile(TRI_CORE_MEM_ZONE, filename, &length);
BOOST_CHECK_EQUAL(0, strcmp("{\"this\":true,\"is\":[\"a\",\"test\"]}\n", result));
BOOST_CHECK_EQUAL(length, strlen("{\"this\":true,\"is\":[\"a\",\"test\"]}\n"));
TRI_Free(TRI_CORE_MEM_ZONE, result);
// test without length
length = 42;
result = TRI_SlurpFile(TRI_CORE_MEM_ZONE, filename, 0);
BOOST_CHECK_EQUAL(0, strcmp("{\"this\":true,\"is\":[\"a\",\"test\"]}\n", result));
BOOST_CHECK_EQUAL(42, (int) length);
TRI_Free(TRI_CORE_MEM_ZONE, result);
TRI_FreeJson(TRI_CORE_MEM_ZONE, json);
TRI_Free(TRI_CORE_MEM_ZONE, filename);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief generate tests
////////////////////////////////////////////////////////////////////////////////

View File

@ -306,7 +306,7 @@ BOOST_AUTO_TEST_CASE (tst_json_hashattributes_mult2) {
const char* v1[] = { "a", "b" };
uint64_t const h1 = 12539197276825819752ULL;
uint64_t const h1 = 18170770464635016704ULL;
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"foo\", \"b\": \"bar\" }");
BOOST_CHECK_EQUAL(h1, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
@ -316,31 +316,31 @@ BOOST_AUTO_TEST_CASE (tst_json_hashattributes_mult2) {
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"food\", \"b\": \"bar\" }");
BOOST_CHECK_EQUAL(16541519083655723759ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(9898865118744151582ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"foo\", \"b\": \"baz\" }");
BOOST_CHECK_EQUAL(7656993273597287052ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(4146172384428429960ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"FOO\", \"b\": \"BAR\" }");
BOOST_CHECK_EQUAL(17704521675288781581ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(1969665727812990435ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"foo\" }");
BOOST_CHECK_EQUAL(13052740859585980364ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(17850048730013513424ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"a\": \"foo\", \"b\": \"meow\" }");
BOOST_CHECK_EQUAL(5511414856106770809ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(828267433082628493ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"b\": \"bar\" }");
BOOST_CHECK_EQUAL(455614752263261981ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(8536899277477494659ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
json = TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, "{ \"b\": \"bar\", \"a\": \"meow\" }");
BOOST_CHECK_EQUAL(1842251108617319700ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
BOOST_CHECK_EQUAL(6298354225815173479ULL, TRI_HashJsonByAttributes(json, v1, 2, true, error));
FREE_JSON
}

View File

@ -47,9 +47,9 @@ AqlValue::AqlValue(TRI_doc_mptr_t const* mptr) {
uint64_t AqlValue::hash(arangodb::AqlTransaction* trx) const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
// we must use the slow hash function here, because a value may have
// different representations in case its an array/object/number
return slice().normalizedHash();
@ -138,9 +138,9 @@ bool AqlValue::isArray() const {
size_t AqlValue::length() const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
return slice().length();
}
case DOCVEC: {
@ -160,11 +160,11 @@ AqlValue AqlValue::at(int64_t position, bool& mustDestroy,
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isArray()) {
int64_t const n = static_cast<int64_t>(s.length());
@ -230,6 +230,117 @@ AqlValue AqlValue::at(int64_t position, bool& mustDestroy,
// default is to return null
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}
/// @brief get the _key attribute from an object/document
AqlValue AqlValue::getKeyAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool doCopy) const {
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isObject()) {
VPackSlice found = Transaction::extractKeyFromDocument(s);
if (!found.isNone()) {
if (doCopy || found.byteSize() < sizeof(_data.internal)) {
mustDestroy = true;
return AqlValue(found);
}
// return a reference to an existing slice
return AqlValue(found.begin());
}
}
// fall-through intentional
break;
}
case DOCVEC:
case RANGE: {
// will return null
break;
}
}
// default is to return null
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}
/// @brief get the _from attribute from an object/document
AqlValue AqlValue::getFromAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool doCopy) const {
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isObject()) {
VPackSlice found = Transaction::extractFromFromDocument(s);
if (!found.isNone()) {
if (doCopy || found.byteSize() < sizeof(_data.internal)) {
mustDestroy = true;
return AqlValue(found);
}
// return a reference to an existing slice
return AqlValue(found.begin());
}
}
// fall-through intentional
break;
}
case DOCVEC:
case RANGE: {
// will return null
break;
}
}
// default is to return null
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}
/// @brief get the _to attribute from an object/document
AqlValue AqlValue::getToAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool doCopy) const {
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isObject()) {
VPackSlice found = Transaction::extractToFromDocument(s);
if (!found.isNone()) {
if (doCopy || found.byteSize() < sizeof(_data.internal)) {
mustDestroy = true;
return AqlValue(found);
}
// return a reference to an existing slice
return AqlValue(found.begin());
}
}
// fall-through intentional
break;
}
case DOCVEC:
case RANGE: {
// will return null
break;
}
}
// default is to return null
return AqlValue(arangodb::basics::VelocyPackHelper::NullValue());
}
/// @brief get the (object) element by name
AqlValue AqlValue::get(arangodb::AqlTransaction* trx,
@ -238,11 +349,11 @@ AqlValue AqlValue::get(arangodb::AqlTransaction* trx,
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isObject()) {
VPackSlice found(s.get(name));
@ -281,11 +392,11 @@ AqlValue AqlValue::get(arangodb::AqlTransaction* trx,
mustDestroy = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
doCopy = false;
// fall-through intentional
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isObject()) {
VPackSlice found(s.get(names));
@ -322,9 +433,9 @@ bool AqlValue::hasKey(arangodb::AqlTransaction* trx,
std::string const& name) const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
return (s.isObject() && s.hasKey(name));
}
@ -348,9 +459,9 @@ double AqlValue::toDouble(bool& failed) const {
failed = false;
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isNull()) {
return 0.0;
@ -416,9 +527,9 @@ double AqlValue::toDouble(bool& failed) const {
int64_t AqlValue::toInt64() const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isNumber()) {
return s.getNumber<int64_t>();
@ -469,9 +580,9 @@ int64_t AqlValue::toInt64() const {
bool AqlValue::toBoolean() const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(slice());
if (s.isBoolean()) {
return s.getBoolean();
@ -564,9 +675,9 @@ v8::Handle<v8::Value> AqlValue::toV8(
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackOptions* options = trx->transactionContext()->getVPackOptions();
return TRI_VPackToV8(isolate, slice(), options);
}
@ -609,9 +720,9 @@ void AqlValue::toVelocyPack(AqlTransaction* trx,
bool resolveExternals) const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
if (resolveExternals) {
arangodb::basics::VelocyPackHelper::SanitizeExternals(slice(), builder);
} else {
@ -646,9 +757,9 @@ void AqlValue::toVelocyPack(AqlTransaction* trx,
AqlValue AqlValue::materialize(AqlTransaction* trx, bool& hasCopied, bool resolveExternals) const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
hasCopied = false;
return *this;
}
@ -670,14 +781,14 @@ AqlValue AqlValue::materialize(AqlTransaction* trx, bool& hasCopied, bool resolv
AqlValue AqlValue::clone() const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER: {
case VPACK_DOCUMENT_PART: {
return AqlValue(_data.pointer);
}
case VPACK_INLINE: {
// copy internal data
return AqlValue(slice());
}
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
// copy buffer
VPackValueLength length = _data.buffer->size();
auto buffer = new VPackBuffer<uint8_t>(length);
@ -713,12 +824,12 @@ AqlValue AqlValue::clone() const {
void AqlValue::destroy() {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE: {
// nothing to do
break;
}
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
delete _data.buffer;
erase(); // to prevent duplicate deletion
break;
@ -742,7 +853,7 @@ void AqlValue::destroy() {
VPackSlice AqlValue::slice() const {
switch (type()) {
case VPACK_DOCUMENT:
case VPACK_POINTER: {
case VPACK_DOCUMENT_PART: {
return VPackSlice(_data.pointer);
}
case VPACK_INLINE: {
@ -752,7 +863,7 @@ VPackSlice AqlValue::slice() const {
}
return s;
}
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
VPackSlice s(_data.buffer->data());
if (s.isExternal()) {
s = VPackSlice(s.getExternal());
@ -848,9 +959,9 @@ int AqlValue::Compare(arangodb::AqlTransaction* trx, AqlValue const& left,
switch (leftType) {
case VPACK_DOCUMENT:
case VPACK_POINTER:
case VPACK_DOCUMENT_PART:
case VPACK_INLINE:
case VPACK_EXTERNAL: {
case VPACK_MANAGED: {
return arangodb::basics::VelocyPackHelper::compare(left.slice(), right.slice(), compareUtf8, options);
}
case DOCVEC: {

View File

@ -19,6 +19,7 @@
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Max Neunhoeffer
/// @author Jan Steemann
////////////////////////////////////////////////////////////////////////////////
#ifndef ARANGOD_AQL_AQL_VALUE_H
@ -52,9 +53,9 @@ struct AqlValue final {
/// @brief AqlValueType, indicates what sort of value we have
enum AqlValueType : uint8_t {
VPACK_DOCUMENT, // contains a pointer to a vpack document, memory is not managed!
VPACK_POINTER, // contains a pointer to a vpack value, memory is not managed!
VPACK_DOCUMENT_PART, // contains a pointer to a vpack document sub-value, memory is not managed!
VPACK_INLINE, // contains vpack data, inline
VPACK_EXTERNAL, // contains vpack, via pointer to a managed buffer
VPACK_MANAGED, // contains vpack, via pointer to a managed buffer
DOCVEC, // a vector of blocks of results coming from a subquery, managed
RANGE // a pointer to a range remembering lower and upper bound, managed
};
@ -63,13 +64,13 @@ struct AqlValue final {
/// The last byte of this union (_data.internal[15]) will be used to identify
/// the type of the contained data:
///
/// VPACK_POINTER: data may be referenced via a pointer to a VPack slice
/// VPACK_DOCUMENT_PART: data may be referenced via a pointer to a VPack slice
/// existing somewhere in memory. The AqlValue is not responsible for managing
/// this memory.
/// VPACK_INLINE: VPack values with a size less than 16 bytes can be stored
/// directly inside the data.internal structure. All data is stored inline,
/// so there is no need for memory management.
/// VPACK_EXTERNAL: all values of a larger size will be stored in
/// VPACK_MANAGED: all values of a larger size will be stored in
/// _data.external via a managed VPackBuffer object. The Buffer is managed
/// by the AqlValue.
/// DOCVEC: a managed vector of AqlItemBlocks, for storing subquery results.
@ -99,7 +100,7 @@ struct AqlValue final {
// construct from pointer
explicit AqlValue(uint8_t const* pointer) {
_data.pointer = pointer;
setType(AqlValueType::VPACK_POINTER);
setType(AqlValueType::VPACK_DOCUMENT_PART);
}
// construct from docvec, taking over its ownership
@ -123,7 +124,7 @@ struct AqlValue final {
// construct from Buffer, taking over its ownership
explicit AqlValue(arangodb::velocypack::Buffer<uint8_t>* buffer) {
_data.buffer = buffer;
setType(AqlValueType::VPACK_EXTERNAL);
setType(AqlValueType::VPACK_MANAGED);
}
// construct from Builder
@ -150,7 +151,7 @@ struct AqlValue final {
/// @brief whether or not the value must be destroyed
inline bool requiresDestruction() const {
AqlValueType t = type();
return (t == VPACK_EXTERNAL || t == DOCVEC || t == RANGE);
return (t == VPACK_MANAGED || t == DOCVEC || t == RANGE);
}
/// @brief whether or not the value is empty / none
@ -202,6 +203,16 @@ struct AqlValue final {
/// @brief get the (array) element at position
AqlValue at(int64_t position, bool& mustDestroy, bool copy) const;
/// @brief get the _key attribute from an object/document
AqlValue getKeyAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool copy) const;
/// @brief get the _from attribute from an object/document
AqlValue getFromAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool copy) const;
/// @brief get the _to attribute from an object/document
AqlValue getToAttribute(arangodb::AqlTransaction* trx,
bool& mustDestroy, bool copy) const;
/// @brief get the (object) element by name(s)
AqlValue get(arangodb::AqlTransaction* trx,
std::string const& name, bool& mustDestroy, bool copy) const;
@ -253,8 +264,8 @@ struct AqlValue final {
bool resolveExternals) const;
/// @brief return the slice for the value
/// this will throw if the value type is not VPACK_POINTER, VPACK_INLINE or
/// VPACK_EXTERNAL
/// this will throw if the value type is not VPACK_DOCUMENT_PART, VPACK_INLINE or
/// VPACK_MANAGED
arangodb::velocypack::Slice slice() const;
/// @brief clone a value
@ -294,7 +305,7 @@ struct AqlValue final {
void initFromSlice(arangodb::velocypack::Slice const& slice) {
if (slice.isExternal()) {
_data.pointer = VPackSlice(slice.getExternal()).start();
setType(AqlValueType::VPACK_POINTER);
setType(AqlValueType::VPACK_DOCUMENT_PART);
return;
}
arangodb::velocypack::ValueLength length = slice.byteSize();
@ -306,7 +317,7 @@ struct AqlValue final {
// Use external
_data.buffer = new arangodb::velocypack::Buffer<uint8_t>(length);
_data.buffer->append(reinterpret_cast<char const*>(slice.begin()), length);
setType(AqlValueType::VPACK_EXTERNAL);
setType(AqlValueType::VPACK_MANAGED);
}
}
@ -372,13 +383,13 @@ struct hash<arangodb::aql::AqlValue> {
size_t res = intHash(type);
switch (type) {
case arangodb::aql::AqlValue::VPACK_DOCUMENT:
case arangodb::aql::AqlValue::VPACK_POINTER: {
case arangodb::aql::AqlValue::VPACK_DOCUMENT_PART: {
return res ^ ptrHash(x._data.pointer);
}
case arangodb::aql::AqlValue::VPACK_INLINE: {
return res ^ static_cast<size_t>(arangodb::velocypack::Slice(&x._data.internal[0]).hash());
}
case arangodb::aql::AqlValue::VPACK_EXTERNAL: {
case arangodb::aql::AqlValue::VPACK_MANAGED: {
return res ^ ptrHash(x._data.buffer);
}
case arangodb::aql::AqlValue::DOCVEC: {
@ -404,13 +415,13 @@ struct equal_to<arangodb::aql::AqlValue> {
}
switch (type) {
case arangodb::aql::AqlValue::VPACK_DOCUMENT:
case arangodb::aql::AqlValue::VPACK_POINTER: {
case arangodb::aql::AqlValue::VPACK_DOCUMENT_PART: {
return a._data.pointer == b._data.pointer;
}
case arangodb::aql::AqlValue::VPACK_INLINE: {
return arangodb::velocypack::Slice(&a._data.internal[0]).equals(arangodb::velocypack::Slice(&b._data.internal[0]));
}
case arangodb::aql::AqlValue::VPACK_EXTERNAL: {
case arangodb::aql::AqlValue::VPACK_MANAGED: {
return a._data.buffer == b._data.buffer;
}
case arangodb::aql::AqlValue::DOCVEC: {

View File

@ -24,6 +24,7 @@
#include "AttributeAccessor.h"
#include "Aql/AqlItemBlock.h"
#include "Aql/Variable.h"
#include "Basics/StaticStrings.h"
#include "Basics/VelocyPackHelper.h"
#include "Utils/AqlTransaction.h"
@ -33,9 +34,23 @@ using namespace arangodb::aql;
AttributeAccessor::AttributeAccessor(
std::vector<std::string> const& attributeParts, Variable const* variable)
: _attributeParts(attributeParts),
_variable(variable) {
_variable(variable),
_type(EXTRACT_MULTI) {
TRI_ASSERT(_variable != nullptr);
// determine accessor type
if (_attributeParts.size() == 1) {
if (attributeParts[0] == StaticStrings::KeyString) {
_type = EXTRACT_KEY;
} else if (attributeParts[0] == StaticStrings::FromString) {
_type = EXTRACT_FROM;
} else if (attributeParts[0] == StaticStrings::ToString) {
_type = EXTRACT_TO;
} else {
_type = EXTRACT_SINGLE;
}
}
}
/// @brief replace the variable in the accessor
@ -58,12 +73,19 @@ AqlValue AttributeAccessor::get(arangodb::AqlTransaction* trx,
for (auto it = vars.begin(); it != vars.end(); ++it, ++i) {
if ((*it)->id == _variable->id) {
// get the AQL value
if (_attributeParts.size() == 1) {
// use optimized version for single attribute (e.g. variable.attr)
return argv->getValueReference(startPos, regs[i]).get(trx, _attributeParts[0], mustDestroy, true);
} else {
// use general version for multiple attributes (e.g. variable.attr.subattr)
return argv->getValueReference(startPos, regs[i]).get(trx, _attributeParts, mustDestroy, true);
switch (_type) {
case EXTRACT_KEY:
return argv->getValueReference(startPos, regs[i]).getKeyAttribute(trx, mustDestroy, true);
case EXTRACT_FROM:
return argv->getValueReference(startPos, regs[i]).getFromAttribute(trx, mustDestroy, true);
case EXTRACT_TO:
return argv->getValueReference(startPos, regs[i]).getToAttribute(trx, mustDestroy, true);
case EXTRACT_SINGLE:
// use optimized version for single attribute (e.g. variable.attr)
return argv->getValueReference(startPos, regs[i]).get(trx, _attributeParts[0], mustDestroy, true);
case EXTRACT_MULTI:
// use general version for multiple attributes (e.g. variable.attr.subattr)
return argv->getValueReference(startPos, regs[i]).get(trx, _attributeParts, mustDestroy, true);
}
}
// fall-through intentional

View File

@ -50,6 +50,13 @@ class AttributeAccessor {
void replaceVariable(std::unordered_map<VariableId, Variable const*> const& replacements);
private:
enum AccessorType {
EXTRACT_KEY,
EXTRACT_FROM,
EXTRACT_TO,
EXTRACT_SINGLE,
EXTRACT_MULTI
};
/// @brief the attribute names vector (e.g. [ "a", "b", "c" ] for a.b.c)
std::vector<std::string> const _attributeParts;
@ -57,6 +64,8 @@ class AttributeAccessor {
/// @brief the accessed variable
Variable const* _variable;
/// @brief type of the accessor
AccessorType _type;
};
} // namespace arangodb::aql

View File

@ -567,7 +567,11 @@ QueryResult Query::execute(QueryRegistry* registry) {
}
AqlItemBlock* value = nullptr;
auto resultBuilder = std::make_shared<VPackBuilder>();
VPackOptions options = VPackOptions::Defaults;
options.buildUnindexedArrays = true;
options.buildUnindexedObjects = true;
auto resultBuilder = std::make_shared<VPackBuilder>(&options);
try {
resultBuilder->openArray();
// this is the RegisterId our results can be found in
@ -728,8 +732,12 @@ QueryResultV8 Query::executeV8(v8::Isolate* isolate, QueryRegistry* registry) {
try {
if (useQueryCache) {
VPackOptions options = VPackOptions::Defaults;
options.buildUnindexedArrays = true;
options.buildUnindexedObjects = true;
// iterate over result, return it and store it in query cache
auto builder = std::make_shared<VPackBuilder>();
auto builder = std::make_shared<VPackBuilder>(&options);
builder->openArray();
uint32_t j = 0;

View File

@ -117,8 +117,7 @@ class ClusterTraverser : public Traverser {
/// @brief internal cursor to enumerate the paths of a graph
//////////////////////////////////////////////////////////////////////////////
std::unique_ptr<arangodb::basics::PathEnumerator<std::string, std::string,
size_t>> _enumerator;
std::unique_ptr<arangodb::basics::PathEnumerator<std::string, std::string, size_t>> _enumerator;
};
class ClusterTraversalPath : public TraversalPath {

View File

@ -138,31 +138,6 @@ void HeartbeatThread::runDBServer() {
return true;
};
std::function<bool(VPackSlice const& result)> updateCurrent = [&](
VPackSlice const& result) {
if (!result.isNumber()) {
LOG(ERR) << "Current Version is not a number! " << result.toJson();
return false;
}
uint64_t version = result.getNumber<uint64_t>();
bool doSync = false;
{
MUTEX_LOCKER(mutexLocker, _statusLock);
if (version > _desiredVersions.current) {
_desiredVersions.current = version;
LOG(DEBUG) << "Desired Current Version is now " << _desiredVersions.current;
doSync = true;
}
}
if (doSync) {
syncDBServerStatusQuo();
}
return true;
};
auto planAgencyCallback = std::make_shared<AgencyCallback>(
_agency, "Plan/Version", updatePlan, true);
@ -175,18 +150,6 @@ void HeartbeatThread::runDBServer() {
}
}
auto currentAgencyCallback = std::make_shared<AgencyCallback>(
_agency, "Current/Version", updateCurrent, true);
registered = false;
while (!registered) {
registered = _agencyCallbackRegistry->registerCallback(currentAgencyCallback);
if (!registered) {
LOG(ERR) << "Couldn't register current change in agency!";
sleep(1);
}
}
while (!isStopping()) {
LOG(DEBUG) << "sending heartbeat to agency";
@ -232,7 +195,6 @@ void HeartbeatThread::runDBServer() {
if (!wasNotified) {
LOG(TRACE) << "Lock reached timeout";
planAgencyCallback->refetchAndUpdate();
currentAgencyCallback->refetchAndUpdate();
} else {
// mop: a plan change returned successfully...
// recheck and redispatch in case our desired versions increased
@ -244,7 +206,6 @@ void HeartbeatThread::runDBServer() {
}
_agencyCallbackRegistry->unregisterCallback(planAgencyCallback);
_agencyCallbackRegistry->unregisterCallback(currentAgencyCallback);
int count = 0;
while (++count < 3000) {
bool isInPlanChange;

View File

@ -43,7 +43,7 @@ struct AgencyVersions {
AgencyVersions(uint64_t _plan, uint64_t _current) : plan(_plan), current(_plan) {}
AgencyVersions(const ServerJobResult& result)
explicit AgencyVersions(const ServerJobResult& result)
: plan(result.planVersion),
current(result.currentVersion) {
}

View File

@ -54,7 +54,7 @@ static uint64_t HashElementKey(void*, VPackSlice const* key) {
}
// we can get away with the fast hash function here, as edge
// index values are restricted to strings
return key->hash(hash);
return key->hashString(hash);
}
////////////////////////////////////////////////////////////////////////////////
@ -76,7 +76,7 @@ static uint64_t HashElementEdgeFrom(void*, TRI_doc_mptr_t const* mptr,
TRI_ASSERT(tmp.isString());
// we can get away with the fast hash function here, as edge
// index values are restricted to strings
hash = tmp.hash(hash);
hash = tmp.hashString(hash);
}
return hash;
}
@ -100,7 +100,7 @@ static uint64_t HashElementEdgeTo(void*, TRI_doc_mptr_t const* mptr,
TRI_ASSERT(tmp.isString());
// we can get away with the fast hash function here, as edge
// index values are restricted to strings
hash = tmp.hash(hash);
hash = tmp.hashString(hash);
}
return hash;
}
@ -185,19 +185,12 @@ static bool IsEqualElementEdgeToByKey(void*,
}
TRI_doc_mptr_t* EdgeIndexIterator::next() {
while (true) {
if (_position >= static_cast<size_t>(_keys.length())) {
// we're at the end of the lookup values
return nullptr;
}
while (_iterator.valid()) {
if (_buffer.empty()) {
// We start a new lookup
_posInBuffer = 0;
// TODO: can we use an ArrayIterator with linear access here?
// at() will recalculate the object length etc. on every call
VPackSlice tmp = _keys.at(_position);
VPackSlice tmp = _iterator.value();
if (tmp.isObject()) {
tmp = tmp.get(TRI_SLICE_KEY_EQUAL);
}
@ -218,22 +211,18 @@ TRI_doc_mptr_t* EdgeIndexIterator::next() {
}
// found no result. now go to next lookup value in _keys
++_position;
_iterator.next();
}
return nullptr;
}
void EdgeIndexIterator::nextBabies(std::vector<TRI_doc_mptr_t*>& buffer, size_t limit) {
size_t atMost = _batchSize > limit ? limit : _batchSize;
while (true) {
if (_position >= static_cast<size_t>(_keys.length())) {
// we're at the end of the lookup values
buffer.clear();
return;
}
while (_iterator.valid()) {
if (buffer.empty()) {
VPackSlice tmp = _keys.at(_position);
VPackSlice tmp = _iterator.value();
if (tmp.isObject()) {
tmp = tmp.get(TRI_SLICE_KEY_EQUAL);
}
@ -250,20 +239,19 @@ void EdgeIndexIterator::nextBabies(std::vector<TRI_doc_mptr_t*>& buffer, size_t
if (!buffer.empty()) {
// found something
return;
//return buffer.at(_posInBuffer++);
}
// found no result. now go to next lookup value in _keys
++_position;
_iterator.next();
}
buffer.clear();
}
void EdgeIndexIterator::reset() {
_position = 0;
_posInBuffer = 0;
_buffer.clear();
_iterator.reset(true);
}
TRI_doc_mptr_t* AnyDirectionEdgeIndexIterator::next() {

View File

@ -31,6 +31,9 @@
#include "VocBase/vocbase.h"
#include "VocBase/voc-types.h"
#include <velocypack/Iterator.h>
#include <velocypack/Slice.h>
namespace arangodb {
class EdgeIndexIterator final : public IndexIterator {
@ -52,7 +55,7 @@ class EdgeIndexIterator final : public IndexIterator {
_index(index),
_searchValues(searchValues),
_keys(_searchValues.slice()),
_position(0),
_iterator(_keys, true),
_posInBuffer(0),
_batchSize(1000) {}
@ -63,7 +66,7 @@ class EdgeIndexIterator final : public IndexIterator {
_index(index),
_searchValues(arangodb::velocypack::Builder::clone(searchValues)),
_keys(_searchValues.slice()),
_position(0),
_iterator(_keys, true),
_posInBuffer(0),
_batchSize(1000) {}
@ -72,7 +75,7 @@ class EdgeIndexIterator final : public IndexIterator {
TRI_EdgeIndexHash_t const* _index;
arangodb::velocypack::Builder const _searchValues;
arangodb::velocypack::Slice const _keys;
size_t _position;
arangodb::velocypack::ArrayIterator _iterator;
std::vector<TRI_doc_mptr_t*> _buffer;
size_t _posInBuffer;
size_t _batchSize;

View File

@ -110,7 +110,7 @@ static bool IsEqualKeyElementHash(
TRI_doc_mptr_t* HashIndexIterator::next() {
while (true) {
if (_posInBuffer >= _buffer.size()) {
if (_position >= _numLookups) {
if (!_iterator.valid()) {
// we're at the end of the lookup values
return nullptr;
}
@ -120,7 +120,8 @@ TRI_doc_mptr_t* HashIndexIterator::next() {
_posInBuffer = 0;
int res = TRI_ERROR_NO_ERROR;
_index->lookup(_trx, _searchKeys.at(_position++), _buffer);
_index->lookup(_trx, _iterator.value(), _buffer);
_iterator.next();
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
@ -136,8 +137,8 @@ TRI_doc_mptr_t* HashIndexIterator::next() {
void HashIndexIterator::reset() {
_buffer.clear();
_position = 0;
_posInBuffer = 0;
_iterator.reset(true);
}
////////////////////////////////////////////////////////////////////////////////

View File

@ -33,6 +33,7 @@
#include "VocBase/vocbase.h"
#include "VocBase/voc-types.h"
#include <velocypack/Iterator.h>
#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>
@ -59,17 +60,16 @@ class HashIndexIterator final : public IndexIterator {
: _trx(trx),
_index(index),
_searchValues(searchValues.get()),
_position(0),
_searchKeys(_searchValues->slice()),
_iterator(_searchValues->slice(), true),
_buffer(),
_posInBuffer(0) {
searchValues.release(); // now we have ownership for searchValues
_searchKeys = _searchValues->slice();
_numLookups = static_cast<size_t>(_searchKeys.length());
}
~HashIndexIterator() {
searchValues.release(); // now we have ownership for searchValues
}
~HashIndexIterator() = default;
TRI_doc_mptr_t* next() override;
void reset() override;
@ -79,8 +79,7 @@ class HashIndexIterator final : public IndexIterator {
HashIndex const* _index;
std::unique_ptr<arangodb::velocypack::Builder> _searchValues;
arangodb::velocypack::Slice _searchKeys;
size_t _position;
size_t _numLookups;
arangodb::velocypack::ArrayIterator _iterator;
std::vector<TRI_doc_mptr_t*> _buffer;
size_t _posInBuffer;
};

View File

@ -41,11 +41,10 @@ using namespace arangodb;
static inline uint64_t HashKey(void* userData, uint8_t const* key) {
// can use fast hash-function here, as index values are restricted to strings
return VPackSlice(key).hash();
return VPackSlice(key).hashString();
}
static inline uint64_t HashElement(void*,
TRI_doc_mptr_t const* element) {
static inline uint64_t HashElement(void*, TRI_doc_mptr_t const* element) {
return element->getHash();
}
@ -79,17 +78,9 @@ static bool IsEqualElementElement(void*, TRI_doc_mptr_t const* left,
}
TRI_doc_mptr_t* PrimaryIndexIterator::next() {
VPackSlice slice = _keys->slice();
while (true) {
if (_position >= static_cast<size_t>(slice.length())) {
// we're at the end of the lookup values
return nullptr;
}
// TODO: can we use an ArrayIterator with linear access here?
// at() will recalculate the array length etc. on every call
auto result = _index->lookup(_trx, slice.at(_position++));
while (_iterator.valid()) {
auto result = _index->lookup(_trx, _iterator.value());
_iterator.next();
if (result != nullptr) {
// found a result
@ -98,9 +89,11 @@ TRI_doc_mptr_t* PrimaryIndexIterator::next() {
// found no result. now go to next lookup value in _keys
}
return nullptr;
}
void PrimaryIndexIterator::reset() { _position = 0; }
void PrimaryIndexIterator::reset() { _iterator.reset(true); }
TRI_doc_mptr_t* AllIndexIterator::next() {
if (_reverse) {
@ -370,7 +363,7 @@ int PrimaryIndex::resize(arangodb::Transaction* trx, size_t targetSize) {
uint64_t PrimaryIndex::calculateHash(arangodb::Transaction* trx,
VPackSlice const& slice) {
// can use fast hash-function here, as index values are restricted to strings
return slice.hash();
return slice.hashString();
}
uint64_t PrimaryIndex::calculateHash(arangodb::Transaction* trx,

View File

@ -31,6 +31,7 @@
#include "VocBase/vocbase.h"
#include "VocBase/voc-types.h"
#include <velocypack/Iterator.h>
#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>
@ -43,7 +44,12 @@ class PrimaryIndexIterator final : public IndexIterator {
public:
PrimaryIndexIterator(arangodb::Transaction* trx, PrimaryIndex const* index,
std::unique_ptr<VPackBuilder>& keys)
: _trx(trx), _index(index), _keys(keys.get()), _position(0) {
: _trx(trx),
_index(index),
_keys(keys.get()),
_iterator(_keys->slice(), true),
_position(0) {
keys.release(); // now we have ownership for _keys
TRI_ASSERT(_keys->slice().isArray());
}
@ -58,6 +64,7 @@ class PrimaryIndexIterator final : public IndexIterator {
arangodb::Transaction* _trx;
PrimaryIndex const* _index;
std::unique_ptr<VPackBuilder> _keys;
arangodb::velocypack::ArrayIterator _iterator;
size_t _position;
};

View File

@ -27,12 +27,11 @@
#include "Basics/Exceptions.h"
#include "Basics/MutexLocker.h"
#include "Basics/StaticStrings.h"
#include "Basics/VelocyPackDumper.h"
#include "Basics/VelocyPackHelper.h"
#include "Basics/VPackStringBufferAdapter.h"
#include "Utils/Cursor.h"
#include "Utils/CursorRepository.h"
#include <velocypack/Dumper.h>
#include <velocypack/Iterator.h>
#include <velocypack/Value.h>
#include <velocypack/velocypack-aliases.h>
@ -153,11 +152,29 @@ void RestCursorHandler::processQuery(VPackSlice const& slice) {
// result is smaller than batchSize and will be returned directly. no need
// to create a cursor
VPackBuilder result;
VPackOptions options = VPackOptions::Defaults;
options.buildUnindexedArrays = true;
options.buildUnindexedObjects = true;
// conservatively allocate a few bytes per value to be returned
int res;
if (n >= 10000) {
res = _response->body().reserve(128 * 1024);
} else if (n >= 1000) {
res = _response->body().reserve(64 * 1024);
} else {
res = _response->body().reserve(n * 48);
}
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
VPackBuilder result(&options);
try {
VPackObjectBuilder b(&result);
result.add(VPackValue("result"));
result.add(qResult);
result.add(VPackValue(qResult.begin(), VPackValueType::External));
result.add("hasMore", VPackValue(false));
if (arangodb::basics::VelocyPackHelper::getBooleanValue(opts, "count",
false)) {
@ -176,10 +193,8 @@ void RestCursorHandler::processQuery(VPackSlice const& slice) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
arangodb::basics::VPackStringBufferAdapter bufferAdapter(
_response->body().stringBuffer());
VPackDumper dumper(&bufferAdapter, queryResult.context->getVPackOptions());
dumper.dump(result.slice());
arangodb::basics::VelocyPackDumper dumper(&(_response->body()), queryResult.context->getVPackOptions());
dumper.dumpValue(result.slice());
return;
}

View File

@ -48,7 +48,11 @@ struct TRI_request_statistics_t {
_async(false),
_tooLarge(false),
_executeError(false),
_ignore(false) {}
_ignore(false) {
#ifdef USE_DEV_TIMERS
_id = nullptr;
#endif
}
void reset() {
_readStart = 0.0;

View File

@ -22,6 +22,7 @@
////////////////////////////////////////////////////////////////////////////////
#include "Cursor.h"
#include "Basics/VelocyPackDumper.h"
#include "Basics/VelocyPackHelper.h"
#include "Basics/VPackStringBufferAdapter.h"
#include "Utils/CollectionExport.h"
@ -69,15 +70,13 @@ VelocyPackCursor::VelocyPackCursor(TRI_vocbase_t* vocbase, CursorId id,
: Cursor(id, batchSize, extra, ttl, hasCount),
_vocbase(vocbase),
_result(std::move(result)),
_size(_result.result->slice().length()),
_iterator(_result.result->slice(), true),
_cached(_result.cached) {
TRI_ASSERT(_result.result->slice().isArray());
TRI_UseVocBase(vocbase);
}
VelocyPackCursor::~VelocyPackCursor() {
freeJson();
TRI_ReleaseVocBase(_vocbase);
}
@ -86,11 +85,11 @@ VelocyPackCursor::~VelocyPackCursor() {
////////////////////////////////////////////////////////////////////////////////
bool VelocyPackCursor::hasNext() {
if (_position < _size) {
if (_iterator.valid()) {
return true;
}
freeJson();
_isDeleted = true;
return false;
}
@ -100,16 +99,17 @@ bool VelocyPackCursor::hasNext() {
VPackSlice VelocyPackCursor::next() {
TRI_ASSERT(_result.result != nullptr);
TRI_ASSERT(_position < _size);
VPackSlice slice = _result.result->slice();
return slice.at(_position++);
TRI_ASSERT(_iterator.valid());
VPackSlice slice = _iterator.value();
_iterator.next();
return slice;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief return the cursor size
////////////////////////////////////////////////////////////////////////////////
size_t VelocyPackCursor::count() const { return _size; }
size_t VelocyPackCursor::count() const { return _iterator.size(); }
////////////////////////////////////////////////////////////////////////////////
/// @brief dump the cursor contents into a string buffer
@ -124,39 +124,45 @@ void VelocyPackCursor::dump(arangodb::basics::StringBuffer& buffer) {
// if the specified batch size does not get out of hand
// otherwise specifying a very high batch size would make the allocation fail
// in every case, even if there were much less documents in the collection
auto transactionContext = std::make_shared<StandaloneTransactionContext>(_vocbase);
size_t num = n;
if (num == 0) {
num = 1;
} else if (num >= 10000) {
num = 10000;
}
int res = buffer.reserve(num * 48);
if (n <= 50000) {
int res = buffer.reserve(n * 48);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
arangodb::basics::VPackStringBufferAdapter bufferAdapter(
buffer.stringBuffer());
VPackDumper dumper(&bufferAdapter, transactionContext->getVPackOptions());
for (size_t i = 0; i < n; ++i) {
if (!hasNext()) {
break;
}
arangodb::basics::VelocyPackDumper dumper(&buffer, _result.context->getVPackOptions());
if (i > 0) {
buffer.appendChar(',');
}
try {
auto row = next();
if (row.isNone()) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
for (size_t i = 0; i < n; ++i) {
if (!hasNext()) {
break;
}
try {
dumper.dump(row);
} catch (...) {
/// TODO correct error Handling!
THROW_ARANGO_EXCEPTION(TRI_ERROR_INTERNAL);
if (i > 0) {
buffer.appendChar(',');
}
auto row = next();
if (row.isNone()) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
dumper.dumpValue(row);
}
} catch (arangodb::basics::Exception const& ex) {
THROW_ARANGO_EXCEPTION_MESSAGE(ex.code(), ex.what());
} catch (std::exception const& ex) {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, ex.what());
} catch (...) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_INTERNAL);
}
buffer.appendText("],\"hasMore\":");
@ -177,11 +183,8 @@ void VelocyPackCursor::dump(arangodb::basics::StringBuffer& buffer) {
VPackSlice const extraSlice = extra();
if (extraSlice.isObject()) {
arangodb::basics::VPackStringBufferAdapter bufferAdapter(
buffer.stringBuffer());
VPackDumper dumper(&bufferAdapter);
buffer.appendText(",\"extra\":");
dumper.dump(extraSlice);
dumper.dumpValue(extraSlice);
}
buffer.appendText(",\"cached\":");
@ -193,14 +196,6 @@ void VelocyPackCursor::dump(arangodb::basics::StringBuffer& buffer) {
}
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free the internals
////////////////////////////////////////////////////////////////////////////////
void VelocyPackCursor::freeJson() {
_isDeleted = true;
}
ExportCursor::ExportCursor(TRI_vocbase_t* vocbase, CursorId id,
arangodb::CollectionExport* ex, size_t batchSize,
double ttl, bool hasCount)

View File

@ -29,6 +29,8 @@
#include "Aql/QueryResult.h"
#include "VocBase/voc-types.h"
#include <velocypack/Iterator.h>
struct TRI_vocbase_t;
namespace arangodb {
@ -125,13 +127,10 @@ class VelocyPackCursor : public Cursor {
void dump(arangodb::basics::StringBuffer&) override final;
private:
void freeJson();
private:
TRI_vocbase_t* _vocbase;
aql::QueryResult _result;
size_t const _size;
arangodb::velocypack::ArrayIterator _iterator;
bool _cached;
};

View File

@ -994,7 +994,7 @@ OperationResult Transaction::anyLocal(std::string const& collectionName,
}
VPackSlice docs = result->slice();
VPackArrayIterator it(docs);
VPackArrayIterator it(docs, true);
while (it.valid()) {
resultBuilder.add(it.value());
it.next();

View File

@ -46,7 +46,9 @@ typedef arangodb::basics::PathFinder<arangodb::velocypack::Slice,
double> ArangoDBPathFinder;
typedef arangodb::basics::ConstDistanceFinder<arangodb::velocypack::Slice,
arangodb::velocypack::Slice>
arangodb::velocypack::Slice,
arangodb::basics::VelocyPackHelper::VPackStringHash,
arangodb::basics::VelocyPackHelper::VPackStringEqual>
ArangoDBConstDistancePathFinder;
namespace arangodb {

View File

@ -109,7 +109,7 @@ static inline void StoreNumber(uint8_t* dest, T value, uint32_t length) {
/// marker type
////////////////////////////////////////////////////////////////////////////////
static inline size_t VPackOffset(TRI_df_marker_type_t type) throw() {
static inline size_t VPackOffset(TRI_df_marker_type_t type) noexcept {
if (type == TRI_DF_MARKER_VPACK_DOCUMENT ||
type == TRI_DF_MARKER_VPACK_REMOVE) {
// VPack is located after transaction id
@ -146,7 +146,7 @@ static inline size_t VPackOffset(TRI_df_marker_type_t type) throw() {
/// @brief returns the marker-specific database id offset
////////////////////////////////////////////////////////////////////////////////
static inline size_t DatabaseIdOffset(TRI_df_marker_type_t type) throw() {
static inline size_t DatabaseIdOffset(TRI_df_marker_type_t type) noexcept {
if (type == TRI_DF_MARKER_PROLOGUE ||
type == TRI_DF_MARKER_VPACK_CREATE_COLLECTION ||
type == TRI_DF_MARKER_VPACK_DROP_COLLECTION ||
@ -168,7 +168,7 @@ static inline size_t DatabaseIdOffset(TRI_df_marker_type_t type) throw() {
/// @brief returns the marker-specific database id
////////////////////////////////////////////////////////////////////////////////
static inline TRI_voc_tick_t DatabaseId(TRI_df_marker_t const* marker) throw() {
static inline TRI_voc_tick_t DatabaseId(TRI_df_marker_t const* marker) noexcept {
TRI_df_marker_type_t type = marker->getType();
if (type == TRI_DF_MARKER_PROLOGUE ||
type == TRI_DF_MARKER_VPACK_CREATE_COLLECTION ||
@ -191,7 +191,7 @@ static inline TRI_voc_tick_t DatabaseId(TRI_df_marker_t const* marker) throw() {
/// @brief returns the marker-specific collection id offset
////////////////////////////////////////////////////////////////////////////////
static inline size_t CollectionIdOffset(TRI_df_marker_type_t type) throw() {
static inline size_t CollectionIdOffset(TRI_df_marker_type_t type) noexcept {
if (type == TRI_DF_MARKER_PROLOGUE ||
type == TRI_DF_MARKER_VPACK_CREATE_COLLECTION ||
type == TRI_DF_MARKER_VPACK_DROP_COLLECTION ||
@ -208,7 +208,7 @@ static inline size_t CollectionIdOffset(TRI_df_marker_type_t type) throw() {
/// @brief returns the marker-specific collection id
////////////////////////////////////////////////////////////////////////////////
static inline TRI_voc_tick_t CollectionId(TRI_df_marker_t const* marker) throw() {
static inline TRI_voc_tick_t CollectionId(TRI_df_marker_t const* marker) noexcept {
TRI_df_marker_type_t type = marker->getType();
if (type == TRI_DF_MARKER_PROLOGUE ||
type == TRI_DF_MARKER_VPACK_CREATE_COLLECTION ||
@ -226,7 +226,7 @@ static inline TRI_voc_tick_t CollectionId(TRI_df_marker_t const* marker) throw()
/// @brief returns the marker-specific transaction id offset
////////////////////////////////////////////////////////////////////////////////
static inline TRI_voc_tick_t TransactionIdOffset(TRI_df_marker_type_t type) throw() {
static inline TRI_voc_tick_t TransactionIdOffset(TRI_df_marker_type_t type) noexcept {
if (type == TRI_DF_MARKER_VPACK_DOCUMENT ||
type == TRI_DF_MARKER_VPACK_REMOVE) {
return sizeof(TRI_df_marker_t);
@ -243,7 +243,7 @@ static inline TRI_voc_tick_t TransactionIdOffset(TRI_df_marker_type_t type) thro
/// @brief returns the marker-specific transaction id
////////////////////////////////////////////////////////////////////////////////
static inline TRI_voc_tick_t TransactionId(TRI_df_marker_t const* marker) throw() {
static inline TRI_voc_tick_t TransactionId(TRI_df_marker_t const* marker) noexcept {
TRI_df_marker_type_t type = marker->getType();
if (type == TRI_DF_MARKER_VPACK_DOCUMENT ||
type == TRI_DF_MARKER_VPACK_REMOVE ||

View File

@ -106,7 +106,7 @@ struct TRI_doc_mptr_t {
inline void setVPack(void const* value) { _dataptr = value; }
// return a pointer to the beginning of the Vpack
inline uint8_t const* vpack() const throw() {
inline uint8_t const* vpack() const noexcept {
TRI_ASSERT(_dataptr != nullptr);
return reinterpret_cast<uint8_t const*>(_dataptr);
}
@ -117,7 +117,7 @@ struct TRI_doc_mptr_t {
}
// return a pointer to the beginning of the Vpack
inline void const* dataptr() const throw() {
inline void const* dataptr() const noexcept {
return _dataptr;
}

View File

@ -249,37 +249,37 @@ struct TRI_df_marker_t {
TRI_df_marker_t() : _size(0), _crc(0), _typeAndTick(0) {}
~TRI_df_marker_t() {}
inline off_t offsetOfSize() const throw() {
inline off_t offsetOfSize() const noexcept {
return offsetof(TRI_df_marker_t, _size);
}
inline off_t offsetOfCrc() const throw() {
inline off_t offsetOfCrc() const noexcept {
return offsetof(TRI_df_marker_t, _crc);
}
inline off_t offsetOfTypeAndTick() const throw() {
inline off_t offsetOfTypeAndTick() const noexcept {
return offsetof(TRI_df_marker_t, _typeAndTick);
}
inline TRI_voc_size_t getSize() const throw() { return _size; }
inline void setSize(TRI_voc_size_t size) throw() { _size = size; }
inline TRI_voc_size_t getSize() const noexcept { return _size; }
inline void setSize(TRI_voc_size_t size) noexcept { _size = size; }
inline TRI_voc_crc_t getCrc() const throw() { return _crc; }
inline void setCrc(TRI_voc_crc_t crc) throw() { _crc = crc; }
inline TRI_voc_crc_t getCrc() const noexcept { return _crc; }
inline void setCrc(TRI_voc_crc_t crc) noexcept { _crc = crc; }
inline TRI_voc_tick_t getTick() const throw() {
inline TRI_voc_tick_t getTick() const noexcept {
return static_cast<TRI_voc_tick_t>(_typeAndTick & 0x00ffffffffffffffULL);
}
inline void setTick(TRI_voc_tick_t tick) throw() {
inline void setTick(TRI_voc_tick_t tick) noexcept {
_typeAndTick &= 0xff00000000000000ULL;
_typeAndTick |= tick;
}
inline TRI_df_marker_type_t getType() const throw() {
inline TRI_df_marker_type_t getType() const noexcept {
return static_cast<TRI_df_marker_type_t>((_typeAndTick & 0xff00000000000000ULL) >> 56);
}
inline void setType(TRI_df_marker_type_t type) throw() {
inline void setType(TRI_df_marker_type_t type) noexcept {
uint64_t t = static_cast<uint64_t>(type) << 56;
_typeAndTick &= 0x00ffffffffffffffULL;
_typeAndTick |= t;
}
inline void setTypeAndTick(TRI_df_marker_type_t type, TRI_voc_tick_t tick) throw() {
inline void setTypeAndTick(TRI_df_marker_type_t type, TRI_voc_tick_t tick) noexcept {
uint64_t t = static_cast<uint64_t>(type) << 56;
t |= (tick & 0x00ffffffffffffffULL);
_typeAndTick = t;

View File

@ -787,7 +787,7 @@ static int OpenIteratorHandleDocumentMarker(TRI_df_marker_t const* marker,
VPackSlice const slice(reinterpret_cast<char const*>(marker) + DatafileHelper::VPackOffset(TRI_DF_MARKER_VPACK_DOCUMENT));
VPackSlice const keySlice = Transaction::extractKeyFromDocument(slice);
TRI_voc_rid_t const rid = std::stoull(slice.get(StaticStrings::RevString).copyString());
TRI_voc_rid_t const rid = StringUtils::uint64(slice.get(StaticStrings::RevString).copyString());
SetRevision(document, rid, false);
document->_keyGenerator->track(keySlice.copyString());
@ -890,7 +890,7 @@ static int OpenIteratorHandleDeletionMarker(TRI_df_marker_t const* marker,
VPackSlice const slice(reinterpret_cast<char const*>(marker) + DatafileHelper::VPackOffset(TRI_DF_MARKER_VPACK_REMOVE));
VPackSlice const keySlice = Transaction::extractKeyFromDocument(slice);
TRI_voc_rid_t const rid = std::stoull(slice.get(StaticStrings::RevString).copyString());
TRI_voc_rid_t const rid = StringUtils::uint64(slice.get(StaticStrings::RevString).copyString());
document->setLastRevision(rid, false);
document->_keyGenerator->track(keySlice.copyString());
@ -3320,7 +3320,7 @@ int TRI_document_collection_t::insert(Transaction* trx, VPackSlice const slice,
TRI_ASSERT(slice.isObject());
// we can get away with the fast hash function here, as key values are
// restricted to strings
hash = Transaction::extractKeyFromDocument(slice).hash();
hash = Transaction::extractKeyFromDocument(slice).hashString();
newSlice = slice;
}
@ -4190,7 +4190,7 @@ int TRI_document_collection_t::newObjectForInsert(
}
// we can get away with the fast hash function here, as key values are
// restricted to strings
hash = s.hash();
hash = s.hashString();
// _from and _to
if (isEdgeCollection) {

View File

@ -1726,6 +1726,10 @@ if (typeof SYS_OPTIONS !== 'undefined') {
delete global.SYS_OPTIONS;
}
exports.propertyKeys = (obj) => Object.keys(obj).filter((key) => {
return (key.charAt(0) !== '_' && key.charAt(0) !== '$');
});
////////////////////////////////////////////////////////////////////////////////
/// @brief print

View File

@ -38,26 +38,12 @@
Object.defineProperty(Object.prototype, '_shallowCopy', {
get() {
var self = this;
return this.propertyKeys.reduce(function (previous, key) {
previous[key] = self[key];
return require('internal').propertyKeys(this).reduce((previous, key) => {
previous[key] = this[key];
return previous;
}, {});
}
});
////////////////////////////////////////////////////////////////////////////////
/// @brief returns the property keys
////////////////////////////////////////////////////////////////////////////////
Object.defineProperty(Object.prototype, 'propertyKeys', {
get() {
return Object.keys(this).filter(function (key) {
return (key.charAt(0) !== '_' && key.charAt(0) !== '$');
});
}
});
}());

View File

@ -28,6 +28,7 @@
////////////////////////////////////////////////////////////////////////////////
var is = require("@arangodb/is"),
internal = require("internal"),
Edge,
Graph,
Vertex,
@ -244,7 +245,7 @@ Edge.prototype.getProperty = function (name) {
////////////////////////////////////////////////////////////////////////////////
Edge.prototype.getPropertyKeys = function () {
return this._properties.propertyKeys;
return internal.propertyKeys(this._properties);
};
////////////////////////////////////////////////////////////////////////////////
@ -431,7 +432,7 @@ Vertex.prototype.getProperty = function (name) {
////////////////////////////////////////////////////////////////////////////////
Vertex.prototype.getPropertyKeys = function () {
return this._properties.propertyKeys;
return internal.propertyKeys(this._properties);
};
////////////////////////////////////////////////////////////////////////////////

View File

@ -93,8 +93,8 @@ function RunCommandLineTests(options) {
console.log(`jslint: ${file} passed`);
}
} catch (err) {
console.log(`cannot run test file "${file}": ${err}`);
console.log(err.stack);
console.error(`cannot run test file "${file}": ${err}`);
console.error(err.stack);
result = false;
}
}

View File

@ -1,3 +0,0 @@
{
"missing required fields": "name and version"
}

View File

@ -85,13 +85,6 @@ describe('Foxx Manager install', function() {
.with.property('errorNum', errors.ERROR_MALFORMED_MANIFEST_FILE.code);
});
it('with incomplete manifest', function() {
expect(function () {
FoxxManager.install(fs.join(basePath, 'incomplete-manifest'), '/unittest/broken');
}).to.throw(ArangoError)
.with.property('errorNum', errors.ERROR_INVALID_APPLICATION_MANIFEST.code);
});
it('with malformed name', function() {
expect(function () {
FoxxManager.install(fs.join(basePath, 'malformed-name'), '/unittest/broken');

View File

@ -487,7 +487,7 @@ var JSHINT = (function() {
.substr(1, m.length - 2)
.replace("\\\"", "\"");
}
if (m === '_shallowCopy') return;
membersOnly[m] = false;
});
}
@ -1882,6 +1882,7 @@ var JSHINT = (function() {
function countMember(m) {
if (m === '_shallowCopy') return;
if (membersOnly && typeof membersOnly[m] !== "boolean") {
warning("W036", state.tokens.curr, m);
}

View File

@ -354,7 +354,7 @@ function getLocalCollections () {
/// @brief create databases if they exist in the plan but not locally
////////////////////////////////////////////////////////////////////////////////
function createLocalDatabases (plannedDatabases, writeLocked) {
function createLocalDatabases (plannedDatabases, currentDatabases, writeLocked) {
var ourselves = global.ArangoServerState.id();
var createDatabaseAgency = function (payload) {
global.ArangoAgency.set("Current/Databases/" + payload.name + "/" + ourselves,
@ -374,7 +374,7 @@ function createLocalDatabases (plannedDatabases, writeLocked) {
payload.error = false;
payload.errorNum = 0;
payload.errorMessage = "no error";
if (! localDatabases.hasOwnProperty(name)) {
// must create database
@ -396,6 +396,12 @@ function createLocalDatabases (plannedDatabases, writeLocked) {
writeLocked({ part: "Current" },
createDatabaseAgency,
[ payload ]);
} else if (typeof currentDatabases[name] !== 'object' || !currentDatabases[name].hasOwnProperty(ourselves)) {
// mop: ok during cluster startup we have this buggy situation where a dbserver
// has a database but has not yet announced it to the agency :S
writeLocked({ part: "Current" },
createDatabaseAgency,
[ payload ]);
}
}
}
@ -506,8 +512,9 @@ function cleanupCurrentDatabases (writeLocked) {
function handleDatabaseChanges (plan, current, writeLocked) {
var plannedDatabases = getByPrefix(plan, "Plan/Databases/");
var currentDatabases = getByPrefix(plan, "Current/Databases/");
createLocalDatabases(plannedDatabases, writeLocked);
createLocalDatabases(plannedDatabases, currentDatabases, writeLocked);
dropLocalDatabases(plannedDatabases, writeLocked);
cleanupCurrentDatabases(writeLocked);
}

View File

@ -94,7 +94,7 @@ const manifestSchema = {
),
lib: joi.string().default('.'),
main: joi.string().default('index.js'),
main: joi.string().optional(),
configuration: (
joi.object().optional()
@ -1061,7 +1061,9 @@ function _install(serviceInfo, mount, options, runSetup) {
},
action() {
var definition = collection.firstExample({mount: mount});
collection.remove(definition._key);
if (definition !== null) {
collection.remove(definition._key);
}
}
});
}

View File

@ -187,9 +187,9 @@ module.exports = class FoxxService {
basePath: this.main.context.baseUrl,
paths: paths,
info: {
title: this.name,
title: this.manifest.name,
description: this.manifest.description,
version: this.version,
version: this.manifest.version,
license: {
name: this.manifest.license
}

View File

@ -803,55 +803,6 @@ int TRI_AppendString2StringBuffer(TRI_string_buffer_t* self, char const* str,
return AppendString(self, str, len);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief appends characters but url-encode the string
////////////////////////////////////////////////////////////////////////////////
int TRI_AppendUrlEncodedStringStringBuffer(TRI_string_buffer_t* self,
char const* src) {
static char hexChars[16] = {'0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
size_t len = strlen(src);
int res = Reserve(self, len * 3);
if (res != TRI_ERROR_NO_ERROR) {
return res;
}
char const* end = src + len;
for (; src < end; ++src) {
if ('0' <= *src && *src <= '9') {
AppendChar(self, *src);
}
else if ('a' <= *src && *src <= 'z') {
AppendChar(self, *src);
}
else if ('A' <= *src && *src <= 'Z') {
AppendChar(self, *src);
}
else if (*src == '-' || *src == '_' || *src == '.' || *src == '~') {
AppendChar(self, *src);
}
else {
uint8_t n = (uint8_t)(*src);
uint8_t n1 = n >> 4;
uint8_t n2 = n & 0x0F;
AppendChar(self, '%');
AppendChar(self, hexChars[n1]);
AppendChar(self, hexChars[n2]);
}
}
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief appends characters but json-encode the null-terminated string
////////////////////////////////////////////////////////////////////////////////

View File

@ -205,11 +205,24 @@ int TRI_AppendString2StringBuffer(TRI_string_buffer_t* self, char const* str,
size_t len);
////////////////////////////////////////////////////////////////////////////////
/// @brief appends characters but url-encode the string
/// @brief appends characters but does not check buffer bounds
////////////////////////////////////////////////////////////////////////////////
int TRI_AppendUrlEncodedStringStringBuffer(TRI_string_buffer_t* self,
char const* str);
static inline void TRI_AppendCharUnsafeStringBuffer(TRI_string_buffer_t* self, char chr) {
*self->_current++ = chr;
}
static inline void TRI_AppendStringUnsafeStringBuffer(TRI_string_buffer_t* self, char const* str) {
size_t len = strlen(str);
memcpy(self->_current, str, len);
self->_current += len;
}
static inline void TRI_AppendStringUnsafeStringBuffer(TRI_string_buffer_t* self, char const* str,
size_t len) {
memcpy(self->_current, str, len);
self->_current += len;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief appends characters but json-encode the null-terminated string
@ -764,7 +777,7 @@ class StringBuffer {
TRI_AppendCharStringBuffer(&_buffer, chr);
return *this;
}
//////////////////////////////////////////////////////////////////////////////
/// @brief appends as json-encoded
//////////////////////////////////////////////////////////////////////////////

View File

@ -1217,7 +1217,7 @@ class PathEnumerator {
}
};
template <typename VertexId, typename EdgeId>
template <typename VertexId, typename EdgeId, typename HashFuncType, typename EqualFuncType>
class ConstDistanceFinder {
public:
//////////////////////////////////////////////////////////////////////////////
@ -1251,10 +1251,10 @@ class ConstDistanceFinder {
PathSnippet(VertexId& pred, EdgeId& path) : _pred(pred), _path(path) {}
};
std::unordered_map<VertexId, PathSnippet*> _leftFound;
std::unordered_map<VertexId, PathSnippet*, HashFuncType, EqualFuncType> _leftFound;
std::deque<VertexId> _leftClosure;
std::unordered_map<VertexId, PathSnippet*> _rightFound;
std::unordered_map<VertexId, PathSnippet*, HashFuncType, EqualFuncType> _rightFound;
std::deque<VertexId> _rightClosure;
ExpanderFunction _leftNeighborExpander;
@ -1300,12 +1300,13 @@ class ConstDistanceFinder {
_leftNeighborExpander(v, edges, neighbors);
TRI_ASSERT(edges.size() == neighbors.size());
for (size_t i = 0; i < neighbors.size(); ++i) {
VertexId n = neighbors.at(i);
VertexId const n = neighbors.at(i);
if (_leftFound.find(n) == _leftFound.end()) {
_leftFound.emplace(n, new PathSnippet(v, edges.at(i)));
if (_rightFound.find(n) != _rightFound.end()) {
auto leftFoundIt = _leftFound.emplace(n, new PathSnippet(v, edges.at(i))).first;
auto rightFoundIt = _rightFound.find(n);
if (rightFoundIt != _rightFound.end()) {
res->vertices.emplace_back(n);
auto it = _leftFound.find(n);
auto it = leftFoundIt;
VertexId next;
while (it->second != nullptr) {
next = it->second->_pred;
@ -1313,7 +1314,7 @@ class ConstDistanceFinder {
res->edges.push_front(it->second->_path);
it = _leftFound.find(next);
}
it = _rightFound.find(n);
it = rightFoundIt;
while (it->second != nullptr) {
next = it->second->_pred;
res->vertices.emplace_back(next);
@ -1336,12 +1337,13 @@ class ConstDistanceFinder {
_rightNeighborExpander(v, edges, neighbors);
TRI_ASSERT(edges.size() == neighbors.size());
for (size_t i = 0; i < neighbors.size(); ++i) {
VertexId n = neighbors.at(i);
VertexId const n = neighbors.at(i);
if (_rightFound.find(n) == _rightFound.end()) {
_rightFound.emplace(n, new PathSnippet(v, edges.at(i)));
if (_leftFound.find(n) != _leftFound.end()) {
auto rightFoundIt = _rightFound.emplace(n, new PathSnippet(v, edges.at(i))).first;
auto leftFoundIt = _leftFound.find(n);
if (leftFoundIt != _leftFound.end()) {
res->vertices.emplace_back(n);
auto it = _leftFound.find(n);
auto it = leftFoundIt;
VertexId next;
while (it->second != nullptr) {
next = it->second->_pred;
@ -1349,7 +1351,7 @@ class ConstDistanceFinder {
res->edges.push_front(it->second->_path);
it = _leftFound.find(next);
}
it = _rightFound.find(n);
it = rightFoundIt;
while (it->second != nullptr) {
next = it->second->_pred;
res->vertices.emplace_back(next);

View File

@ -25,12 +25,6 @@
#include "Basics/Exceptions.h"
#include "Basics/fpconv.h"
#include "Logger/Logger.h"
//#include "Basics/StringUtils.h"
//#include "Basics/Utf8Helper.h"
//#include "Basics/VPackStringBufferAdapter.h"
//#include "Basics/files.h"
//#include "Basics/hashes.h"
//#include "Basics/tri-strings.h"
#include <velocypack/velocypack-common.h>
#include <velocypack/AttributeTranslator.h>
@ -42,11 +36,13 @@
using namespace arangodb::basics;
void VelocyPackDumper::handleUnsupportedType(VPackSlice const* slice) {
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
if (options->unsupportedTypeBehavior == VPackOptions::NullifyUnsupportedType) {
_buffer->appendText("null", 4);
TRI_AppendStringUnsafeStringBuffer(buffer, "null", 4);
return;
} else if (options->unsupportedTypeBehavior == VPackOptions::ConvertUnsupportedType) {
_buffer->appendText(std::string("\"(non-representable type ") + slice->typeName() + ")\"");
TRI_AppendStringUnsafeStringBuffer(buffer, "\"(non-representable type)\"");
return;
}
@ -54,71 +50,88 @@ void VelocyPackDumper::handleUnsupportedType(VPackSlice const* slice) {
}
void VelocyPackDumper::appendUInt(uint64_t v) {
if (10000000000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 10000000000000000000ULL) % 10);
}
if (1000000000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 1000000000000000000ULL) % 10);
}
if (100000000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 100000000000000000ULL) % 10);
}
if (10000000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 10000000000000000ULL) % 10);
}
if (1000000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 1000000000000000ULL) % 10);
}
if (100000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 100000000000000ULL) % 10);
}
if (10000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 10000000000000ULL) % 10);
}
if (1000000000000ULL <= v) {
_buffer->appendChar('0' + (v / 1000000000000ULL) % 10);
}
if (100000000000ULL <= v) {
_buffer->appendChar('0' + (v / 100000000000ULL) % 10);
}
if (10000000000ULL <= v) {
_buffer->appendChar('0' + (v / 10000000000ULL) % 10);
}
if (1000000000ULL <= v) {
_buffer->appendChar('0' + (v / 1000000000ULL) % 10);
}
if (100000000ULL <= v) {
_buffer->appendChar('0' + (v / 100000000ULL) % 10);
}
if (10000000ULL <= v) {
_buffer->appendChar('0' + (v / 10000000ULL) % 10);
}
if (1000000ULL <= v) {
_buffer->appendChar('0' + (v / 1000000ULL) % 10);
}
if (100000ULL <= v) {
_buffer->appendChar('0' + (v / 100000ULL) % 10);
}
if (10000ULL <= v) {
_buffer->appendChar('0' + (v / 10000ULL) % 10);
}
if (1000ULL <= v) {
_buffer->appendChar('0' + (v / 1000ULL) % 10);
}
if (100ULL <= v) {
_buffer->appendChar('0' + (v / 100ULL) % 10);
}
if (10ULL <= v) {
_buffer->appendChar('0' + (v / 10ULL) % 10);
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
int res = TRI_ReserveStringBuffer(buffer, 21);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
_buffer->appendChar('0' + (v % 10));
if (10000000000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000000000000ULL) % 10);
}
if (1000000000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000000000ULL) % 10);
}
if (100000000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000000000ULL) % 10);
}
if (10000000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000000000ULL) % 10);
}
if (1000000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000000ULL) % 10);
}
if (100000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000000ULL) % 10);
}
if (10000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000000ULL) % 10);
}
if (1000000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000ULL) % 10);
}
if (100000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000ULL) % 10);
}
if (10000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000ULL) % 10);
}
if (1000000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000ULL) % 10);
}
if (100000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000ULL) % 10);
}
if (10000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000ULL) % 10);
}
if (1000000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000ULL) % 10);
}
if (100000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000ULL) % 10);
}
if (10000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000ULL) % 10);
}
if (1000ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000ULL) % 10);
}
if (100ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100ULL) % 10);
}
if (10ULL <= v) {
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10ULL) % 10);
}
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v % 10));
}
void VelocyPackDumper::appendDouble(double v) {
char temp[24];
int len = fpconv_dtoa(v, &temp[0]);
_buffer->appendText(&temp[0], static_cast<VPackValueLength>(len));
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
int res = TRI_ReserveStringBuffer(buffer, static_cast<size_t>(len));
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
TRI_AppendStringUnsafeStringBuffer(buffer, &temp[0], static_cast<size_t>(len));
}
void VelocyPackDumper::dumpInteger(VPackSlice const* slice) {
@ -127,79 +140,96 @@ void VelocyPackDumper::dumpInteger(VPackSlice const* slice) {
appendUInt(v);
} else if (slice->isType(VPackValueType::Int)) {
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
int res = TRI_ReserveStringBuffer(buffer, 21);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
int64_t v = slice->getInt();
if (v == INT64_MIN) {
_buffer->appendText("-9223372036854775808", 20);
TRI_AppendStringUnsafeStringBuffer(buffer, "-9223372036854775808", 20);
return;
}
if (v < 0) {
_buffer->appendChar('-');
TRI_AppendCharUnsafeStringBuffer(buffer, '-');
v = -v;
}
if (1000000000000000000LL <= v) {
_buffer->appendChar('0' + (v / 1000000000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000000000LL) % 10);
}
if (100000000000000000LL <= v) {
_buffer->appendChar('0' + (v / 100000000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000000000LL) % 10);
}
if (10000000000000000LL <= v) {
_buffer->appendChar('0' + (v / 10000000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000000000LL) % 10);
}
if (1000000000000000LL <= v) {
_buffer->appendChar('0' + (v / 1000000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000000LL) % 10);
}
if (100000000000000LL <= v) {
_buffer->appendChar('0' + (v / 100000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000000LL) % 10);
}
if (10000000000000LL <= v) {
_buffer->appendChar('0' + (v / 10000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000000LL) % 10);
}
if (1000000000000LL <= v) {
_buffer->appendChar('0' + (v / 1000000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000000LL) % 10);
}
if (100000000000LL <= v) {
_buffer->appendChar('0' + (v / 100000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000000LL) % 10);
}
if (10000000000LL <= v) {
_buffer->appendChar('0' + (v / 10000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000000LL) % 10);
}
if (1000000000LL <= v) {
_buffer->appendChar('0' + (v / 1000000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000000LL) % 10);
}
if (100000000LL <= v) {
_buffer->appendChar('0' + (v / 100000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000000LL) % 10);
}
if (10000000LL <= v) {
_buffer->appendChar('0' + (v / 10000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000000LL) % 10);
}
if (1000000LL <= v) {
_buffer->appendChar('0' + (v / 1000000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000000LL) % 10);
}
if (100000LL <= v) {
_buffer->appendChar('0' + (v / 100000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100000LL) % 10);
}
if (10000LL <= v) {
_buffer->appendChar('0' + (v / 10000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10000LL) % 10);
}
if (1000LL <= v) {
_buffer->appendChar('0' + (v / 1000LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 1000LL) % 10);
}
if (100LL <= v) {
_buffer->appendChar('0' + (v / 100LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 100LL) % 10);
}
if (10LL <= v) {
_buffer->appendChar('0' + (v / 10LL) % 10);
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v / 10LL) % 10);
}
_buffer->appendChar('0' + (v % 10));
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + (v % 10));
} else if (slice->isType(VPackValueType::SmallInt)) {
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
int res = TRI_ReserveStringBuffer(buffer, 21);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
int64_t v = slice->getSmallInt();
if (v < 0) {
_buffer->appendChar('-');
TRI_AppendCharUnsafeStringBuffer(buffer, '-');
v = -v;
}
_buffer->appendChar('0' + static_cast<char>(v));
TRI_AppendCharUnsafeStringBuffer(buffer, '0' + static_cast<char>(v));
}
}
@ -237,13 +267,31 @@ void VelocyPackDumper::dumpString(char const* src, VPackValueLength len) {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0};
_buffer->reserve(len);
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
uint8_t const* p = reinterpret_cast<uint8_t const*>(src);
uint8_t const* e = p + len;
static size_t const maxCount = 16;
size_t count = maxCount;
while (p < e) {
uint8_t c = *p;
// don't check for buffer reallocation on every single character
if (count == maxCount) {
// maximum value that we can append in one go is 6 bytes
int res = TRI_ReserveStringBuffer(buffer, 6 * count);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
count = 0;
} else {
++count;
}
if ((c & 0x80) == 0) {
// check for control characters
char esc = EscapeTable[c];
@ -251,22 +299,22 @@ void VelocyPackDumper::dumpString(char const* src, VPackValueLength len) {
if (esc) {
if (c != '/' || options->escapeForwardSlashes) {
// escape forward slashes only when requested
_buffer->appendChar('\\');
TRI_AppendCharUnsafeStringBuffer(buffer, '\\');
}
_buffer->appendChar(static_cast<char>(esc));
TRI_AppendCharUnsafeStringBuffer(buffer, static_cast<char>(esc));
if (esc == 'u') {
uint16_t i1 = (((uint16_t)c) & 0xf0) >> 4;
uint16_t i2 = (((uint16_t)c) & 0x0f);
_buffer->appendText("00", 2);
_buffer->appendChar(
TRI_AppendStringUnsafeStringBuffer(buffer, "00", 2);
TRI_AppendCharUnsafeStringBuffer(buffer,
static_cast<char>((i1 < 10) ? ('0' + i1) : ('A' + i1 - 10)));
_buffer->appendChar(
TRI_AppendCharUnsafeStringBuffer(buffer,
static_cast<char>((i2 < 10) ? ('0' + i2) : ('A' + i2 - 10)));
}
} else {
_buffer->appendChar(static_cast<char>(c));
TRI_AppendCharUnsafeStringBuffer(buffer, static_cast<char>(c));
}
} else if ((c & 0xe0) == 0xc0) {
// two-byte sequence
@ -274,7 +322,7 @@ void VelocyPackDumper::dumpString(char const* src, VPackValueLength len) {
throw VPackException(VPackException::InvalidUtf8Sequence);
}
_buffer->appendText(reinterpret_cast<char const*>(p), 2);
TRI_AppendStringUnsafeStringBuffer(buffer, reinterpret_cast<char const*>(p), 2);
++p;
} else if ((c & 0xf0) == 0xe0) {
// three-byte sequence
@ -282,7 +330,7 @@ void VelocyPackDumper::dumpString(char const* src, VPackValueLength len) {
throw VPackException(VPackException::InvalidUtf8Sequence);
}
_buffer->appendText(reinterpret_cast<char const*>(p), 3);
TRI_AppendStringUnsafeStringBuffer(buffer, reinterpret_cast<char const*>(p), 3);
p += 2;
} else if ((c & 0xf8) == 0xf0) {
// four-byte sequence
@ -290,7 +338,7 @@ void VelocyPackDumper::dumpString(char const* src, VPackValueLength len) {
throw VPackException(VPackException::InvalidUtf8Sequence);
}
_buffer->appendText(reinterpret_cast<char const*>(p), 4);
TRI_AppendStringUnsafeStringBuffer(buffer, reinterpret_cast<char const*>(p), 4);
p += 3;
}
@ -302,49 +350,68 @@ void VelocyPackDumper::dumpValue(VPackSlice const* slice, VPackSlice const* base
if (base == nullptr) {
base = slice;
}
TRI_string_buffer_t* buffer = _buffer->stringBuffer();
// alloc at least 16 bytes
int res = TRI_ReserveStringBuffer(buffer, 16);
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
switch (slice->type()) {
case VPackValueType::Null: {
_buffer->appendText("null", 4);
TRI_AppendStringUnsafeStringBuffer(buffer, "null", 4);
break;
}
case VPackValueType::Bool: {
if (slice->getBool()) {
_buffer->appendText("true", 4);
TRI_AppendStringUnsafeStringBuffer(buffer, "true", 4);
} else {
_buffer->appendText("false", 5);
TRI_AppendStringUnsafeStringBuffer(buffer, "false", 5);
}
break;
}
case VPackValueType::Array: {
VPackArrayIterator it(*slice, true);
_buffer->appendChar('[');
TRI_AppendCharUnsafeStringBuffer(buffer, '[');
while (it.valid()) {
if (!it.isFirst()) {
_buffer->appendChar(',');
if (TRI_AppendCharStringBuffer(buffer, ',') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
}
dumpValue(it.value(), slice);
it.next();
}
_buffer->appendChar(']');
if (TRI_AppendCharStringBuffer(buffer, ']') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
break;
}
case VPackValueType::Object: {
VPackObjectIterator it(*slice);
_buffer->appendChar('{');
VPackObjectIterator it(*slice, true);
TRI_AppendCharUnsafeStringBuffer(buffer, '{');
while (it.valid()) {
if (!it.isFirst()) {
_buffer->appendChar(',');
if (TRI_AppendCharStringBuffer(buffer, ',') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
}
dumpValue(it.key().makeKey(), slice);
_buffer->appendChar(':');
if (TRI_AppendCharStringBuffer(buffer, ':') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
dumpValue(it.value(), slice);
it.next();
}
_buffer->appendChar('}');
if (TRI_AppendCharStringBuffer(buffer, '}') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
break;
}
@ -368,10 +435,11 @@ void VelocyPackDumper::dumpValue(VPackSlice const* slice, VPackSlice const* base
case VPackValueType::String: {
VPackValueLength len;
char const* p = slice->getString(len);
_buffer->reserve(2 + len);
_buffer->appendChar('"');
TRI_AppendCharUnsafeStringBuffer(buffer, '"');
dumpString(p, len);
_buffer->appendChar('"');
if (TRI_AppendCharStringBuffer(buffer, '"') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
break;
}
@ -385,8 +453,12 @@ void VelocyPackDumper::dumpValue(VPackSlice const* slice, VPackSlice const* base
if (options->customTypeHandler == nullptr) {
throw VPackException(VPackException::NeedCustomTypeHandler);
} else {
TRI_AppendCharUnsafeStringBuffer(buffer, '"');
std::string v = options->customTypeHandler->toString(*slice, nullptr, *base);
dumpString(v.c_str(), v.size());
if (TRI_AppendCharStringBuffer(buffer, '"') != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
}
}
break;
}

View File

@ -51,35 +51,24 @@ class VelocyPackDumper {
}
~VelocyPackDumper() = default;
void dump(velocypack::Slice const& slice) {
_buffer->reserve(slice.byteSize());
dumpValue(&slice);
void dumpValue(velocypack::Slice const* slice, velocypack::Slice const* = nullptr);
inline void dumpValue(velocypack::Slice const& slice, velocypack::Slice const* base = nullptr) {
dumpValue(&slice, base);
}
void dump(velocypack::Slice const* slice) { dump(*slice); }
void append(velocypack::Slice const& slice) { dumpValue(&slice); }
void append(velocypack::Slice const* slice) { dumpValue(slice); }
private:
void appendUInt(uint64_t);
void appendDouble(double);
private:
void handleUnsupportedType(velocypack::Slice const* slice);
void dumpInteger(velocypack::Slice const*);
void dumpString(char const*, velocypack::ValueLength);
inline void dumpValue(velocypack::Slice const& slice, velocypack::Slice const* base = nullptr) {
dumpValue(&slice, base);
}
void dumpValue(velocypack::Slice const*, velocypack::Slice const* = nullptr);
public:
velocypack::Options const* options;

View File

@ -41,6 +41,10 @@
#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>
extern "C" {
unsigned long long XXH64 (const void* input, size_t length, unsigned long long seed);
}
using VelocyPackHelper = arangodb::basics::VelocyPackHelper;
static std::unique_ptr<VPackAttributeTranslator> Translator;
@ -159,49 +163,34 @@ size_t VelocyPackHelper::VPackHash::operator()(VPackSlice const& slice) const {
return slice.normalizedHash();
};
size_t VelocyPackHelper::VPackStringHash::operator()(VPackSlice const& slice) const {
auto const h = slice.head();
VPackValueLength l;
if (h == 0xbf) {
// long UTF-8 String
l = static_cast<VPackValueLength>(
1 + 8 + velocypack::readInteger<VPackValueLength>(slice.begin() + 1, 8));
} else {
l = static_cast<VPackValueLength>(1 + h - 0x40);
}
return velocypack::fasthash64(slice.start(), velocypack::checkOverflow(l),
0xdeadbeef);
size_t VelocyPackHelper::VPackStringHash::operator()(VPackSlice const& slice) const noexcept {
return slice.hashString();
};
bool VelocyPackHelper::VPackEqual::operator()(VPackSlice const& lhs, VPackSlice const& rhs) const {
return VelocyPackHelper::compare(lhs, rhs, false) == 0;
};
bool VelocyPackHelper::VPackStringEqual::operator()(VPackSlice const& lhs, VPackSlice const& rhs) const {
bool VelocyPackHelper::VPackStringEqual::operator()(VPackSlice const& lhs, VPackSlice const& rhs) const noexcept {
auto const lh = lhs.head();
auto const rh = rhs.head();
if (lh != rh) {
return false;
}
VPackValueLength size;
if (lh == 0xbf) {
// long UTF-8 String
size = static_cast<VPackValueLength>(
1 + 8 + velocypack::readInteger<VPackValueLength>(lhs.begin() + 1, 8));
} else {
size = static_cast<VPackValueLength>(1 + lh - 0x40);
}
auto const rh = rhs.head();
if (rh == 0xbf) {
// long UTF-8 String
if (size !=static_cast<VPackValueLength>(
1 + 8 + velocypack::readInteger<VPackValueLength>(rhs.begin() + 1, 8))) {
size = static_cast<VPackValueLength>(velocypack::readInteger<VPackValueLength>(lhs.begin() + 1, 8));
if (size !=static_cast<VPackValueLength>(velocypack::readInteger<VPackValueLength>(rhs.begin() + 1, 8))) {
return false;
}
} else {
if (size != static_cast<VPackValueLength>(1 + rh - 0x40)) {
return false;
}
}
return (memcmp(lhs.start(), rhs.start(),
arangodb::velocypack::checkOverflow(size)) == 0);
return (memcmp(lhs.start() + 1 + 8, rhs.start() + 1 + 8, static_cast<size_t>(size)) == 0);
}
size = static_cast<VPackValueLength>(lh - 0x40);
return (memcmp(lhs.start() + 1, rhs.start() + 1, static_cast<size_t>(size)) == 0);
};
static int TypeWeight(VPackSlice const& slice) {

View File

@ -63,7 +63,7 @@ class VelocyPackHelper {
};
struct VPackStringHash {
size_t operator()(arangodb::velocypack::Slice const&) const;
size_t operator()(arangodb::velocypack::Slice const&) const noexcept;
};
////////////////////////////////////////////////////////////////////////////////
@ -77,7 +77,7 @@ class VelocyPackHelper {
struct VPackStringEqual {
bool operator()(arangodb::velocypack::Slice const&,
arangodb::velocypack::Slice const&) const;
arangodb::velocypack::Slice const&) const noexcept;
};
////////////////////////////////////////////////////////////////////////////////

View File

@ -849,129 +849,6 @@ int TRI_StringifyJson(TRI_string_buffer_t* buffer, TRI_json_t const* object) {
return StringifyJson(buffer->_memoryZone, buffer, object, true);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief prints a json object
////////////////////////////////////////////////////////////////////////////////
bool TRI_PrintJson(int fd, TRI_json_t const* object, bool appendNewline) {
if (object == nullptr) {
// sanity check
return false;
}
TRI_string_buffer_t buffer;
TRI_InitStringBuffer(&buffer, TRI_UNKNOWN_MEM_ZONE);
if (StringifyJson(buffer._memoryZone, &buffer, object, true) !=
TRI_ERROR_NO_ERROR) {
TRI_AnnihilateStringBuffer(&buffer);
return false;
}
if (TRI_LengthStringBuffer(&buffer) == 0) {
// should not happen
return false;
}
if (appendNewline) {
// add the newline here so we only need one write operation in the ideal
// case
TRI_AppendCharStringBuffer(&buffer, '\n');
}
char const* p = TRI_BeginStringBuffer(&buffer);
size_t n = TRI_LengthStringBuffer(&buffer);
while (0 < n) {
ssize_t m = TRI_WRITE(fd, p, (TRI_write_t)n);
if (m <= 0) {
TRI_AnnihilateStringBuffer(&buffer);
return false;
}
n -= m;
p += m;
}
TRI_AnnihilateStringBuffer(&buffer);
return true;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief saves a json object
////////////////////////////////////////////////////////////////////////////////
bool TRI_SaveJson(char const* filename, TRI_json_t const* object,
bool syncFile) {
char* tmp = TRI_Concatenate2String(filename, ".tmp");
if (tmp == nullptr) {
return false;
}
// remove a potentially existing temporary file
if (TRI_ExistsFile(tmp)) {
TRI_UnlinkFile(tmp);
}
int fd = TRI_CREATE(tmp, O_CREAT | O_TRUNC | O_EXCL | O_RDWR | TRI_O_CLOEXEC,
S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
if (fd < 0) {
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG(ERR) << "cannot create json file '" << tmp << "': " << TRI_LAST_ERROR_STR;
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return false;
}
if (!TRI_PrintJson(fd, object, true)) {
TRI_CLOSE(fd);
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG(ERR) << "cannot write to json file '" << tmp << "': " << TRI_LAST_ERROR_STR;
TRI_UnlinkFile(tmp);
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return false;
}
if (syncFile) {
LOG(TRACE) << "syncing tmp file '" << tmp << "'";
if (!TRI_fsync(fd)) {
TRI_CLOSE(fd);
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG(ERR) << "cannot sync saved json '" << tmp << "': " << TRI_LAST_ERROR_STR;
TRI_UnlinkFile(tmp);
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return false;
}
}
int res = TRI_CLOSE(fd);
if (res < 0) {
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG(ERR) << "cannot close saved file '" << tmp << "': " << TRI_LAST_ERROR_STR;
TRI_UnlinkFile(tmp);
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return false;
}
res = TRI_RenameFile(tmp, filename);
if (res != TRI_ERROR_NO_ERROR) {
TRI_set_errno(res);
LOG(ERR) << "cannot rename saved file '" << tmp << "' to '" << filename << "': " << TRI_LAST_ERROR_STR;
TRI_UnlinkFile(tmp);
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return false;
}
TRI_FreeString(TRI_CORE_MEM_ZONE, tmp);
return true;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief copies a json object into a given buffer
////////////////////////////////////////////////////////////////////////////////

View File

@ -288,18 +288,6 @@ bool TRI_ReplaceObjectJson(TRI_memory_zone_t* zone, TRI_json_t* object,
int TRI_StringifyJson(struct TRI_string_buffer_t*, TRI_json_t const* object);
////////////////////////////////////////////////////////////////////////////////
/// @brief prints a json object
////////////////////////////////////////////////////////////////////////////////
bool TRI_PrintJson(int fd, TRI_json_t const*, bool);
////////////////////////////////////////////////////////////////////////////////
/// @brief saves a json object
////////////////////////////////////////////////////////////////////////////////
bool TRI_SaveJson(char const*, TRI_json_t const*, bool);
////////////////////////////////////////////////////////////////////////////////
/// @brief copies a json object into a given buffer
////////////////////////////////////////////////////////////////////////////////

View File

@ -19,7 +19,9 @@ const yaml = require("js-yaml");
const documentationSourceDirs = [
fs.join(fs.makeAbsolute(''), "Documentation/Examples/setup-arangosh.js"),
fs.join(fs.makeAbsolute(''), "Documentation/DocuBlocks"),
fs.join(fs.makeAbsolute(''), "Documentation/Books/Users")
fs.join(fs.makeAbsolute(''), "Documentation/Books/Users"),
fs.join(fs.makeAbsolute(''), "Documentation/Books/AQL"),
fs.join(fs.makeAbsolute(''), "Documentation/Books/HTTP")
];
const theScript = 'utils/generateExamples.py';