
more doc, error handling when loading corrupted index

Frank Celler 2012-03-09 13:56:28 +01:00
parent 1a7b0083eb
commit ffb61fe3c2
31 changed files with 2144 additions and 416 deletions

View File

@ -195,7 +195,7 @@ namespace triagens {
/// @brief escape unicode
///
/// This method escapes a unicode character string by replacing the unicode
/// characters by a \uXXXX sequence.
/// characters by a \\uXXXX sequence.
////////////////////////////////////////////////////////////////////////////////
string escapeUnicode (string const& name, bool escapeSlash = true);

View File

@ -130,7 +130,7 @@ Thread::~Thread () {
TRI_StopThread(&_thread);
}
TRI_DeatchThread(&_thread);
TRI_DetachThread(&_thread);
}
////////////////////////////////////////////////////////////////////////////////

View File

@ -173,7 +173,7 @@ char* TRI_EscapeCString (char const* in, size_t inLength, size_t* outLength);
/// @brief escapes special characters using unicode escapes
///
/// This method escapes a UTF-8 character string by replacing the unprintable
/// characters by a \uXXXX sequence. Set escapeSlash to true in order to also
/// characters by a \\uXXXX sequence. Set escapeSlash to true in order to also
/// escape the character '/'.
////////////////////////////////////////////////////////////////////////////////
@ -182,7 +182,7 @@ char* TRI_EscapeUtf8String (char const* in, size_t inLength, bool escapeSlash, s
////////////////////////////////////////////////////////////////////////////////
/// @brief unescapes unicode escape sequences
///
/// This method decodes a UTF-8 character string by replacing the \uXXXX
/// This method decodes a UTF-8 character string by replacing the \\uXXXX
/// sequence by unicode characters and representing them as UTF-8 sequences.
////////////////////////////////////////////////////////////////////////////////
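A minimal usage sketch for the escape function documented above (assuming the
truncated final parameter is size_t* outLength and that the returned buffer is
released with TRI_Free; both are assumptions, not confirmed by this diff):

/* requires <stdio.h>, <string.h> and the declarations above */
void ExampleEscapeUtf8 (void) {
  char const* in = "key/value \xc3\xa9";  /* "key/value é" in UTF-8 */
  size_t outLength;
  char* escaped;

  /* escapeSlash = true also turns '/' into an escape sequence */
  escaped = TRI_EscapeUtf8String(in, strlen(in), true, &outLength);

  if (escaped != NULL) {
    printf("escaped (%u bytes): %s\n", (unsigned) outLength, escaped);
    TRI_Free(escaped);  /* assumption: the caller owns the buffer */
  }
}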

View File

@ -158,12 +158,19 @@ bool TRI_StartThread (TRI_thread_t* thread, void (*starter)(void*), void* data)
int rc;
d = TRI_Allocate(sizeof(thread_data_t));
if (!d) {
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG_ERROR("could not start thread: %s ", strerror(errno));
return false;
}
d->starter = starter;
d->_data = data;
rc = pthread_create(thread, 0, &ThreadStarter, d);
if (rc != 0) {
TRI_Free(d);
TRI_set_errno(TRI_ERROR_SYS_ERROR);
LOG_ERROR("could not start thread: %s ", strerror(errno));
return false;
@ -184,7 +191,7 @@ void TRI_StopThread (TRI_thread_t* thread) {
/// @brief detaches a thread
////////////////////////////////////////////////////////////////////////////////
void TRI_DeatchThread (TRI_thread_t* thread) {
void TRI_DetachThread (TRI_thread_t* thread) {
pthread_detach(*thread);
}
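
With the new error handling, callers can detect a failed thread start. A
minimal sketch of the start/detach pattern (Worker and its body are
illustrative only):

static void Worker (void* data) {
  /* ... background work on data ... */
}

void StartDetachedWorker (void* data) {
  TRI_thread_t thread;

  /* TRI_StartThread now logs the failure, sets TRI_ERROR_SYS_ERROR,
     and returns false if allocation or pthread_create fails */
  if (! TRI_StartThread(&thread, Worker, data)) {
    return;
  }

  /* fire-and-forget: the thread is never joined */
  TRI_DetachThread(&thread);
}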

View File

@ -116,7 +116,7 @@ void TRI_StopThread (TRI_thread_t* thread);
/// @brief detaches a thread
////////////////////////////////////////////////////////////////////////////////
void TRI_DeatchThread (TRI_thread_t* thread);
void TRI_DetachThread (TRI_thread_t* thread);
////////////////////////////////////////////////////////////////////////////////
/// @brief waits for a thread to finish

View File

@ -0,0 +1,5 @@
avocado> db.examples.parameter();
{
"waitForSync" : false,
"journalSize" : 134217728
}

View File

@ -0,0 +1,5 @@
avocado> db.examples.parameter({ waitForSync : true });
{
"waitForSync" : true,
"journalSize" : 134217728
}

View File

@ -0,0 +1,20 @@
avocado> db.geo.ensureGeoIndex("loc");
162534834
avocado> for (i = -90; i <= 90; i += 10) {
.......> for (j = -180; j <= 180; j += 10) {
.......> db.geo.save({ name : "Name/" + i + "/" + j,
.......> loc: [ i, j ] });
.......> }
.......> }
avocado> db.geo.count();
703
avocado> db.geo.near(0,0).limit(3).toArray();
[ { "_id" : "154092:24861164", "_rev" : 24861164, "name" : "Name/0/0", "loc" : [0, 0]},
{ "_id" : "154092:24926700", "_rev" : 24926700, "name" : "Name/0/10", "loc" : [0, 10]},
{ "_id" : "154092:22436332", "_rev" : 22436332, "name" : "Name/-10/0", "loc" : [-10, 0]}]
avocado> db.geo.near(0,0).count();
100

View File

@ -0,0 +1,30 @@
avocado> db.geo2.ensureGeoIndex("location.latitude", "location.longitude");
23735273
avocado> for (i = -90; i <= 90; i += 10) {
.......> for (j = -180; j <= 180; j += 10) {
.......> db.geo2.save({ name : "Name/" + i + "/" + j,
.......> location: { latitude : i,
.......> longitude : j } });
.......> }
.......> }
avocado> db.geo2.near(0,0).limit(3).toArray();
[{
"_id" : "48126444:72964588",
"_rev" : 72964588,
"location" : { "latitude" : 0, "longitude" : 0},
"name" : "Name/0/0"
},
{
"_id" : "48126444:73030124",
"_rev" : 73030124,
"location" : { "latitude" : 0, "longitude" : 10},
"name" : "Name/0/10"
},
{
"_id" : "48126444:70539756",
"_rev" : 70539756,
"location" : { "latitude" : -10, "longitude" : 0},
"name" : "Name/-10/0"
}]

View File

@ -0,0 +1,2 @@
avocado> db.five.ensureHashIndex("a")
2170279

View File

@ -0,0 +1,14 @@
avocado> db.users.all().toArray();
[ { "_id" : "553063885:554702285", "_rev" : 554702285, "id" : 323, "name" : "Peter" },
{ "_id" : "553063885:554636749", "_rev" : 554636749, "id" : 535, "name" : "Peter" },
{ "_id" : "553063885:554833357", "_rev" : 554833357, "id" : 25, "name" : "Vladimir" } ]
avocado> db.users.select( {"id" : 323 } ).toArray();
[ { "id" : 323, "name" : "Peter", "_id" : "553063885:554702285" } ]
avocado> db.users.select( {"name" : "Peter" } ).toArray();
[ { "id" : 323, "name" : "Peter", "_id" : "553063885:554702285" },
{ "id" : 535, "name" : "Peter", "_id" : "553063885:554636749" } ]
avocado> db.users.select( {"name" : "Peter", "id" : 535 } ).toArray();
[ { "id" : 535, "name" : "Peter", "_id" : "553063885:554636749" } ]

View File

@ -0,0 +1,4 @@
avocado> var a = db.users.select( {"name" : "Peter" } );
avocado> while (a.hasNext()) print(a.next());
{ "id" : 323, "name" : "Peter", "_id" : "553063885:554702285" }
{ "id" : 535, "name" : "Peter", "_id" : "553063885:554636749" }

1450
Doxygen/Scripts/Markdown.pl Executable file

File diff suppressed because it is too large

16
Doxygen/Scripts/md2html.sh Executable file
View File

@ -0,0 +1,16 @@
#!/bin/sh
MARKDOWN="`dirname $0`/Markdown.pl"
INPUT="$1"
if test -z "$INPUT"; then
echo "usage: $0 <file.md>"
exit 1
fi
OUTPUT="`dirname $INPUT`/`basename $INPUT .md`.html"
perl "$MARKDOWN" "$INPUT" \
| sed -r -e "s/href=\"([^\"#]+)([\"#])/href=\"\1\.html\2/g" \
| sed -e "s/href=\"wiki\//href=\"/g" \
| sed -e "s/#wiki-/#/g" > $OUTPUT

View File

@ -17,19 +17,19 @@ Doxygen/.setup-directories:
@touch $@
Doxygen/js/%.c: @srcdir@/js/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/system/%.c: @srcdir@/js/system/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/modules/%.c: @srcdir@/js/system/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/bootstrap/%.c: @srcdir@/js/bootstrap/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/xml/%.md: Doxygen/xml/%.xml
python @top_srcdir@/Doxygen/Scripts/xml2md.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/xml2md.py $< > $@
################################################################################
## Doxygen
@ -48,8 +48,8 @@ doxygen: Doxygen/avocado.doxy $(DOXYGEN)
wiki: $(WIKI)
@test -d Doxygen/wiki || mkdir Doxygen/wiki
for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/pandoc.sh $$w; done
for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/md2html.sh `echo $$w | sed -e 's:/xml/:/wiki/:g'`; done
@for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/pandoc.sh $$w; done
@for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/md2html.sh `echo $$w | sed -e 's:/xml/:/wiki/:g'`; done
################################################################################
## CLEANUP

View File

@ -286,7 +286,10 @@ DOXYGEN = \
################################################################################
WIKI = \
Doxygen/xml/AQL.md \
Doxygen/xml/Basics.md \
Doxygen/xml/DBAdmin.md \
Doxygen/xml/SimpleQueries.md \
Doxygen/xml/Actions.md \
Doxygen/xml/AvocadoScript.md \
Doxygen/xml/CommandLine.md \
@ -296,9 +299,9 @@ WIKI = \
Doxygen/xml/CommandLineScheduler.md \
Doxygen/xml/Compiling.md \
Doxygen/xml/DefineAction.md \
Doxygen/xml/GeoCoordinates.md \
Doxygen/xml/Graphs.md \
Doxygen/xml/HttpInterface.md \
Doxygen/xml/IndexUsage.md \
Doxygen/xml/InstallManual.md \
Doxygen/xml/JSModuleActions.md \
Doxygen/xml/JSModuleConsole.md \
@ -311,7 +314,6 @@ WIKI = \
Doxygen/xml/RefManual.md \
Doxygen/xml/RestDocument.md \
Doxygen/xml/RestSystem.md \
Doxygen/xml/SimpleQueries.md \
Doxygen/xml/StartStop.md \
Doxygen/xml/UserManual.md \
Doxygen/xml/jsUnity.md

View File

@ -806,7 +806,10 @@ DOXYGEN = \
################################################################################
################################################################################
WIKI = \
Doxygen/xml/AQL.md \
Doxygen/xml/Basics.md \
Doxygen/xml/DBAdmin.md \
Doxygen/xml/SimpleQueries.md \
Doxygen/xml/Actions.md \
Doxygen/xml/AvocadoScript.md \
Doxygen/xml/CommandLine.md \
@ -816,9 +819,9 @@ WIKI = \
Doxygen/xml/CommandLineScheduler.md \
Doxygen/xml/Compiling.md \
Doxygen/xml/DefineAction.md \
Doxygen/xml/GeoCoordinates.md \
Doxygen/xml/Graphs.md \
Doxygen/xml/HttpInterface.md \
Doxygen/xml/IndexUsage.md \
Doxygen/xml/InstallManual.md \
Doxygen/xml/JSModuleActions.md \
Doxygen/xml/JSModuleConsole.md \
@ -831,7 +834,6 @@ WIKI = \
Doxygen/xml/RefManual.md \
Doxygen/xml/RestDocument.md \
Doxygen/xml/RestSystem.md \
Doxygen/xml/SimpleQueries.md \
Doxygen/xml/StartStop.md \
Doxygen/xml/UserManual.md \
Doxygen/xml/jsUnity.md
@ -2413,19 +2415,19 @@ Doxygen/.setup-directories:
@touch $@
Doxygen/js/%.c: @srcdir@/js/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/system/%.c: @srcdir@/js/system/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/modules/%.c: @srcdir@/js/system/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/js/bootstrap/%.c: @srcdir@/js/bootstrap/%.js Doxygen/.setup-directories
python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/js2doxy.py $< > $@
Doxygen/xml/%.md: Doxygen/xml/%.xml
python @top_srcdir@/Doxygen/Scripts/xml2md.py $< > $@
@python @top_srcdir@/Doxygen/Scripts/xml2md.py $< > $@
################################################################################
################################################################################
@ -2442,8 +2444,8 @@ doxygen: Doxygen/avocado.doxy $(DOXYGEN)
wiki: $(WIKI)
@test -d Doxygen/wiki || mkdir Doxygen/wiki
for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/pandoc.sh $$w; done
for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/md2html.sh `echo $$w | sed -e 's:/xml/:/wiki/:g'`; done
@for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/pandoc.sh $$w; done
@for w in $(WIKI); do @top_srcdir@/Doxygen/Scripts/md2html.sh `echo $$w | sed -e 's:/xml/:/wiki/:g'`; done
.setup-directories:
@test -d js || mkdir js

View File

@ -194,10 +194,13 @@ void ActionDispatcherThread::run () {
_context->Enter();
DispatcherThread::run();
// free memory
TRI_FreeActionsVocBase();
_context->Exit();
_context.Dispose();
_isolate->Exit();
_isolate->Dispose();
}
@ -310,7 +313,7 @@ void ActionDispatcherThread::initialise () {
LOGGER_FATAL << "cannot load actions from directory '" << loader->getDirectory() << "'";
}
}
// and return from the context
_context->Exit();
_isolate->Exit();

View File

@ -33,6 +33,7 @@
#include <v8.h>
#include "V8/JSLoader.h"
#include "V8/v8-globals.h"
#include "VocBase/vocbase.h"
// -----------------------------------------------------------------------------

View File

@ -65,8 +65,6 @@ using namespace triagens::avocado;
/// <ol>
/// <li>@ref GeoCoordinates
/// </li>
/// <li>@ref Pagination
/// </li>
/// </ol>
/// </li>
/// <li>Vertices, Edges, and Graphs
@ -89,6 +87,17 @@ using namespace triagens::avocado;
/// </li>
/// </ol>
/// </li>
/// <li>@ref DBAdmin
/// <ol>
/// <li>@ref DBAdminDurability
/// </li>
/// <li>@ref DBAdminIndex
/// <ol>
/// <li>@ref DBAdminIndexGeo
/// </ol>
/// </li>
/// </ol>
/// </li>
/// <li>Advanced Topics
/// <ol>
/// <li>Actions
@ -193,6 +202,65 @@ using namespace triagens::avocado;
/// Opens a debug shell instead of starting the HTTP server.
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @page DBAdminTOC
///
/// <ol>
/// <li>@ref DBAdminDurability
/// </li>
/// <li>@ref DBAdminIndex
/// <ol>
/// <li>@ref DBAdminIndexGeo
/// </ol>
/// </li>
/// </ol>
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @page DBAdmin Database Administration
///
/// <hr>
/// @copydetails DBAdminTOC
/// <hr>
///
/// @section DBAdminDurability Durability
///
/// @subsection DBAdminDurability1 Mostly Memory/Durability
///
/// Database documents are kept in main memory; memory-mapped files are used
/// to store them. The operating system can decide to swap sparsely used
/// areas out of the main memory. By default, these memory-mapped files are
/// synced frequently, so that all documents are stored securely (durability).
///
/// @subsection DBAdminDurability2 AppendOnly/MVCC
///
/// Instead of overwriting existing documents, a completely new version of the
/// document is generated. The two benefits are:
///
/// - Objects can be stored coherently and compactly in the main memory.
/// - Objects are preserved; isolated writing and reading transactions allow
///   accessing these objects in parallel.
///
/// The system collects obsolete document versions as garbage. Garbage
/// collection is asynchronous and runs in parallel to other processes.
///
/// @subsection DBAdminDurability3 Configuration
///
/// @copydetails JS_ParameterVocbaseCol
///
/// @section DBAdminIndex Index Management
///
/// @subsection DBAdminIndexHash Hash Indexes
///
/// @copydetails JS_EnsureHashIndexVocbaseCol
///
/// @subsection DBAdminIndexGeo Geo Indexes
///
/// @copydetails JS_EnsureGeoIndexVocbaseCol
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- INSTALLATION MANUAL
// -----------------------------------------------------------------------------
@ -373,5 +441,5 @@ int main (int argc, char* argv[]) {
// Local Variables:
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @\\}\\)"
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @\\}\\)"
// End:

View File

@ -95,7 +95,9 @@ ListenTask::ListenTask (struct addrinfo *aip, bool reuseAddress)
ListenTask::~ListenTask () {
close(listenSocket);
if (listenSocket != -1) {
close(listenSocket);
}
}
// -----------------------------------------------------------------------------

View File

@ -341,6 +341,26 @@ void TRI_CreateActionVocBase (string const& name,
LOG_DEBUG("created action '%s' for queue %s", url.c_str(), queue.c_str());
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free all existing actions
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeActionsVocBase (void) {
TRI_v8_global_t* v8g;
v8g = (TRI_v8_global_t*) v8::Isolate::GetCurrent()->GetData();
WRITE_LOCKER(ActionsLock);
WRITE_LOCKER(v8g->ActionsLock);
map<string, TRI_action_t* >::iterator it;
for (it = Actions.begin(); it != Actions.end(); it++) {
delete (*it).second;
}
Actions.clear();
}
////////////////////////////////////////////////////////////////////////////////
/// @brief looks up an action
////////////////////////////////////////////////////////////////////////////////

View File

@ -227,6 +227,12 @@ void TRI_CreateActionVocBase (std::string const& name,
TRI_action_options_t ao,
v8::Handle<v8::Function> callback);
////////////////////////////////////////////////////////////////////////////////
/// @brief free all existing actions
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeActionsVocBase (void);
////////////////////////////////////////////////////////////////////////////////
/// @brief looks up an action
////////////////////////////////////////////////////////////////////////////////

View File

@ -2572,8 +2572,6 @@ static v8::Handle<v8::Value> JS_DropIndexVocbaseCol (v8::Arguments const& argv)
/// In case that the index was successfully created, the index identifier
/// is returned.
///
/// @verbinclude fluent10
///
/// @FUN{ensureGeoIndex(@FA{location}, @LIT{true})}
///
/// As above with the exception that the order within the list is longitude
@ -2592,7 +2590,15 @@ static v8::Handle<v8::Value> JS_DropIndexVocbaseCol (v8::Arguments const& argv)
/// In case that the index was successfully created, the index identifier
/// is returned.
///
/// @verbinclude fluent14
/// @EXAMPLES
///
/// Create a geo index for a list attribute:
///
/// @verbinclude admin3
///
/// Create a geo index for a hash array attribute:
///
/// @verbinclude admin4
////////////////////////////////////////////////////////////////////////////////
static v8::Handle<v8::Value> JS_EnsureGeoIndexVocbaseCol (v8::Arguments const& argv) {
@ -2675,20 +2681,22 @@ static v8::Handle<v8::Value> JS_EnsureGeoIndexVocbaseCol (v8::Arguments const& a
////////////////////////////////////////////////////////////////////////////////
/// @brief ensures that a hash index exists
///
/// @FUN{ensureHashIndex(@FA{field1}, @FA{field2}, ...,@FA{fieldn})}
/// @FUN{ensureUniqueConstraint(@FA{field1}, @FA{field2}, ..., @FA{fieldn})}
///
/// Creates a hash index on all documents using attributes as paths to the
/// fields. At least one attribute must be given. The value of this attribute
/// must be a list. All documents, which do not have the attribute path or with
/// ore or more values that are not suitable, are ignored.
/// must be a list. All documents which do not have the attribute path, or where
/// one or more values are not suitable, are ignored.
///
/// In case that the index was successfully created, the index identifier
/// is returned.
///
/// @verbinclude fluent14
/// @EXAMPLES
///
/// @verbinclude admin5
////////////////////////////////////////////////////////////////////////////////
static v8::Handle<v8::Value> JS_EnsureHashIndexVocbaseCol (v8::Arguments const& argv) {
static v8::Handle<v8::Value> JS_EnsureUniqueConstraintVocbaseCol (v8::Arguments const& argv) {
v8::HandleScope scope;
v8::Handle<v8::String> err;
@ -2749,7 +2757,7 @@ static v8::Handle<v8::Value> JS_EnsureHashIndexVocbaseCol (v8::Arguments const&
v8::Handle<v8::Value> argument = argv[j];
if (! argument->IsString() ) {
errorString = "invalid parameter passed to ensureHashIndex(...) command";
errorString = "invalid parameter passed to ensureUniqueConstraint(...) command";
ok = false;
break;
}
@ -2762,12 +2770,12 @@ static v8::Handle<v8::Value> JS_EnsureHashIndexVocbaseCol (v8::Arguments const&
char* cArgument = *argumentString == 0 ? 0 : TRI_DuplicateString(*argumentString);
if (cArgument == NULL) {
errorString = "insuffient memory to complete ensureHashIndex(...) command";
errorString = "insuffient memory to complete ensureUniqueConstraint(...) command";
ok = false;
break;
}
TRI_PushBackVector(&attributes,&cArgument);
TRI_PushBackVector(&attributes, &cArgument);
}
// .............................................................................
@ -2781,7 +2789,7 @@ static v8::Handle<v8::Value> JS_EnsureHashIndexVocbaseCol (v8::Arguments const&
char* right = *((char**) (TRI_AtVector(&attributes, k)));
if (TRI_EqualString(left, right)) {
errorString = "duplicate parameters sent to ensureHashIndex(...) command";
errorString = "duplicate parameters sent to ensureUniqueConstraint(...) command";
ok = false;
break;
}
@ -2880,7 +2888,7 @@ static v8::Handle<v8::Value> JS_EnsureMultiHashIndexVocbaseCol (v8::Arguments co
// .............................................................................
if (argv.Length() == 0) {
return scope.Close(v8::ThrowException(v8::String::New("one or more string parameters required for the ensureHashIndex(...) command")));
return scope.Close(v8::ThrowException(v8::String::New("one or more string parameters required for the ensureUniqueConstraint(...) command")));
}
// .............................................................................
@ -3428,7 +3436,7 @@ static v8::Handle<v8::Value> JS_LoadVocbaseCol (v8::Arguments const& argv) {
///
/// @FUN{parameter()}
///
/// Returns the collection parameter.
/// Returns an object containing all collection parameters.
///
/// - @LIT{waitForSync}: If @LIT{true} creating a document will only return
/// after the data was synced to disk.
@ -3436,15 +3444,15 @@ static v8::Handle<v8::Value> JS_LoadVocbaseCol (v8::Arguments const& argv) {
///
/// @FUN{parameter(@FA{parameter-array})}
///
/// Changes the collection parameter.
/// Changes the collection parameters.
///
/// @EXAMPLES
///
/// Read the parameter
/// Read all parameters
///
/// @verbinclude admin1
///
/// Write the parameter
/// Change a parameter
///
/// @verbinclude admin2
////////////////////////////////////////////////////////////////////////////////
@ -4540,10 +4548,10 @@ void TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocbase_t* vocbas
v8::Handle<v8::String> DropIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("dropIndex"));
v8::Handle<v8::String> EdgesFuncName = v8::Persistent<v8::String>::New(v8::String::New("edges"));
v8::Handle<v8::String> EnsureGeoIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureGeoIndex"));
v8::Handle<v8::String> EnsureHashIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureHashIndex"));
v8::Handle<v8::String> EnsureMultiHashIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureMultiHashIndex"));
v8::Handle<v8::String> EnsureSkiplistIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureSLIndex"));
v8::Handle<v8::String> EnsureMultiSkiplistIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureMultiSLIndex"));
v8::Handle<v8::String> EnsureSkiplistIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureSLIndex"));
v8::Handle<v8::String> EnsureUniqueConstraintFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureUniqueConstraint"));
v8::Handle<v8::String> ExecuteFuncName = v8::Persistent<v8::String>::New(v8::String::New("execute"));
v8::Handle<v8::String> FiguresFuncName = v8::Persistent<v8::String>::New(v8::String::New("figures"));
v8::Handle<v8::String> GetIndexesFuncName = v8::Persistent<v8::String>::New(v8::String::New("getIndexes"));
@ -4676,10 +4684,10 @@ void TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocbase_t* vocbas
rt->Set(DocumentFuncName, v8::FunctionTemplate::New(JS_DocumentQuery));
rt->Set(DropIndexFuncName, v8::FunctionTemplate::New(JS_DropIndexVocbaseCol));
rt->Set(EnsureGeoIndexFuncName, v8::FunctionTemplate::New(JS_EnsureGeoIndexVocbaseCol));
rt->Set(EnsureHashIndexFuncName, v8::FunctionTemplate::New(JS_EnsureHashIndexVocbaseCol));
rt->Set(EnsureMultiHashIndexFuncName, v8::FunctionTemplate::New(JS_EnsureMultiHashIndexVocbaseCol));
rt->Set(EnsureMultiSkiplistIndexFuncName, v8::FunctionTemplate::New(JS_EnsureMultiSkiplistIndexVocbaseCol));
rt->Set(EnsureSkiplistIndexFuncName, v8::FunctionTemplate::New(JS_EnsureSkiplistIndexVocbaseCol));
rt->Set(EnsureUniqueConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueConstraintVocbaseCol));
rt->Set(FiguresFuncName, v8::FunctionTemplate::New(JS_FiguresVocbaseCol));
rt->Set(GetIndexesFuncName, v8::FunctionTemplate::New(JS_GetIndexesVocbaseCol));
rt->Set(LoadFuncName, v8::FunctionTemplate::New(JS_LoadVocbaseCol));
@ -4711,10 +4719,10 @@ void TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocbase_t* vocbas
rt->Set(DocumentFuncName, v8::FunctionTemplate::New(JS_DocumentQuery));
rt->Set(DropIndexFuncName, v8::FunctionTemplate::New(JS_DropIndexVocbaseCol));
rt->Set(EnsureGeoIndexFuncName, v8::FunctionTemplate::New(JS_EnsureGeoIndexVocbaseCol));
rt->Set(EnsureHashIndexFuncName, v8::FunctionTemplate::New(JS_EnsureHashIndexVocbaseCol));
rt->Set(EnsureMultiHashIndexFuncName, v8::FunctionTemplate::New(JS_EnsureMultiHashIndexVocbaseCol));
rt->Set(EnsureMultiSkiplistIndexFuncName, v8::FunctionTemplate::New(JS_EnsureMultiSkiplistIndexVocbaseCol));
rt->Set(EnsureSkiplistIndexFuncName, v8::FunctionTemplate::New(JS_EnsureSkiplistIndexVocbaseCol));
rt->Set(EnsureUniqueConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueConstraintVocbaseCol));
rt->Set(FiguresFuncName, v8::FunctionTemplate::New(JS_FiguresVocbaseCol));
rt->Set(GetIndexesFuncName, v8::FunctionTemplate::New(JS_GetIndexesVocbaseCol));
rt->Set(LoadFuncName, v8::FunctionTemplate::New(JS_LoadVocbaseCol));
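After this rename, the collection method is registered as
ensureUniqueConstraint, so the earlier admin5 transcript would read
(hypothetical output, following the renamed API):

avocado> db.five.ensureUniqueConstraint("a")
2170279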

View File

@ -118,16 +118,6 @@
/// - look at all the @ref JavaScriptFunc
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @page GeoCoordinates Geo Coordinates
///
/// @section EnsureGeoIndex Create a Geo-Spatial Index
///
/// First create an index.
///
/// @copydetails JS_EnsureGeoIndexVocbaseCol
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @page JavaScriptFuncIndex JavaScript Function Index
///

View File

@ -693,7 +693,7 @@ bool TRI_IterateCollection (TRI_collection_t* collection,
////////////////////////////////////////////////////////////////////////////////
void TRI_IterateIndexCollection (TRI_collection_t* collection,
void (*iterator)(char const* filename, void*),
bool (*iterator)(char const* filename, void*),
void* data) {
size_t n;
size_t i;
@ -703,9 +703,16 @@ void TRI_IterateIndexCollection (TRI_collection_t* collection,
for (i = 0; i < n; ++i) {
char const* filename;
bool ok;
filename = collection->_indexFiles._buffer[i];
iterator(filename, data);
ok = iterator(filename, data);
if (! ok) {
LOG_ERROR("cannot load index '%s' for collection '%s'",
filename,
collection->_name);
}
}
}
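
Index iteration now propagates failure per file. A sketch of a callback
matching the new bool (*iterator)(char const*, void*) signature (the function
is illustrative; TRI_JsonFile is used as in OpenIndex further below):

static bool CheckIndexFile (char const* filename, void* data) {
  char* error;
  TRI_json_t* json;

  /* try to parse the index definition; NULL means corrupted/unreadable */
  json = TRI_JsonFile(filename, &error);

  if (json == NULL) {
    return false;  /* TRI_IterateIndexCollection will log the error */
  }

  TRI_FreeJson(json);
  return true;
}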

View File

@ -316,7 +316,7 @@ bool TRI_IterateCollection (TRI_collection_t*,
////////////////////////////////////////////////////////////////////////////////
void TRI_IterateIndexCollection (TRI_collection_t* collection,
void (*iterator)(char const* filename, void*),
bool (*iterator)(char const* filename, void*),
void* data);
////////////////////////////////////////////////////////////////////////////////

View File

@ -277,7 +277,7 @@ static TRI_vector_string_t* GetFieldsIndex (const TRI_idx_type_e indexType,
}
else {
// read number of fields
strVal = TRI_LookupArrayJson(json, "field_count");
strVal = TRI_LookupArrayJson(json, "fieldCount");
if (!strVal || strVal->_type != TRI_JSON_NUMBER) {
return fields;
}
@ -1119,62 +1119,14 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t* idx,
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief attempts to locate an entry in the hash index
////////////////////////////////////////////////////////////////////////////////
// .............................................................................
// Warning: who ever calls this function is responsible for destroying
// HashIndexElements* results
// .............................................................................
HashIndexElements* TRI_LookupHashIndex(TRI_index_t* idx, TRI_json_t* parameterList) {
TRI_hash_index_t* hashIndex;
HashIndexElements* result;
HashIndexElement element;
TRI_shaper_t* shaper;
size_t j;
element.numFields = parameterList->_value._objects._length;
element.fields = TRI_Allocate( sizeof(TRI_json_t) * element.numFields);
if (element.fields == NULL) {
LOG_WARNING("out-of-memory in LookupHashIndex");
return NULL;
}
hashIndex = (TRI_hash_index_t*) idx;
shaper = hashIndex->base._collection->_shaper;
for (j = 0; j < element.numFields; ++j) {
TRI_json_t* jsonObject = (TRI_json_t*) (TRI_AtVector(&(parameterList->_value._objects),j));
TRI_shaped_json_t* shapedObject = TRI_ShapedJsonJson(shaper, jsonObject);
element.fields[j] = *shapedObject;
TRI_Free(shapedObject);
}
if (hashIndex->_unique) {
result = HashIndex_find(hashIndex->_hashIndex, &element);
}
else {
result = MultiHashIndex_find(hashIndex->_hashIndex, &element);
}
for (j = 0; j < element.numFields; ++j) {
TRI_DestroyShapedJson(element.fields + j);
}
TRI_Free(element.fields);
return result;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief helper for hashing
////////////////////////////////////////////////////////////////////////////////
static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
HashIndexElement* hashElement,
const TRI_doc_mptr_t* document,
const TRI_shaped_json_t* shapedDoc) {
static bool HashIndexHelper (const TRI_hash_index_t* hashIndex,
HashIndexElement* hashElement,
const TRI_doc_mptr_t* document,
const TRI_shaped_json_t* shapedDoc) {
union { void* p; void const* c; } cnv;
TRI_shaped_json_t shapedObject;
TRI_shape_access_t* acc;
@ -1189,8 +1141,7 @@ static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
// ..........................................................................
hashElement->data = NULL;
for (j = 0; j < hashIndex->_shapeList->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(hashIndex->_shapeList,j)));
@ -1198,7 +1149,9 @@ static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
// ..........................................................................
// Determine if document has that particular shape
// ..........................................................................
acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, shapedDoc->_sid, shape);
if (acc == NULL || acc->_shape == NULL) {
if (acc != NULL) {
TRI_FreeShapeAccessor(acc);
@ -1206,67 +1159,67 @@ static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
TRI_Free(hashElement->fields);
return false;
}
// ..........................................................................
// Extract the field
// ..........................................................................
if (! TRI_ExecuteShapeAccessor(acc, shapedDoc, &shapedObject)) {
TRI_FreeShapeAccessor(acc);
TRI_Free(hashElement->fields);
return false;
}
// ..........................................................................
// Store the json shaped Object -- this is what will be hashed
// ..........................................................................
hashElement->fields[j] = shapedObject;
TRI_FreeShapeAccessor(acc);
} // end of for loop
}
else if (document != NULL) {
// ..........................................................................
// Assign the document to the HashIndexElement structure - so that it can later
// be retrieved.
// ..........................................................................
cnv.c = document;
hashElement->data = cnv.p;
for (j = 0; j < hashIndex->_shapeList->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(hashIndex->_shapeList,j)));
// ..........................................................................
// Determine if document has that particular shape
// ..........................................................................
acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, document->_document._sid, shape);
if (acc == NULL || acc->_shape == NULL) {
if (acc != NULL) {
TRI_FreeShapeAccessor(acc);
}
TRI_Free(hashElement->fields);
return false;
}
// ..........................................................................
// Extract the field
// ..........................................................................
if (! TRI_ExecuteShapeAccessor(acc, &(document->_document), &shapedObject)) {
TRI_FreeShapeAccessor(acc);
TRI_Free(hashElement->fields);
return false;
}
/* start oreste:
#ifdef DEBUG_ORESTE
TRI_json_t* object;
TRI_string_buffer_t buffer;
TRI_InitStringBuffer(&buffer);
@ -1277,12 +1230,14 @@ static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
TRI_DestroyStringBuffer(&buffer);
TRI_Free(object);
object = NULL;
end oreste */
#endif
// ..........................................................................
// Store the field
// ..........................................................................
hashElement->fields[j] = shapedObject;
TRI_FreeShapeAccessor(acc);
} // end of for loop
}
@ -1292,18 +1247,13 @@ static bool HashIndexHelper(const TRI_hash_index_t* hashIndex,
}
return true;
} // end of static function HashIndexHelper
}
////////////////////////////////////////////////////////////////////////////////
/// @brief hash indexes a document
////////////////////////////////////////////////////////////////////////////////
static bool InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
@ -1312,12 +1262,13 @@ static bool InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
// ............................................................................
// Obtain the hash index structure
// ............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in InsertHashIndex");
return false;
}
// ............................................................................
// Allocate storage to shaped json objects stored as a simple list.
@ -1326,17 +1277,18 @@ static bool InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
hashElement.numFields = hashIndex->_shapeList->_length;
hashElement.fields = TRI_Allocate( sizeof(TRI_shaped_json_t) * hashElement.numFields);
if (hashElement.fields == NULL) {
LOG_WARNING("out-of-memory in InsertHashIndex");
return false;
}
ok = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
if (!ok) {
return false;
}
// ............................................................................
// Fill the json field list from the document for unique hash index
// ............................................................................
@ -1353,7 +1305,6 @@ static bool InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
if (res == -1) {
LOG_WARNING("found duplicate entry in hash-index, should not happen");
}
@ -1367,13 +1318,11 @@ static bool InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
return res == 0;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief describes a hash index as a json object
////////////////////////////////////////////////////////////////////////////////
static TRI_json_t* JsonHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collection) {
TRI_json_t* json;
const TRI_shape_path_t* path;
TRI_hash_index_t* hashIndex;
@ -1384,45 +1333,52 @@ static TRI_json_t* JsonHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
// ..........................................................................
// Recast as a hash index
// ..........................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (hashIndex == NULL) {
return NULL;
}
// ..........................................................................
// Allocate sufficient memory for the field list
// ..........................................................................
fieldList = TRI_Allocate( (sizeof(char*) * hashIndex->_shapeList->_length) );
if (fieldList == NULL) {
return NULL;
}
// ..........................................................................
// Convert the attributes (field list of the hash index) into strings
// ..........................................................................
for (j = 0; j < hashIndex->_shapeList->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(hashIndex->_shapeList,j)));
path = collection->_shaper->lookupAttributePathByPid(collection->_shaper, shape);
if (path == NULL) {
TRI_Free(fieldList);
return NULL;
}
fieldList[j] = ((const char*) path) + sizeof(TRI_shape_path_t) + path->_aidLength * sizeof(TRI_shape_aid_t);
}
// ..........................................................................
// create json object and fill it
// ..........................................................................
json = TRI_CreateArrayJson();
if (!json) {
TRI_Free(fieldList);
return NULL;
}
fieldCounter = TRI_Allocate(30);
if (!fieldCounter) {
TRI_Free(fieldList);
TRI_FreeJson(json);
@ -1432,7 +1388,8 @@ static TRI_json_t* JsonHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
TRI_Insert2ArrayJson(json, "iid", TRI_CreateNumberJson(idx->_iid));
TRI_Insert2ArrayJson(json, "unique", TRI_CreateBooleanJson(hashIndex->_unique));
TRI_Insert2ArrayJson(json, "type", TRI_CreateStringCopyJson("hash"));
TRI_Insert2ArrayJson(json, "field_count", TRI_CreateNumberJson(hashIndex->_shapeList->_length));
TRI_Insert2ArrayJson(json, "fieldCount", TRI_CreateNumberJson(hashIndex->_shapeList->_length));
for (j = 0; j < hashIndex->_shapeList->_length; ++j) {
sprintf(fieldCounter,"field_%lu",j);
TRI_Insert2ArrayJson(json, fieldCounter, TRI_CreateStringCopyJson(fieldList[j]));
@ -1444,34 +1401,33 @@ static TRI_json_t* JsonHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
return json;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief removes a document from a hash index
////////////////////////////////////////////////////////////////////////////////
static bool RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
bool result;
// ............................................................................
// Obtain the hash index structure
// ............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in RemoveHashIndex");
return false;
}
// ............................................................................
// Allocate some memory for the HashIndexElement structure
// ............................................................................
hashElement.numFields = hashIndex->_shapeList->_length;
hashElement.fields = TRI_Allocate( sizeof(TRI_shaped_json_t) * hashElement.numFields);
if (hashElement.fields == NULL) {
LOG_WARNING("out-of-memory in InsertHashIndex");
return false;
@ -1480,10 +1436,10 @@ static bool RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
// ..........................................................................
// Fill the json field list from the document
// ..........................................................................
if (!HashIndexHelper(hashIndex, &hashElement, doc, NULL)) {
if (! HashIndexHelper(hashIndex, &hashElement, doc, NULL)) {
return false;
}
// ............................................................................
// Attempt the removal for unique hash indexes
@ -1501,12 +1457,15 @@ static bool RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
result = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
}
return result;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief updates a document from a hash index
////////////////////////////////////////////////////////////////////////////////
static bool UpdateHashIndex (TRI_index_t* idx, const TRI_doc_mptr_t* newDoc,
static bool UpdateHashIndex (TRI_index_t* idx,
const TRI_doc_mptr_t* newDoc,
const TRI_shaped_json_t* oldDoc) {
// ..........................................................................
@ -1519,81 +1478,76 @@ static bool UpdateHashIndex (TRI_index_t* idx, const TRI_doc_mptr_t* newDoc,
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
// ............................................................................
// Obtain the hash index structure
// ............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in UpdateHashIndex");
return false;
}
// ............................................................................
// Allocate some memory for the HashIndexElement structure
// ............................................................................
hashElement.numFields = hashIndex->_shapeList->_length;
hashElement.fields = TRI_Allocate( sizeof(TRI_shaped_json_t) * hashElement.numFields);
if (hashElement.fields == NULL) {
LOG_WARNING("out-of-memory in UpdateHashIndex");
return false;
}
// ............................................................................
// Update for unique hash index
// ............................................................................
// ............................................................................
// Fill in the fields with the values from oldDoc
// ............................................................................
if (hashIndex->_unique) {
if (HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc)) {
// ............................................................................
// We must fill the hashElement with the value of the document shape -- this
// is necessary when we attempt to remove non-unique hash indexes.
// ............................................................................
cnv.c = newDoc; // we are assuming here that the doc ptr does not change
hashElement.data = cnv.p;
// ............................................................................
// Remove the hash index entry and return.
// ............................................................................
if (!HashIndex_remove(hashIndex->_hashIndex, &hashElement)) {
LOG_WARNING("could not remove old document from hash index in UpdateHashIndex");
}
}
// ............................................................................
// Fill the json simple list from the document
// ............................................................................
if (!HashIndexHelper(hashIndex, &hashElement, newDoc, NULL)) {
if (! HashIndexHelper(hashIndex, &hashElement, newDoc, NULL)) {
// ..........................................................................
// probably fields do not match
// ..........................................................................
return false;
}
// ............................................................................
// Attempt to add the hash entry from the new doc
// ............................................................................
res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
// ............................................................................
// Update for non-unique hash index
// ............................................................................
@ -1605,39 +1559,42 @@ static bool UpdateHashIndex (TRI_index_t* idx, const TRI_doc_mptr_t* newDoc,
// ............................................................................
if (HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc)) {
// ............................................................................
// We must fill the hashElement with the value of the document shape -- this
// is necessary when we attempt to remove non-unique hash indexes.
// ............................................................................
cnv.c = newDoc;
hashElement.data = cnv.p;
// ............................................................................
// Remove the hash index entry and return.
// ............................................................................
if (!MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement)) {
if (! MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement)) {
LOG_WARNING("could not remove old document from hash index in UpdateHashIndex");
}
}
// ............................................................................
// Fill the shaped json simple list from the document
// ............................................................................
if (!HashIndexHelper(hashIndex, &hashElement, newDoc, NULL)) {
// ..........................................................................
// probably fields do not match
// ..........................................................................
return false;
}
// ............................................................................
// Attempt to add the hash entry from the new doc
// ............................................................................
res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
if (res == -1) {
@ -1652,7 +1609,19 @@ static bool UpdateHashIndex (TRI_index_t* idx, const TRI_doc_mptr_t* newDoc,
return res == 0;
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- constructors and destructors
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a hash index
@ -1682,15 +1651,19 @@ TRI_index_t* TRI_CreateHashIndex (struct TRI_doc_collection_s* collection,
// ...........................................................................
// Copy the contents of the shape list vector into a new vector and store this
// ...........................................................................
hashIndex->_shapeList = TRI_Allocate(sizeof(TRI_vector_t));
if (!hashIndex->_shapeList) {
TRI_Free(hashIndex);
return NULL;
}
TRI_InitVector(hashIndex->_shapeList, sizeof(TRI_shape_pid_t));
for (j = 0; j < shapeList->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(shapeList,j)));
TRI_PushBackVector(hashIndex->_shapeList,&shape);
}
@ -1704,13 +1677,88 @@ TRI_index_t* TRI_CreateHashIndex (struct TRI_doc_collection_s* collection,
return &hashIndex->base;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the memory allocated, but does not free the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_DestroyHashIndex (TRI_index_t* idx) {
LOG_ERROR("TRI_DestroyHashIndex not implemented");
}
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the memory allocated and frees the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeHashIndex (TRI_index_t* idx) {
TRI_DestroyHashIndex(idx);
TRI_Free(idx);
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- public functions
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief attempts to locate an entry in the hash index
///
/// @warning whoever calls this function is responsible for destroying
/// HashIndexElements* results
////////////////////////////////////////////////////////////////////////////////
HashIndexElements* TRI_LookupHashIndex(TRI_index_t* idx, TRI_json_t* parameterList) {
TRI_hash_index_t* hashIndex;
HashIndexElements* result;
HashIndexElement element;
TRI_shaper_t* shaper;
size_t j;
element.numFields = parameterList->_value._objects._length;
element.fields = TRI_Allocate( sizeof(TRI_json_t) * element.numFields);
if (element.fields == NULL) {
LOG_WARNING("out-of-memory in LookupHashIndex");
return NULL;
}
hashIndex = (TRI_hash_index_t*) idx;
shaper = hashIndex->base._collection->_shaper;
for (j = 0; j < element.numFields; ++j) {
TRI_json_t* jsonObject = (TRI_json_t*) (TRI_AtVector(&(parameterList->_value._objects),j));
TRI_shaped_json_t* shapedObject = TRI_ShapedJsonJson(shaper, jsonObject);
element.fields[j] = *shapedObject;
TRI_Free(shapedObject);
}
if (hashIndex->_unique) {
result = HashIndex_find(hashIndex->_hashIndex, &element);
}
else {
result = MultiHashIndex_find(hashIndex->_hashIndex, &element);
}
for (j = 0; j < element.numFields; ++j) {
TRI_DestroyShapedJson(element.fields + j);
}
TRI_Free(element.fields);
return result;
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- SKIPLIST INDEX
@ -2032,7 +2080,7 @@ static TRI_json_t* JsonSkiplistIndex (TRI_index_t* idx, TRI_doc_collection_t* co
TRI_Insert2ArrayJson(json, "iid", TRI_CreateNumberJson(idx->_iid));
TRI_Insert2ArrayJson(json, "unique", TRI_CreateBooleanJson(skiplistIndex->_unique));
TRI_Insert2ArrayJson(json, "type", TRI_CreateStringCopyJson("skiplist"));
TRI_Insert2ArrayJson(json, "field_count", TRI_CreateNumberJson(skiplistIndex->_shapeList->_length));
TRI_Insert2ArrayJson(json, "fieldCount", TRI_CreateNumberJson(skiplistIndex->_shapeList->_length));
for (j = 0; j < skiplistIndex->_shapeList->_length; ++j) {
sprintf(fieldCounter,"field_%lu",j);
TRI_Insert2ArrayJson(json, fieldCounter, TRI_CreateStringCopyJson(fieldList[j]));
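
With the field_count to fieldCount rename, an index description file produced
by JsonHashIndex would look roughly like this (the values are illustrative;
only the key names are taken from the code above):

{
  "iid" : 2170279,
  "unique" : true,
  "type" : "hash",
  "fieldCount" : 2,
  "field_0" : "name",
  "field_1" : "id"
}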

View File

@ -321,9 +321,6 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t*,
/// @{
////////////////////////////////////////////////////////////////////////////////
HashIndexElements* TRI_LookupHashIndex (TRI_index_t*, TRI_json_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a hash-index
////////////////////////////////////////////////////////////////////////////////
@ -332,8 +329,37 @@ TRI_index_t* TRI_CreateHashIndex (struct TRI_doc_collection_s*,
TRI_vector_t* shapeList,
bool unique);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the memory allocated, but does not free the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_DestroyHashIndex (TRI_index_t* idx);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the memory allocated and frees the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeHashIndex (TRI_index_t* idx);
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- public functions
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief attempts to locate an entry in the hash index
////////////////////////////////////////////////////////////////////////////////
HashIndexElements* TRI_LookupHashIndex (TRI_index_t*, TRI_json_t*);
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

View File

@ -1152,66 +1152,82 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
/// @brief iterator for index open
////////////////////////////////////////////////////////////////////////////////
static void OpenIndex (char const* filename, void* data) {
static bool OpenIndex (char const* filename, void* data) {
TRI_idx_iid_t iid;
TRI_index_t* idx;
TRI_json_t* fieldCount;
TRI_json_t* fieldStr;
TRI_json_t* gjs;
TRI_json_t* iis;
TRI_json_t* json;
TRI_json_t* lat;
TRI_json_t* loc;
TRI_json_t* lon;
TRI_json_t* type;
TRI_json_t* fieldCount;
TRI_json_t* fieldStr;
TRI_vector_t attributes;
TRI_sim_collection_t* doc;
bool geoJson;
char const* typeStr;
char* error;
char* fieldChar;
int intCount;
bool ok;
TRI_vector_t attributes;
bool uniqueIndex;
char const* typeStr;
char fieldChar[30];
char* error;
int intCount;
// load json description of the index
json = TRI_JsonFile(filename, &error);
// simple collection of the index
doc = (TRI_sim_collection_t*) data;
// json must be a index description
if (json == NULL) {
LOG_ERROR("cannot read index definition from '%s': %s", filename, error);
return;
return false;
}
if (json->_type != TRI_JSON_ARRAY) {
LOG_ERROR("cannot read index definition from '%s': expecting an array", filename);
TRI_FreeJson(json);
return;
return false;
}
// extract the type
type = TRI_LookupArrayJson(json, "type");
if (type->_type != TRI_JSON_STRING) {
LOG_ERROR("cannot read index definition from '%s': expecting a string for type", filename);
TRI_FreeJson(json);
return;
return false;
}
typeStr = type->_value._string.data;
doc = (TRI_sim_collection_t*) data;
// extract the index identifier
iis = TRI_LookupArrayJson(json, "iid");
if (iis != NULL && iis->_type == TRI_JSON_NUMBER) {
iid = iis->_value._number;
}
else {
LOG_ERROR("ignore hash-index, index identifier could not be located");
return false;
}
// ...........................................................................
// geo index
// GEO INDEX
// ...........................................................................
if (TRI_EqualString(typeStr, "geo")) {
TRI_json_t* lat;
TRI_json_t* loc;
TRI_json_t* lon;
bool geoJson;
loc = TRI_LookupArrayJson(json, "location");
lat = TRI_LookupArrayJson(json, "latitude");
lon = TRI_LookupArrayJson(json, "longitude");
iis = TRI_LookupArrayJson(json, "iid");
gjs = TRI_LookupArrayJson(json, "geoJson");
iid = 0;
geoJson = false;
if (iis != NULL && iis->_type == TRI_JSON_NUMBER) {
iid = iis->_value._number;
}
if (gjs != NULL && gjs->_type == TRI_JSON_BOOLEAN) {
geoJson = gjs->_value._boolean;
}
@ -1223,215 +1239,175 @@ static void OpenIndex (char const* filename, void* data) {
CreateGeoIndexSimCollection(doc, NULL, lat->_value._string.data, lon->_value._string.data, false, iid);
}
else {
LOG_WARNING("ignore geo-index, need either 'location' or 'latitude' and 'longitude'");
}
}
LOG_ERROR("ignore geo-index %lu, need either 'location' or 'latitude' and 'longitude'",
(unsigned long) iid);
TRI_FreeJson(json);
return false;
}
TRI_FreeJson(json);
return true;
}
// ...........................................................................
// Hash Index
// HASH INDEX
// ...........................................................................
else if (TRI_EqualString(typeStr, "hash")) {
// .........................................................................
// Initialise the ok value
// .........................................................................
ok = true;
// .........................................................................
// Initialise the vector in which we store the fields on which the hashing
// will be based.
// .........................................................................
TRI_InitVector(&attributes, sizeof(char*));
// .........................................................................
// Determine the id of the hash index
// .........................................................................
if (ok) {
iis = TRI_LookupArrayJson(json, "iid");
iid = 0;
if (iis != NULL && iis->_type == TRI_JSON_NUMBER) {
iid = iis->_value._number;
}
else {
LOG_WARNING("ignore hash-index, id could not be located");
ok = false;
}
}
// .........................................................................
// Determine if the hash index is unique or non-unique
// .........................................................................
if (ok) {
gjs = TRI_LookupArrayJson(json, "unique");
uniqueIndex = false;
if (gjs != NULL && gjs->_type == TRI_JSON_BOOLEAN) {
uniqueIndex = gjs->_value._boolean;
}
else {
LOG_WARNING("ignore hash-index, could not determine if unique or non-unique");
ok = false;
}
gjs = TRI_LookupArrayJson(json, "unique");
uniqueIndex = false;
if (gjs != NULL && gjs->_type == TRI_JSON_BOOLEAN) {
uniqueIndex = gjs->_value._boolean;
}
else {
LOG_ERROR("ignore hash-index %lu, could not determine if unique or non-unique",
(unsigned long) iid);
TRI_FreeJson(json);
return false;
}
// .........................................................................
// Extract the list of fields
// .........................................................................
if (ok) {
fieldCount = 0;
fieldCount = TRI_LookupArrayJson(json, "field_count");
intCount = 0;
if ( (fieldCount != NULL) && (fieldCount->_type == TRI_JSON_NUMBER) ) {
intCount = fieldCount->_value._number;
}
if (intCount < 1) {
LOG_WARNING("ignore hash-index, field count missing");
ok = false;
}
}
if (ok) {
fieldChar = TRI_Allocate(30);
if (fieldChar == NULL) {
LOG_WARNING("ignore hash-index, field count missing");
ok = false;
}
}
if (ok) {
for (int j = 0; j < intCount; ++j) {
sprintf(fieldChar,"field_%i",j);
fieldStr = TRI_LookupArrayJson(json, fieldChar);
if (fieldStr->_type != TRI_JSON_STRING) {
LOG_WARNING("ignore hash-index, invalid field name for hash index");
ok = false;
break;
}
TRI_PushBackVector(&attributes, &(fieldStr->_value._string.data));
}
TRI_Free(fieldChar);
}
if (ok) {
CreateHashIndexSimCollection (doc, &attributes, iid, uniqueIndex);
}
// .........................................................................
// Free the vector
// .........................................................................
TRI_DestroyVector(&attributes);
}
fieldCount = 0;
fieldCount = TRI_LookupArrayJson(json, "fieldCount");
intCount = 0;
if ( (fieldCount != NULL) && (fieldCount->_type == TRI_JSON_NUMBER) ) {
intCount = fieldCount->_value._number;
}
if (intCount < 1) {
LOG_ERROR("ignore hash-index %lu, field count missing", (unsigned long) iid);
TRI_FreeJson(json);
return false;
}
// Initialise the vector in which we store the fields on which the hashing
// will be based.
TRI_InitVector(&attributes, sizeof(char*));
// find fields
for (int j = 0; j < intCount; ++j) {
sprintf(fieldChar, "field_%i", j);
fieldStr = TRI_LookupArrayJson(json, fieldChar);
if (fieldStr == NULL || fieldStr->_type != TRI_JSON_STRING) {
LOG_ERROR("ignore hash-index %lu, invalid field name for hash index",
(unsigned long) iid);
TRI_DestroyVector(&attributes);
TRI_FreeJson(json);
return false;
}
TRI_PushBackVector(&attributes, &(fieldStr->_value._string.data));
}
// create the index
idx = CreateHashIndexSimCollection (doc, &attributes, iid, uniqueIndex);
TRI_DestroyVector(&attributes);
TRI_FreeJson(json);
if (idx == NULL) {
LOG_ERROR("cannot create hash index %lu", (unsigned long) iid);
return false;
}
return true;
}
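// For illustration (hypothetical attribute names): a definition accepted by
// this branch:
//   { "type": "hash", "iid": 164054969, "unique": true,
//     "fieldCount": 2, "field_0": "name", "field_1": "age" }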
// ...........................................................................
// SKIPLIST INDEX
// ...........................................................................
else if (TRI_EqualString(typeStr, "skiplist")) {
// Determine if the skiplist index is unique or non-unique
gjs = TRI_LookupArrayJson(json, "unique");
uniqueIndex = false;
if (gjs != NULL && gjs->_type == TRI_JSON_BOOLEAN) {
uniqueIndex = gjs->_value._boolean;
}
else {
LOG_ERROR("ignore skiplist-index %lu, could not determine if unique or non-unique",
(unsigned long) iid);
TRI_FreeJson(json);
return false;
}
// Extract the list of fields
fieldCount = TRI_LookupArrayJson(json, "fieldCount");
intCount = 0;
if ( (fieldCount != NULL) && (fieldCount->_type == TRI_JSON_NUMBER) ) {
intCount = fieldCount->_value._number;
}
if (intCount < 1) {
LOG_ERROR("ignore skiplist-index %lu, field count missing", (unsigned long) iid);
TRI_FreeJson(json);
return false;
}
// Initialise the vector in which we store the fields on which the skiplist
// will be based.
TRI_InitVector(&attributes, sizeof(char*));
// find fields
for (int j = 0; j < intCount; ++j) {
sprintf(fieldChar, "field_%i", j);
fieldStr = TRI_LookupArrayJson(json, fieldChar);
if (fieldStr == NULL || fieldStr->_type != TRI_JSON_STRING) {
LOG_ERROR("ignore skiplist-index %lu, invalid field name for skiplist index",
(unsigned long) iid);
TRI_DestroyVector(&attributes);
TRI_FreeJson(json);
return false;
}
TRI_PushBackVector(&attributes, &(fieldStr->_value._string.data));
}
// create the index
idx = CreateSkiplistIndexSimCollection (doc, &attributes, iid, uniqueIndex);
TRI_DestroyVector(&attributes);
TRI_FreeJson(json);
if (idx == NULL) {
LOG_ERROR("cannot create hash index %lu", (unsigned long) iid);
return false;
}
return true;
}
// .........................................................................
// oops, unknown index type
// .........................................................................
else {
LOG_ERROR("ignoring unknown index type '%s' for index %lu",
typeStr,
(unsigned long) iid);
TRI_FreeJson(json);
return false;
}
}
@ -2024,7 +2000,7 @@ static bool DeleteImmediateIndexes (TRI_sim_collection_t* collection,
/// @brief initialises an index with all existing documents
////////////////////////////////////////////////////////////////////////////////
static bool FillIndex (TRI_sim_collection_t* collection,
TRI_index_t* idx) {
size_t n;
size_t scanned;
@ -2042,8 +2018,12 @@ static void FillIndex (TRI_sim_collection_t* collection,
if (*ptr) {
++scanned;
if (! idx->insert(idx, *ptr)) {
LOG_TRACE("failed to insert document '%lu:%lu'",
(unsigned long) collection->base.base._cid,
(unsigned long) ((TRI_doc_mptr_t const*) *ptr)->_did);
return false;
}
if (scanned % 10000 == 0) {
@ -2051,6 +2031,8 @@ static void FillIndex (TRI_sim_collection_t* collection,
}
}
}
return true;
}
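// With the bool result, a caller can roll back a partially built index; a
// sketch of the intended pattern (mirrored by CreateHashIndexSimCollection
// below):
//   if (! FillIndex(collection, idx)) {
//     TRI_FreeHashIndex(idx);
//     return NULL;
//   }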
////////////////////////////////////////////////////////////////////////////////
@ -2406,74 +2388,85 @@ static TRI_index_t* CreateHashIndexSimCollection (TRI_sim_collection_t* collecti
const TRI_vector_t* attributes,
TRI_idx_iid_t iid,
bool unique) {
TRI_index_t* idx;
TRI_shaper_t* shaper;
TRI_vector_t shapes;
bool ok;
idx = NULL;
shaper = collection->base._shaper;
TRI_InitVector(&shapes, sizeof(TRI_shape_pid_t));
// ...........................................................................
// Determine the shape ids for the attributes
// ...........................................................................
for (size_t j = 0; j < attributes->_length; ++j) {
char* shapeString;
TRI_shape_pid_t shape;
shapeString = *((char**)(TRI_AtVector(attributes,j)));
shape = shaper->findAttributePathByName(shaper, shapeString);
TRI_PushBackVector(&shapes, &shape);
}
// ...........................................................................
// Attempt to find an existing index which matches the attributes above.
// If a suitable index is found, return that one; otherwise we need to create
// a new one.
// ...........................................................................
idx = TRI_LookupHashIndexSimCollection(collection, &shapes);
if (idx != NULL) {
TRI_DestroyVector(&shapes);
LOG_TRACE("hash-index already created");
return idx;
}
// ...........................................................................
// Create the hash index
// ...........................................................................
idx = TRI_CreateHashIndex(&collection->base,&shapes, unique);
idx = TRI_CreateHashIndex(&collection->base, &shapes, unique);
// ...........................................................................
// release memory allocated to vector
// ...........................................................................
TRI_DestroyVector(&shapes);
// ...........................................................................
// If an index id is given, use it; otherwise use the default.
// ...........................................................................
if (iid) {
idx->_iid = iid;
}
// ...........................................................................
// initialises the index with all existing documents
// ...........................................................................
ok = FillIndex(collection, idx);
if (! ok) {
TRI_FreeHashIndex(idx);
return NULL;
}
// ...........................................................................
// store index and return
// ...........................................................................
TRI_PushBackVectorPointer(&collection->_indexes, idx);
return idx;
}
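// Usage sketch (hypothetical values): pass iid = 0 to keep the identifier
// the new index was assigned on creation, or a non-zero iid to restore one
// that was read back from disk:
//   idx = CreateHashIndexSimCollection(doc, &attributes, 0, true);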
////////////////////////////////////////////////////////////////////////////////
/// @brief adds a skiplist index to the collection
////////////////////////////////////////////////////////////////////////////////
@ -2837,8 +2830,8 @@ TRI_idx_iid_t TRI_EnsureHashIndexSimCollection(TRI_sim_collection_t* collection,
// .............................................................................
// write-lock the collection
// .............................................................................
TRI_WriteLockReadWriteLock(&collection->_lock);
TRI_WriteLockReadWriteLock(&collection->_lock);
// .............................................................................
// Given the list of attributes (as strings)
@ -2868,7 +2861,6 @@ TRI_idx_iid_t TRI_EnsureHashIndexSimCollection(TRI_sim_collection_t* collection,
return ok ? idx->_iid : 0;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief ensures that a skiplist index exists
////////////////////////////////////////////////////////////////////////////////
View File
@ -194,7 +194,7 @@ var console = require("console");
/// Note that the url for "user" actions is automatically prefixed
/// with @LIT{_action}. This applies to all specified contexts. For example, if
/// the context contains "admin" and "user" and the url is @LIT{hallo}, then the
/// action is accessible under @LIT{/_action/hallo} - even for the admin context.
///
/// @FA{options.callback}(@FA{request}, @FA{response})
///
@ -347,7 +347,7 @@ function defineHttp (options) {
///
/// The function defines a response. @FA{code} is the status code to
/// return. @FA{result} is the result object, which will be returned as JSON
/// object in the body. @LIT{headers} is an array of headers to be returned.
////////////////////////////////////////////////////////////////////////////////
function actionResult (req, res, code, result, headers) {