mirror of https://gitee.com/bigwinds/arangodb

commit b0e452f74a

Merge branch 'master' of github.com:triAGENS/ArangoDB into devel

Conflicts:
    CHANGELOG
    Makefile.am
    Makefile.in
    arangod/V8Server/v8-vocbase.cpp
@@ -1,5 +1,7 @@
v1.1.x (XXXX-XX-XX)
-------------------

v1.0.beta1 (2012-07-29)
-----------------------

* fixed issue #118: We need a collection "debugger"

* fixed issue #126: Access-Shaper must be cached

@@ -31,7 +31,7 @@ PROJECT_NAME = "ArangoDB"
# This could be handy for archiving the generated documentation or
# if some version control system is used.

PROJECT_NUMBER = 1.0.alpha3
PROJECT_NUMBER = 1.0.beta1

# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short.

@@ -212,7 +212,7 @@ ALIASES += GE{1}="<b>\1</b>"
ALIASES += EXAMPLES="<b>Examples</b><br>"
ALIASES += EXAMPLE{2}="@latexonly\renewcommand{\examplecap}{\2}\setboolean{hascap}{true}@endlatexonly@verbinclude \1@latexonly\setboolean{hascap}{false}@endlatexonly"
ALIASES += TINYEXAMPLE{2}="@latexonly\renewcommand{\examplecap}{\2}\setboolean{hascap}{true}\renewcommand{\examplesize}{\tiny}@endlatexonly@verbinclude \1@latexonly\setboolean{hascap}{false}\renewcommand{\examplesize}{\ttfamily}@endlatexonly"
ALIASES += VERSION="1.0.alpha3"
ALIASES += VERSION="1.0.beta1"
ALIASES += EMBEDTOC{1}="@ifnot LATEX<hr>@copydoc \1<hr>@endif"
ALIASES += RESTHEADER{2}="@if LATEX@latexonly \vskip 0.5em\colorbox{gray}{@endlatexonly\1 (\2)@latexonly}\vskip 1em@endlatexonly@else <hr><em>\1</em> (\2)<hr><br>@endif"
ALIASES += LATEXBREAK="@latexonly\vskip -0.5em\hskip 1.0em@endlatexonly"

@@ -15,8 +15,8 @@
[ -f /etc/default/rcS ] && . /etc/default/rcS
PATH=/bin:/usr/bin:/sbin:/usr/sbin
DAEMON=/usr/sbin/arangod
DESC="session voc server"
NAME="arangos"
DESC="ArangoDB"
NAME="arangod"
PIDDIR=/var/run/arangodb
PIDFILE=${PIDDIR}/arangod.pid
CONF=/etc/arangodb/arangod.conf

Makefile.am (32 lines changed)

@@ -176,7 +176,8 @@ noinst_LIBRARIES = \

sbin_PROGRAMS = \
	bin/arangod \
	bin/arango-password
	bin/arango-password \
	bin/arango-dfdb

# exclude arango-password from sources list
bin_arango_password_SOURCES =

@@ -260,7 +261,7 @@ endif
if ENABLE_INSTALL_DBDIR

install-data-local:
	test -d @localstatedir@/${PACKAGE_TARNAME} || mkdir -p @localstatedir@/${PACKAGE_TARNAME}
	test -d $(DESTDIR)$(TRI_DATABASE_DIR) || mkdir -p $(DESTDIR)$(TRI_DATABASE_DIR)

endif

@@ -343,8 +344,8 @@ bin/arango-password: bin/arango-password.in
if ENABLE_RELATIVE_DEVEL
	sed \
		-e 's%@SBINDIR@%bin%g' \
		-e 's%@DATABASE@%${databasedir}%g' \
		-e 's%@STATICFILES@%${pkgdatadir}%g' \
		-e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
		-e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
		$< > $@
	chmod 755 $@
else

@@ -358,6 +359,29 @@ endif

CLEANUP += bin/arango-password

################################################################################
### @brief arango datafile debugger
################################################################################

bin/arango-dfdb: bin/arango-dfdb.in
if ENABLE_RELATIVE_DEVEL
	sed \
		-e 's%@SBINDIR@%bin%g' \
		-e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
		-e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
		$< > $@
	chmod 755 $@
else
	sed \
		-e 's%@SBINDIR@%${TRI_SBIN_DIR}%g' \
		-e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
		-e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
		$< > $@
	chmod 755 $@
endif

CLEANUP += bin/arango-dfdb

################################################################################
## --SECTION-- DEPENDENCIES
################################################################################

@@ -79,6 +79,7 @@ WIKI = \
	DbaManual \
	DbaManualBasics \
	DbaManualAuthentication \
	DbaManualDatafileDebugger \
	DefineAction \
	ExamplesSetup \
	FirstStepsArangoDB \

Makefile.in (64 lines changed)

@ -221,7 +221,8 @@ target_triplet = @target@
|
|||
################################################################################
|
||||
@ENABLE_MRUBY_TRUE@am__append_5 = @MRUBY_CPPFLAGS@ @MRUBY_LDFLAGS@
|
||||
@ENABLE_MRUBY_TRUE@am__append_6 = @MRUBY_LIBS@
|
||||
sbin_PROGRAMS = bin/arangod$(EXEEXT) bin/arango-password$(EXEEXT)
|
||||
sbin_PROGRAMS = bin/arangod$(EXEEXT) bin/arango-password$(EXEEXT) \
|
||||
bin/arango-dfdb$(EXEEXT)
|
||||
bin_PROGRAMS = bin/arangosh$(EXEEXT) bin/arangoimp$(EXEEXT) \
|
||||
$(am__EXEEXT_1)
|
||||
noinst_PROGRAMS = $(am__EXEEXT_2) $(am__EXEEXT_3)
|
||||
|
@ -613,6 +614,9 @@ am__UnitTests_geo_suite_SOURCES_DIST = UnitTests/Cambridge/Runner.cpp \
|
|||
@ENABLE_BOOST_TEST_TRUE@ UnitTests/Cambridge/UnitTests_geo_suite-georeg.$(OBJEXT) \
|
||||
@ENABLE_BOOST_TEST_TRUE@ arangod/GeoIndex/UnitTests_geo_suite-GeoIndex.$(OBJEXT)
|
||||
UnitTests_geo_suite_OBJECTS = $(am_UnitTests_geo_suite_OBJECTS)
|
||||
bin_arango_dfdb_SOURCES = bin/arango-dfdb.c
|
||||
bin_arango_dfdb_OBJECTS = bin/arango-dfdb.$(OBJEXT)
|
||||
bin_arango_dfdb_LDADD = $(LDADD)
|
||||
am_bin_arango_password_OBJECTS =
|
||||
bin_arango_password_OBJECTS = $(am_bin_arango_password_OBJECTS)
|
||||
bin_arango_password_LDADD = $(LDADD)
|
||||
|
@ -817,17 +821,18 @@ SOURCES = $(lib_libarango_a_SOURCES) $(lib_libarango_client_a_SOURCES) \
|
|||
$(lib_libarango_fe_a_SOURCES) $(lib_libarango_mruby_a_SOURCES) \
|
||||
$(lib_libarango_v8_a_SOURCES) \
|
||||
$(UnitTests_basics_suite_SOURCES) \
|
||||
$(UnitTests_geo_suite_SOURCES) $(bin_arango_password_SOURCES) \
|
||||
$(bin_arangod_SOURCES) $(bin_arangoimp_SOURCES) \
|
||||
$(bin_arangoirb_SOURCES) $(bin_arangosh_SOURCES) \
|
||||
$(bin_zclient_SOURCES) $(bin_zserver_SOURCES)
|
||||
$(UnitTests_geo_suite_SOURCES) bin/arango-dfdb.c \
|
||||
$(bin_arango_password_SOURCES) $(bin_arangod_SOURCES) \
|
||||
$(bin_arangoimp_SOURCES) $(bin_arangoirb_SOURCES) \
|
||||
$(bin_arangosh_SOURCES) $(bin_zclient_SOURCES) \
|
||||
$(bin_zserver_SOURCES)
|
||||
DIST_SOURCES = $(am__lib_libarango_a_SOURCES_DIST) \
|
||||
$(lib_libarango_client_a_SOURCES) \
|
||||
$(lib_libarango_fe_a_SOURCES) \
|
||||
$(am__lib_libarango_mruby_a_SOURCES_DIST) \
|
||||
$(lib_libarango_v8_a_SOURCES) \
|
||||
$(am__UnitTests_basics_suite_SOURCES_DIST) \
|
||||
$(am__UnitTests_geo_suite_SOURCES_DIST) \
|
||||
$(am__UnitTests_geo_suite_SOURCES_DIST) bin/arango-dfdb.c \
|
||||
$(bin_arango_password_SOURCES) $(am__bin_arangod_SOURCES_DIST) \
|
||||
$(bin_arangoimp_SOURCES) $(am__bin_arangoirb_SOURCES_DIST) \
|
||||
$(bin_arangosh_SOURCES) $(bin_zclient_SOURCES) \
|
||||
|
@ -1053,11 +1058,11 @@ BUILT_SOURCES = build.h Installation/arangod.conf $(JAVASCRIPT_HEADER) \
|
|||
################################################################################
|
||||
### @brief cleanup
|
||||
################################################################################
|
||||
CLEANUP = bin/arango-password $(am__append_18) $(DOXYGEN) $(addsuffix \
|
||||
.md,$(addprefix Doxygen/xml/,$(WIKI))) $(addsuffix \
|
||||
.md,$(addprefix Doxygen/wiki/,$(WIKI))) $(JAVASCRIPT_HEADER) \
|
||||
.setup-js-directories $(am__append_20) $(am__append_22) \
|
||||
$(PROTOBUF_FILES)
|
||||
CLEANUP = bin/arango-password bin/arango-dfdb $(am__append_18) \
|
||||
$(DOXYGEN) $(addsuffix .md,$(addprefix Doxygen/xml/,$(WIKI))) \
|
||||
$(addsuffix .md,$(addprefix Doxygen/wiki/,$(WIKI))) \
|
||||
$(JAVASCRIPT_HEADER) .setup-js-directories $(am__append_20) \
|
||||
$(am__append_22) $(PROTOBUF_FILES)
|
||||
|
||||
################################################################################
|
||||
### @brief flex files
|
||||
|
@ -1238,6 +1243,7 @@ WIKI = \
|
|||
DbaManual \
|
||||
DbaManualBasics \
|
||||
DbaManualAuthentication \
|
||||
DbaManualDatafileDebugger \
|
||||
DefineAction \
|
||||
ExamplesSetup \
|
||||
FirstStepsArangoDB \
|
||||
|
@ -2451,6 +2457,11 @@ UnitTests/geo_suite$(EXEEXT): $(UnitTests_geo_suite_OBJECTS) $(UnitTests_geo_sui
|
|||
bin/$(am__dirstamp):
|
||||
@$(MKDIR_P) bin
|
||||
@: > bin/$(am__dirstamp)
|
||||
bin/$(DEPDIR)/$(am__dirstamp):
|
||||
@$(MKDIR_P) bin/$(DEPDIR)
|
||||
@: > bin/$(DEPDIR)/$(am__dirstamp)
|
||||
bin/arango-dfdb.$(OBJEXT): bin/$(am__dirstamp) \
|
||||
bin/$(DEPDIR)/$(am__dirstamp)
|
||||
arangod/Actions/$(am__dirstamp):
|
||||
@$(MKDIR_P) arangod/Actions
|
||||
@: > arangod/Actions/$(am__dirstamp)
|
||||
|
@ -2901,6 +2912,7 @@ mostlyclean-compile:
|
|||
-rm -f arangosh/V8Client/bin_arangosh-ImportHelper.$(OBJEXT)
|
||||
-rm -f arangosh/V8Client/bin_arangosh-V8ClientConnection.$(OBJEXT)
|
||||
-rm -f arangosh/V8Client/bin_arangosh-arangosh.$(OBJEXT)
|
||||
-rm -f bin/arango-dfdb.$(OBJEXT)
|
||||
-rm -f lib/Admin/ApplicationAdminServer.$(OBJEXT)
|
||||
-rm -f lib/Admin/RestAdminBaseHandler.$(OBJEXT)
|
||||
-rm -f lib/Admin/RestAdminFeConfigurationHandler.$(OBJEXT)
|
||||
|
@ -3164,6 +3176,7 @@ distclean-compile:
|
|||
@AMDEP_TRUE@@am__include@ @am__quote@arangosh/V8Client/$(DEPDIR)/bin_arangosh-ImportHelper.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@arangosh/V8Client/$(DEPDIR)/bin_arangosh-V8ClientConnection.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@arangosh/V8Client/$(DEPDIR)/bin_arangosh-arangosh.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@bin/$(DEPDIR)/arango-dfdb.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@lib/Admin/$(DEPDIR)/ApplicationAdminServer.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@lib/Admin/$(DEPDIR)/RestAdminBaseHandler.Po@am__quote@
|
||||
@AMDEP_TRUE@@am__include@ @am__quote@lib/Admin/$(DEPDIR)/RestAdminFeConfigurationHandler.Po@am__quote@
|
||||
|
@ -5007,6 +5020,7 @@ distclean-generic:
|
|||
-rm -f arangoirb/MRClient/$(am__dirstamp)
|
||||
-rm -f arangosh/V8Client/$(DEPDIR)/$(am__dirstamp)
|
||||
-rm -f arangosh/V8Client/$(am__dirstamp)
|
||||
-rm -f bin/$(DEPDIR)/$(am__dirstamp)
|
||||
-rm -f bin/$(am__dirstamp)
|
||||
-rm -f lib/$(am__dirstamp)
|
||||
-rm -f lib/Admin/$(DEPDIR)/$(am__dirstamp)
|
||||
|
@ -5069,7 +5083,7 @@ clean-am: clean-binPROGRAMS clean-generic clean-local \
|
|||
|
||||
distclean: distclean-am
|
||||
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
|
||||
-rm -rf PerfTests/$(DEPDIR) UnitTests/$(DEPDIR) UnitTests/Cambridge/$(DEPDIR) UnitTests/Jutland/$(DEPDIR) UnitTests/Philadelphia/$(DEPDIR) arangod/Actions/$(DEPDIR) arangod/Ahuacatl/$(DEPDIR) arangod/BitIndexes/$(DEPDIR) arangod/GeoIndex/$(DEPDIR) arangod/HashIndex/$(DEPDIR) arangod/IndexIterators/$(DEPDIR) arangod/IndexOperators/$(DEPDIR) arangod/MRServer/$(DEPDIR) arangod/PriorityQueue/$(DEPDIR) arangod/RestHandler/$(DEPDIR) arangod/RestServer/$(DEPDIR) arangod/SkipLists/$(DEPDIR) arangod/V8Server/$(DEPDIR) arangod/VocBase/$(DEPDIR) arangoirb/MRClient/$(DEPDIR) arangosh/V8Client/$(DEPDIR) lib/Admin/$(DEPDIR) lib/ApplicationServer/$(DEPDIR) lib/Basics/$(DEPDIR) lib/BasicsC/$(DEPDIR) lib/Dispatcher/$(DEPDIR) lib/HttpServer/$(DEPDIR) lib/JsonParser/$(DEPDIR) lib/JsonParserX/$(DEPDIR) lib/Logger/$(DEPDIR) lib/MRuby/$(DEPDIR) lib/ProgramOptions/$(DEPDIR) lib/ProtocolBuffers/$(DEPDIR) lib/Rest/$(DEPDIR) lib/ResultGenerator/$(DEPDIR) lib/Scheduler/$(DEPDIR) lib/ShapedJson/$(DEPDIR) lib/SimpleHttpClient/$(DEPDIR) lib/Statistics/$(DEPDIR) lib/UserManager/$(DEPDIR) lib/Utilities/$(DEPDIR) lib/V8/$(DEPDIR) lib/Variant/$(DEPDIR) lib/ZeroMQ/$(DEPDIR)
|
||||
-rm -rf PerfTests/$(DEPDIR) UnitTests/$(DEPDIR) UnitTests/Cambridge/$(DEPDIR) UnitTests/Jutland/$(DEPDIR) UnitTests/Philadelphia/$(DEPDIR) arangod/Actions/$(DEPDIR) arangod/Ahuacatl/$(DEPDIR) arangod/BitIndexes/$(DEPDIR) arangod/GeoIndex/$(DEPDIR) arangod/HashIndex/$(DEPDIR) arangod/IndexIterators/$(DEPDIR) arangod/IndexOperators/$(DEPDIR) arangod/MRServer/$(DEPDIR) arangod/PriorityQueue/$(DEPDIR) arangod/RestHandler/$(DEPDIR) arangod/RestServer/$(DEPDIR) arangod/SkipLists/$(DEPDIR) arangod/V8Server/$(DEPDIR) arangod/VocBase/$(DEPDIR) arangoirb/MRClient/$(DEPDIR) arangosh/V8Client/$(DEPDIR) bin/$(DEPDIR) lib/Admin/$(DEPDIR) lib/ApplicationServer/$(DEPDIR) lib/Basics/$(DEPDIR) lib/BasicsC/$(DEPDIR) lib/Dispatcher/$(DEPDIR) lib/HttpServer/$(DEPDIR) lib/JsonParser/$(DEPDIR) lib/JsonParserX/$(DEPDIR) lib/Logger/$(DEPDIR) lib/MRuby/$(DEPDIR) lib/ProgramOptions/$(DEPDIR) lib/ProtocolBuffers/$(DEPDIR) lib/Rest/$(DEPDIR) lib/ResultGenerator/$(DEPDIR) lib/Scheduler/$(DEPDIR) lib/ShapedJson/$(DEPDIR) lib/SimpleHttpClient/$(DEPDIR) lib/Statistics/$(DEPDIR) lib/UserManager/$(DEPDIR) lib/Utilities/$(DEPDIR) lib/V8/$(DEPDIR) lib/Variant/$(DEPDIR) lib/ZeroMQ/$(DEPDIR)
|
||||
-rm -f Makefile
|
||||
distclean-am: clean-am distclean-compile distclean-generic \
|
||||
distclean-hdr distclean-tags
|
||||
|
@ -5118,7 +5132,7 @@ installcheck-am:
|
|||
maintainer-clean: maintainer-clean-am
|
||||
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
|
||||
-rm -rf $(top_srcdir)/autom4te.cache
|
||||
-rm -rf PerfTests/$(DEPDIR) UnitTests/$(DEPDIR) UnitTests/Cambridge/$(DEPDIR) UnitTests/Jutland/$(DEPDIR) UnitTests/Philadelphia/$(DEPDIR) arangod/Actions/$(DEPDIR) arangod/Ahuacatl/$(DEPDIR) arangod/BitIndexes/$(DEPDIR) arangod/GeoIndex/$(DEPDIR) arangod/HashIndex/$(DEPDIR) arangod/IndexIterators/$(DEPDIR) arangod/IndexOperators/$(DEPDIR) arangod/MRServer/$(DEPDIR) arangod/PriorityQueue/$(DEPDIR) arangod/RestHandler/$(DEPDIR) arangod/RestServer/$(DEPDIR) arangod/SkipLists/$(DEPDIR) arangod/V8Server/$(DEPDIR) arangod/VocBase/$(DEPDIR) arangoirb/MRClient/$(DEPDIR) arangosh/V8Client/$(DEPDIR) lib/Admin/$(DEPDIR) lib/ApplicationServer/$(DEPDIR) lib/Basics/$(DEPDIR) lib/BasicsC/$(DEPDIR) lib/Dispatcher/$(DEPDIR) lib/HttpServer/$(DEPDIR) lib/JsonParser/$(DEPDIR) lib/JsonParserX/$(DEPDIR) lib/Logger/$(DEPDIR) lib/MRuby/$(DEPDIR) lib/ProgramOptions/$(DEPDIR) lib/ProtocolBuffers/$(DEPDIR) lib/Rest/$(DEPDIR) lib/ResultGenerator/$(DEPDIR) lib/Scheduler/$(DEPDIR) lib/ShapedJson/$(DEPDIR) lib/SimpleHttpClient/$(DEPDIR) lib/Statistics/$(DEPDIR) lib/UserManager/$(DEPDIR) lib/Utilities/$(DEPDIR) lib/V8/$(DEPDIR) lib/Variant/$(DEPDIR) lib/ZeroMQ/$(DEPDIR)
|
||||
-rm -rf PerfTests/$(DEPDIR) UnitTests/$(DEPDIR) UnitTests/Cambridge/$(DEPDIR) UnitTests/Jutland/$(DEPDIR) UnitTests/Philadelphia/$(DEPDIR) arangod/Actions/$(DEPDIR) arangod/Ahuacatl/$(DEPDIR) arangod/BitIndexes/$(DEPDIR) arangod/GeoIndex/$(DEPDIR) arangod/HashIndex/$(DEPDIR) arangod/IndexIterators/$(DEPDIR) arangod/IndexOperators/$(DEPDIR) arangod/MRServer/$(DEPDIR) arangod/PriorityQueue/$(DEPDIR) arangod/RestHandler/$(DEPDIR) arangod/RestServer/$(DEPDIR) arangod/SkipLists/$(DEPDIR) arangod/V8Server/$(DEPDIR) arangod/VocBase/$(DEPDIR) arangoirb/MRClient/$(DEPDIR) arangosh/V8Client/$(DEPDIR) bin/$(DEPDIR) lib/Admin/$(DEPDIR) lib/ApplicationServer/$(DEPDIR) lib/Basics/$(DEPDIR) lib/BasicsC/$(DEPDIR) lib/Dispatcher/$(DEPDIR) lib/HttpServer/$(DEPDIR) lib/JsonParser/$(DEPDIR) lib/JsonParserX/$(DEPDIR) lib/Logger/$(DEPDIR) lib/MRuby/$(DEPDIR) lib/ProgramOptions/$(DEPDIR) lib/ProtocolBuffers/$(DEPDIR) lib/Rest/$(DEPDIR) lib/ResultGenerator/$(DEPDIR) lib/Scheduler/$(DEPDIR) lib/ShapedJson/$(DEPDIR) lib/SimpleHttpClient/$(DEPDIR) lib/Statistics/$(DEPDIR) lib/UserManager/$(DEPDIR) lib/Utilities/$(DEPDIR) lib/V8/$(DEPDIR) lib/Variant/$(DEPDIR) lib/ZeroMQ/$(DEPDIR)
|
||||
-rm -f Makefile
|
||||
maintainer-clean-am: distclean-am maintainer-clean-generic
|
||||
|
||||
|
@ -5172,7 +5186,7 @@ Installation/arangod.conf: Installation/arangod.conf.in Makefile
|
|||
################################################################################
|
||||
|
||||
@ENABLE_INSTALL_DBDIR_TRUE@install-data-local:
|
||||
@ENABLE_INSTALL_DBDIR_TRUE@ test -d @localstatedir@/${PACKAGE_TARNAME} || mkdir -p @localstatedir@/${PACKAGE_TARNAME}
|
||||
@ENABLE_INSTALL_DBDIR_TRUE@ test -d $(DESTDIR)$(TRI_DATABASE_DIR) || mkdir -p $(DESTDIR)$(TRI_DATABASE_DIR)
|
||||
|
||||
################################################################################
|
||||
################################################################################
|
||||
|
@ -5467,8 +5481,26 @@ built-sources: build.h errorfiles $(JAVASCRIPT_HEADER)
|
|||
bin/arango-password: bin/arango-password.in
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ sed \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@SBINDIR@%bin%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@DATABASE@%${databasedir}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@STATICFILES@%${pkgdatadir}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ $< > $@
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ chmod 755 $@
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ sed \
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ -e 's%@SBINDIR@%${TRI_SBIN_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ -e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ -e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ $< > $@
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ chmod 755 $@
|
||||
|
||||
################################################################################
|
||||
### @brief arango datafile debugger
|
||||
################################################################################
|
||||
|
||||
bin/arango-dfdb: bin/arango-dfdb.in
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ sed \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@SBINDIR@%bin%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@DATABASE@%${TRI_DATABASE_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ -e 's%@STATICFILES@%${TRI_PKGDATA_DIR}%g' \
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ $< > $@
|
||||
@ENABLE_RELATIVE_DEVEL_TRUE@ chmod 755 $@
|
||||
@ENABLE_RELATIVE_DEVEL_FALSE@ sed \
|
||||
|
|
@@ -25,12 +25,18 @@
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- DBA MANUAL
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @page DbaManual ArangoDB's DBA Manual
///
/// @if LATEX
/// <ul>
/// <li>@ref DbaManualBasics</li>
/// <li>@ref DbaManualAuthentication</li>
/// <li>@ref DbaManualDatafileDebugger</li>
/// <li>@ref ShellCollection</li>
/// <li>@ref ShellIndex</li>
/// <li>@ref IndexCap</li>

@@ -43,13 +49,18 @@
/// </ul>
/// @else
/// @copydetails DbaManualBasicsTOC
/// @copydetails DbaManualAuthenticationTOC
/// @copydetails DbaManualDatafileDebuggerTOC
/// @copydetails ShellCollectionTOC
/// @copydetails ShellIndexTOC
/// @copydetails IndexesTOC
/// @copydetails DbaManualAuthenticationTOC
/// @endif
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- DBA MANUAL BASICS
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @page DbaManualBasicsTOC
///

@@ -96,6 +107,10 @@
/// @copydetails JS_PropertiesVocbaseCol
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- DBA MANUAL AUTHENTICATION
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @page DbaManualAuthenticationTOC
///
@@ -130,6 +145,43 @@
/// @section DbaManualAuthenticationCommandLine Command-Line Options for the Authentication and Authorisation
/////////////////////////////////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- DBA MANUAL DATAFILE DEBUGGER
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @page DbaManualDatafileDebuggerTOC
///
/// <ul>
/// <li>@ref DbaManualDatafileDebugger
/// </li>
/// </ul>
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @page DbaManualDatafileDebugger Datafile Debugger
///
/// @EMBEDTOC{DbaManualDatafileDebuggerTOC}
///
/// ArangoDB uses append-only journals. Corruption should only occur when the
/// database server is killed. In this case, the corruption should only
/// occur in the last object(s) being written to the journal.
///
/// If a corruption occurs within a normal datafile, then this can only happen
/// if a hardware fault occurred.
///
/// If a journal or datafile is corrupt, shut down the database server and start
/// the program
///
/// @LIT{arango-dfdb}
///
/// in order to check the consistency of the datafiles and journals.
////////////////////////////////////////////////////////////////////////////////
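The manual text above only points at the standalone arango-dfdb program; the same commit also wires collection-level helpers (datafiles() and datafileScan(), see the v8-vocbase.cpp hunks further down) into arangosh. A minimal sketch of using them, assuming a collection named "example" (a hypothetical name) that has been unloaded first, since both calls refuse to work on a loaded collection:

// arangosh sketch; the collection name "example" is a placeholder
var c = db.example;
c.unload();                 // datafiles()/datafileScan() require the UNLOADED state

var files = c.datafiles();  // { journals: [...], compactors: [...], datafiles: [...] }
files.journals.forEach(function (name) { print(name); });

If the collection is still loaded, both functions raise the "collection must be unloaded" error thrown in the C++ bindings.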

// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------

// Local Variables:
// mode: c++
// mode: outline-minor

@@ -76,7 +76,16 @@
////////////////////////
///
/// @subsection HomeCLINQ C\# / LINQ
///
/// @subsection HomeNodeJS Node.JS
///
/// There is a Node.JS client from Anders Elo available in a separate project:
/// https://github.com/kaerus/arango-client
///
/// @subsection HomePerl Perl
///
/// @@hide_o_55 is currently developing a Perl client.
///
/// @subsection HomePHP PHP
///
/// There is a PHP client available in a separate project:

@@ -1,6 +1,6 @@
////////////////////////////////////////////////////////////////////////////////
/// @brief installation guide
///
//
/// @file
///
/// DISCLAIMER

@@ -218,6 +218,7 @@
/// <ul>
/// <li>@ref Compiling
/// <ul>
/// <li>@ref CompilingAmazonMicroInstance</li>
/// <li>@ref CompilingAIO
/// <ul>
/// <li>@ref CompilingAIOPrerequisites
@@ -278,6 +279,19 @@
///
/// @EMBEDTOC{CompilingTOC}
///
/// @section CompilingAmazonMicroInstance Amazon Micro Instance
///////////////////////////////////////////////////////////////
///
/// \@sohgoh has reported that it is very easy to install ArangoDB on an
/// Amazon Micro Instance:
///
/// @code
/// > sudo yum install readline-devel
/// > ./configure
/// > make
/// > make install
/// @endcode
///
/// @section CompilingAIO All-In-One Version
////////////////////////////////////////////
///

@@ -29,11 +29,12 @@

#include "v8-query.h"

#include "BasicsC/logging.h"
#include "HashIndex/hashindex.h"
#include "SkipLists/skiplistIndex.h"
#include "V8/v8-conv.h"
#include "V8/v8-utils.h"
#include "V8Server/v8-vocbase.h"
#include "BasicsC/logging.h"

// -----------------------------------------------------------------------------
// --SECTION-- HELPER FUNCTIONS

@@ -44,6 +44,7 @@
#include "V8/v8-execution.h"
#include "V8/v8-utils.h"
#include "V8Server/v8-objects.h"
#include "VocBase/datafile.h"
#include "VocBase/general-cursor.h"
#include "VocBase/simple-collection.h"
#include "VocBase/voc-shaper.h"

@@ -1962,6 +1963,89 @@ static v8::Handle<v8::Value> JS_ParseAhuacatl (v8::Arguments const& argv) {
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- TRI_DATAFILE_T FUNCTIONS
// -----------------------------------------------------------------------------

// -----------------------------------------------------------------------------
// --SECTION-- javascript functions
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief returns information about the datafiles
///
/// @FUN{@FA{collection}.datafileScan(@FA{path})}
///
/// Returns information about the datafiles. The collection must be unloaded.
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_DatafileScanVocbaseCol (v8::Arguments const& argv) {
  v8::HandleScope scope;

  TRI_vocbase_col_t* collection = TRI_UnwrapClass<TRI_vocbase_col_t>(argv.Holder(), WRP_VOCBASE_COL_TYPE);

  if (collection == 0) {
    return scope.Close(v8::ThrowException(v8::String::New("illegal collection pointer")));
  }

  if (argv.Length() != 1) {
    TRI_ReleaseCollection(collection);
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(TRI_ERROR_ILLEGAL_OPTION, "usage: datafileScan()")));
  }

  string path = TRI_ObjectToString(argv[0]);

  TRI_READ_LOCK_STATUS_VOCBASE_COL(collection);

  if (collection->_status != TRI_VOC_COL_STATUS_UNLOADED) {
    TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(TRI_ERROR_ARANGO_COLLECTION_NOT_UNLOADED,
                                                                "collection must be unloaded")));
  }

  TRI_df_scan_t scan = TRI_ScanDatafile(path.c_str());

  // build result
  v8::Handle<v8::Object> result = v8::Object::New();

  result->Set(v8::String::New("currentSize"), v8::Number::New(scan._currentSize));
  result->Set(v8::String::New("maximalSize"), v8::Number::New(scan._maximalSize));
  result->Set(v8::String::New("endPosition"), v8::Number::New(scan._endPosition));
  result->Set(v8::String::New("numberMarkers"), v8::Number::New(scan._numberMarkers));
  result->Set(v8::String::New("status"), v8::Number::New(scan._status));

  v8::Handle<v8::Array> entries = v8::Array::New();
  result->Set(v8::String::New("entries"), entries);

  for (size_t i = 0; i < scan._entries._length; ++i) {
    TRI_df_scan_entry_t* entry = (TRI_df_scan_entry_t*) TRI_AtVector(&scan._entries, i);

    v8::Handle<v8::Object> o = v8::Object::New();

    o->Set(v8::String::New("position"), v8::Number::New(entry->_position));
    o->Set(v8::String::New("size"), v8::Number::New(entry->_size));
    o->Set(v8::String::New("tick"), v8::Number::New(entry->_tick));
    o->Set(v8::String::New("type"), v8::Number::New((int) entry->_type));
    o->Set(v8::String::New("status"), v8::Number::New((int) entry->_status));

    entries->Set(i, o);
  }

  TRI_DestroyDatafileScan(&scan);

  TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);
  return scope.Close(result);
}

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- TRI_VOCBASE_COL_T FUNCTIONS
// -----------------------------------------------------------------------------

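For illustration, a hedged sketch of how the scan object built by JS_DatafileScanVocbaseCol above might be read from arangosh. The collection name and the way the path is obtained are assumptions; the field names (currentSize, maximalSize, endPosition, numberMarkers, status, entries) are exactly the ones the function sets:

// hypothetical arangosh session; "example" is a placeholder collection name
var path = db.example.datafiles().datafiles[0];   // any datafile of the unloaded collection
var scan = db.example.datafileScan(path);

print("status " + scan.status + ", " + scan.numberMarkers + " markers, data ends at " + scan.endPosition);

// each entry describes one marker: position, size, tick, type and status
scan.entries.forEach(function (e) {
  print(e.position + "\t" + e.size + "\t" + e.type + "\t" + e.status);
});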
@ -2005,46 +2089,66 @@ static v8::Handle<v8::Value> JS_CountVocbaseCol (v8::Arguments const& argv) {
|
|||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief deletes a document
|
||||
/// @brief returns information about the datafiles
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document})}
|
||||
/// @FUN{@FA{collection}.datafiles()}
|
||||
///
|
||||
/// Deletes a document. If there is revision mismatch, then an error is thrown.
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document}, true)}
|
||||
///
|
||||
/// Deletes a document. If there is revision mismatch, then mismatch
|
||||
/// is ignored and document is deleted. The function returns
|
||||
/// @LIT{true} if the document existed and was deleted. It returns
|
||||
/// @LIT{false}, if the document was already deleted.
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document-handle}, @FA{data})}
|
||||
///
|
||||
/// As before. Instead of document a @FA{document-handle} can be passed as
|
||||
/// first argument.
|
||||
///
|
||||
/// @EXAMPLES
|
||||
///
|
||||
/// Delete a document:
|
||||
///
|
||||
/// @TINYEXAMPLE{shell_remove-document,delete a document}
|
||||
///
|
||||
/// Delete a document with a conflict:
|
||||
///
|
||||
/// @TINYEXAMPLE{shell_remove-document-conflict,delete a document}
|
||||
/// Returns information about the datafiles. The collection must be unloaded.
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static v8::Handle<v8::Value> JS_RemoveVocbaseCol (v8::Arguments const& argv) {
|
||||
static v8::Handle<v8::Value> JS_DatafilesVocbaseCol (v8::Arguments const& argv) {
|
||||
v8::HandleScope scope;
|
||||
|
||||
v8::Handle<v8::Object> err;
|
||||
TRI_vocbase_col_t const* collection = UseCollection(argv.Holder(), &err);
|
||||
TRI_vocbase_col_t* collection = TRI_UnwrapClass<TRI_vocbase_col_t>(argv.Holder(), WRP_VOCBASE_COL_TYPE);
|
||||
|
||||
if (collection == 0) {
|
||||
return scope.Close(v8::ThrowException(err));
|
||||
return scope.Close(v8::ThrowException(v8::String::New("illegal collection pointer")));
|
||||
}
|
||||
|
||||
return DeleteVocbaseCol(collection->_vocbase, collection, argv);
|
||||
TRI_READ_LOCK_STATUS_VOCBASE_COL(collection);
|
||||
|
||||
if (collection->_status != TRI_VOC_COL_STATUS_UNLOADED) {
|
||||
TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);
|
||||
return scope.Close(v8::ThrowException(TRI_CreateErrorObject(TRI_ERROR_ARANGO_COLLECTION_NOT_UNLOADED,
|
||||
"collection must be unloaded")));
|
||||
}
|
||||
|
||||
TRI_col_file_structure_t structure = TRI_FileStructureCollectionDirectory(collection->_path);
|
||||
|
||||
// release lock
|
||||
TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);
|
||||
|
||||
// build result
|
||||
v8::Handle<v8::Object> result = v8::Object::New();
|
||||
|
||||
// journals
|
||||
v8::Handle<v8::Array> journals = v8::Array::New();
|
||||
result->Set(v8::String::New("journals"), journals);
|
||||
|
||||
for (size_t i = 0; i < structure._journals._length; ++i) {
|
||||
journals->Set(i, v8::String::New(structure._journals._buffer[i]));
|
||||
}
|
||||
|
||||
// compactors
|
||||
v8::Handle<v8::Array> compactors = v8::Array::New();
|
||||
result->Set(v8::String::New("compactors"), compactors);
|
||||
|
||||
for (size_t i = 0; i < structure._compactors._length; ++i) {
|
||||
compactors->Set(i, v8::String::New(structure._compactors._buffer[i]));
|
||||
}
|
||||
|
||||
// datafiles
|
||||
v8::Handle<v8::Array> datafiles = v8::Array::New();
|
||||
result->Set(v8::String::New("datafiles"), datafiles);
|
||||
|
||||
for (size_t i = 0; i < structure._datafiles._length; ++i) {
|
||||
datafiles->Set(i, v8::String::New(structure._datafiles._buffer[i]));
|
||||
}
|
||||
|
||||
// free result
|
||||
TRI_DestroyFileStructureCollection(&structure);
|
||||
|
||||
return scope.Close(result);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -3172,6 +3276,49 @@ static v8::Handle<v8::Value> JS_PropertiesVocbaseCol (v8::Arguments const& argv)
|
|||
return scope.Close(result);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief deletes a document
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document})}
|
||||
///
|
||||
/// Deletes a document. If there is revision mismatch, then an error is thrown.
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document}, true)}
|
||||
///
|
||||
/// Deletes a document. If there is revision mismatch, then mismatch
|
||||
/// is ignored and document is deleted. The function returns
|
||||
/// @LIT{true} if the document existed and was deleted. It returns
|
||||
/// @LIT{false}, if the document was already deleted.
|
||||
///
|
||||
/// @FUN{@FA{collection}.remove(@FA{document-handle}, @FA{data})}
|
||||
///
|
||||
/// As before. Instead of document a @FA{document-handle} can be passed as
|
||||
/// first argument.
|
||||
///
|
||||
/// @EXAMPLES
|
||||
///
|
||||
/// Delete a document:
|
||||
///
|
||||
/// @TINYEXAMPLE{shell_remove-document,delete a document}
|
||||
///
|
||||
/// Delete a document with a conflict:
|
||||
///
|
||||
/// @TINYEXAMPLE{shell_remove-document-conflict,delete a document}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static v8::Handle<v8::Value> JS_RemoveVocbaseCol (v8::Arguments const& argv) {
|
||||
v8::HandleScope scope;
|
||||
|
||||
v8::Handle<v8::Object> err;
|
||||
TRI_vocbase_col_t const* collection = UseCollection(argv.Holder(), &err);
|
||||
|
||||
if (collection == 0) {
|
||||
return scope.Close(v8::ThrowException(err));
|
||||
}
|
||||
|
||||
return DeleteVocbaseCol(collection->_vocbase, collection, argv);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief renames a collection
|
||||
///
|
||||
|
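A short arangosh sketch of the remove() variants documented in the hunk above; the collection "example" and the stored document are invented for illustration:

// hypothetical arangosh session
var doc = db.example.save({ name: "to be removed" });

db.example.remove(doc);         // removes the document; a revision mismatch would throw
db.example.remove(doc, true);   // mismatches are ignored; returns false here because
                                // the document is already gone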
@@ -3379,6 +3526,45 @@ static v8::Handle<v8::Value> JS_StatusVocbaseCol (v8::Arguments const& argv) {
  return scope.Close(v8::Number::New((int) status));
}

////////////////////////////////////////////////////////////////////////////////
/// @brief truncates a datafile
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_TruncateDatafileVocbaseCol (v8::Arguments const& argv) {
  v8::HandleScope scope;

  TRI_vocbase_col_t* collection = TRI_UnwrapClass<TRI_vocbase_col_t>(argv.Holder(), WRP_VOCBASE_COL_TYPE);

  if (collection == 0) {
    return scope.Close(v8::ThrowException(v8::String::New("illegal collection pointer")));
  }

  if (argv.Length() != 2) {
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(TRI_ERROR_ILLEGAL_OPTION, "usage: truncateDatafile(<datafile>, <size>)")));
  }

  string path = TRI_ObjectToString(argv[0]);
  size_t size = TRI_ObjectToDouble(argv[1]);

  TRI_READ_LOCK_STATUS_VOCBASE_COL(collection);

  if (collection->_status != TRI_VOC_COL_STATUS_UNLOADED) {
    TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(TRI_ERROR_ARANGO_COLLECTION_NOT_UNLOADED,
                                                                "collection must be unloaded")));
  }

  int res = TRI_TruncateDatafile(path.c_str(), size);

  TRI_READ_UNLOCK_STATUS_VOCBASE_COL(collection);

  if (res != TRI_ERROR_NO_ERROR) {
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(res, "cannot truncate datafile")));
  }

  return scope.Close(v8::Undefined());
}

////////////////////////////////////////////////////////////////////////////////
/// @brief unloads a collection
///
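The truncateDatafile() binding added above is the repair half of the debugger workflow: together with datafileScan() it can cut a damaged journal back to its last intact marker. A hedged sketch; the collection name, the path and the meaning of the status value are assumptions, not taken from this commit:

// hypothetical repair sketch for an unloaded collection "example"
var path = "/var/arangodb/example-123/journal-456.db";   // invented path
var scan = db.example.datafileScan(path);

if (scan.status !== 1) {                                  // assumption: status 1 means "datafile is sound"
  // keep everything up to the end of the last readable marker
  db.example.truncateDatafile(path, scan.endPosition);
}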
@ -4126,8 +4312,8 @@ static v8::Handle<v8::Value> MapGetShapedJson (v8::Local<v8::String> name,
|
|||
TRI_shaper_t* shaper = collection->_shaper;
|
||||
TRI_shape_pid_t pid = shaper->findAttributePathByName(shaper, key.c_str());
|
||||
|
||||
TRI_shape_sid_t sid;
|
||||
TRI_EXTRACT_SHAPE_IDENTIFIER_MARKER(sid, marker);
|
||||
// TRI_shape_sid_t sid;
|
||||
// TRI_EXTRACT_SHAPE_IDENTIFIER_MARKER(sid, marker);
|
||||
|
||||
TRI_shaped_json_t document;
|
||||
TRI_EXTRACT_SHAPED_JSON_MARKER(document, marker);
|
||||
|
@ -4333,38 +4519,6 @@ void TRI_ReleaseCollection (TRI_vocbase_col_t const* collection) {
|
|||
TRI_ReleaseCollectionVocBase(collection->_vocbase, const_cast<TRI_vocbase_col_t*>(collection));
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief creates an error in a javascript object
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
v8::Handle<v8::Object> TRI_CreateErrorObject (int errorNumber, string const& message) {
|
||||
TRI_v8_global_t* v8g;
|
||||
v8::HandleScope scope;
|
||||
|
||||
v8g = (TRI_v8_global_t*) v8::Isolate::GetCurrent()->GetData();
|
||||
|
||||
string msg;
|
||||
if (message.size()) {
|
||||
msg = message;
|
||||
}
|
||||
else {
|
||||
msg = TRI_errno_string(errorNumber) + string(": ") + message;
|
||||
}
|
||||
v8::Handle<v8::String> errorMessage = v8::String::New(msg.c_str());
|
||||
|
||||
v8::Handle<v8::Object> errorObject = v8::Exception::Error(errorMessage)->ToObject();
|
||||
v8::Handle<v8::Value> proto = v8g->ErrorTempl->NewInstance();
|
||||
|
||||
errorObject->Set(v8::String::New("errorNum"), v8::Number::New(errorNumber));
|
||||
errorObject->Set(v8::String::New("errorMessage"), errorMessage);
|
||||
|
||||
if (!proto.IsEmpty()) {
|
||||
errorObject->SetPrototype(proto);
|
||||
}
|
||||
|
||||
return errorObject;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief parse document or document handle
|
||||
///
|
||||
|
@ -4734,22 +4888,22 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
// .............................................................................
|
||||
|
||||
v8::Handle<v8::String> CountFuncName = v8::Persistent<v8::String>::New(v8::String::New("count"));
|
||||
v8::Handle<v8::String> DatafileScanFuncName = v8::Persistent<v8::String>::New(v8::String::New("datafileScan"));
|
||||
v8::Handle<v8::String> DatafilesFuncName = v8::Persistent<v8::String>::New(v8::String::New("datafiles"));
|
||||
v8::Handle<v8::String> DisposeFuncName = v8::Persistent<v8::String>::New(v8::String::New("dispose"));
|
||||
v8::Handle<v8::String> DocumentFuncName = v8::Persistent<v8::String>::New(v8::String::New("document"));
|
||||
v8::Handle<v8::String> DropFuncName = v8::Persistent<v8::String>::New(v8::String::New("drop"));
|
||||
v8::Handle<v8::String> DropIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("dropIndex"));
|
||||
|
||||
v8::Handle<v8::String> EnsureBitarrayFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureBitarray"));
|
||||
v8::Handle<v8::String> EnsureUndefBitarrayFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureUndefBitarray"));
|
||||
v8::Handle<v8::String> EnsureCapConstraintFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureCapConstraint"));
|
||||
v8::Handle<v8::String> EnsureGeoConstraintFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureGeoConstraint"));
|
||||
v8::Handle<v8::String> EnsureGeoIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureGeoIndex"));
|
||||
v8::Handle<v8::String> EnsureHashIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureHashIndex"));
|
||||
v8::Handle<v8::String> EnsurePriorityQueueIndexFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensurePQIndex"));
|
||||
v8::Handle<v8::String> EnsureSkiplistFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureSkiplist"));
|
||||
v8::Handle<v8::String> EnsureUndefBitarrayFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureUndefBitarray"));
|
||||
v8::Handle<v8::String> EnsureUniqueConstraintFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureUniqueConstraint"));
|
||||
v8::Handle<v8::String> EnsureUniqueSkiplistFuncName = v8::Persistent<v8::String>::New(v8::String::New("ensureUniqueSkiplist"));
|
||||
|
||||
v8::Handle<v8::String> FiguresFuncName = v8::Persistent<v8::String>::New(v8::String::New("figures"));
|
||||
v8::Handle<v8::String> GetBatchSizeFuncName = v8::Persistent<v8::String>::New(v8::String::New("getBatchSize"));
|
||||
v8::Handle<v8::String> GetIndexesFuncName = v8::Persistent<v8::String>::New(v8::String::New("getIndexes"));
|
||||
|
@ -4771,6 +4925,7 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
v8::Handle<v8::String> ReplaceFuncName = v8::Persistent<v8::String>::New(v8::String::New("replace"));
|
||||
v8::Handle<v8::String> SaveFuncName = v8::Persistent<v8::String>::New(v8::String::New("save"));
|
||||
v8::Handle<v8::String> StatusFuncName = v8::Persistent<v8::String>::New(v8::String::New("status"));
|
||||
v8::Handle<v8::String> TruncateDatafileFuncName = v8::Persistent<v8::String>::New(v8::String::New("truncateDatafile"));
|
||||
v8::Handle<v8::String> UnloadFuncName = v8::Persistent<v8::String>::New(v8::String::New("unload"));
|
||||
|
||||
v8::Handle<v8::String> _CollectionFuncName = v8::Persistent<v8::String>::New(v8::String::New("_collection"));
|
||||
|
@ -4913,7 +5068,6 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(DocumentFuncName, v8::FunctionTemplate::New(JS_DocumentVocbaseCol));
|
||||
rt->Set(DropFuncName, v8::FunctionTemplate::New(JS_DropVocbaseCol));
|
||||
rt->Set(DropIndexFuncName, v8::FunctionTemplate::New(JS_DropIndexVocbaseCol));
|
||||
|
||||
rt->Set(EnsureBitarrayFuncName, v8::FunctionTemplate::New(JS_EnsureBitarrayVocbaseCol));
|
||||
rt->Set(EnsureUndefBitarrayFuncName, v8::FunctionTemplate::New(JS_EnsureUndefBitarrayVocbaseCol));
|
||||
rt->Set(EnsureCapConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureCapConstraintVocbaseCol));
|
||||
|
@ -4924,7 +5078,8 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(EnsureSkiplistFuncName, v8::FunctionTemplate::New(JS_EnsureSkiplistVocbaseCol));
|
||||
rt->Set(EnsureUniqueConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueConstraintVocbaseCol));
|
||||
rt->Set(EnsureUniqueSkiplistFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueSkiplistVocbaseCol));
|
||||
|
||||
rt->Set(DatafileScanFuncName, v8::FunctionTemplate::New(JS_DatafileScanVocbaseCol));
|
||||
rt->Set(DatafilesFuncName, v8::FunctionTemplate::New(JS_DatafilesVocbaseCol));
|
||||
rt->Set(FiguresFuncName, v8::FunctionTemplate::New(JS_FiguresVocbaseCol));
|
||||
rt->Set(GetIndexesFuncName, v8::FunctionTemplate::New(JS_GetIndexesVocbaseCol));
|
||||
rt->Set(LoadFuncName, v8::FunctionTemplate::New(JS_LoadVocbaseCol));
|
||||
|
@ -4937,6 +5092,7 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(RemoveFuncName, v8::FunctionTemplate::New(JS_RemoveVocbaseCol));
|
||||
rt->Set(RenameFuncName, v8::FunctionTemplate::New(JS_RenameVocbaseCol));
|
||||
rt->Set(StatusFuncName, v8::FunctionTemplate::New(JS_StatusVocbaseCol));
|
||||
rt->Set(TruncateDatafileFuncName, v8::FunctionTemplate::New(JS_TruncateDatafileVocbaseCol));
|
||||
rt->Set(UnloadFuncName, v8::FunctionTemplate::New(JS_UnloadVocbaseCol));
|
||||
|
||||
rt->Set(SaveFuncName, v8::FunctionTemplate::New(JS_SaveVocbaseCol));
|
||||
|
@ -4962,7 +5118,6 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(DocumentFuncName, v8::FunctionTemplate::New(JS_DocumentVocbaseCol));
|
||||
rt->Set(DropFuncName, v8::FunctionTemplate::New(JS_DropVocbaseCol));
|
||||
rt->Set(DropIndexFuncName, v8::FunctionTemplate::New(JS_DropIndexVocbaseCol));
|
||||
|
||||
rt->Set(EnsureBitarrayFuncName, v8::FunctionTemplate::New(JS_EnsureBitarrayVocbaseCol));
|
||||
rt->Set(EnsureUndefBitarrayFuncName, v8::FunctionTemplate::New(JS_EnsureUndefBitarrayVocbaseCol));
|
||||
rt->Set(EnsureCapConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureCapConstraintVocbaseCol));
|
||||
|
@ -4973,7 +5128,8 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(EnsureSkiplistFuncName, v8::FunctionTemplate::New(JS_EnsureSkiplistVocbaseCol));
|
||||
rt->Set(EnsureUniqueConstraintFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueConstraintVocbaseCol));
|
||||
rt->Set(EnsureUniqueSkiplistFuncName, v8::FunctionTemplate::New(JS_EnsureUniqueSkiplistVocbaseCol));
|
||||
|
||||
rt->Set(DatafileScanFuncName, v8::FunctionTemplate::New(JS_DatafileScanVocbaseCol));
|
||||
rt->Set(DatafilesFuncName, v8::FunctionTemplate::New(JS_DatafilesVocbaseCol));
|
||||
rt->Set(FiguresFuncName, v8::FunctionTemplate::New(JS_FiguresVocbaseCol));
|
||||
rt->Set(GetIndexesFuncName, v8::FunctionTemplate::New(JS_GetIndexesVocbaseCol));
|
||||
rt->Set(LoadFuncName, v8::FunctionTemplate::New(JS_LoadVocbaseCol));
|
||||
|
@ -4987,6 +5143,7 @@ TRI_v8_global_t* TRI_InitV8VocBridge (v8::Handle<v8::Context> context, TRI_vocba
|
|||
rt->Set(RenameFuncName, v8::FunctionTemplate::New(JS_RenameVocbaseCol));
|
||||
rt->Set(ReplaceFuncName, v8::FunctionTemplate::New(JS_ReplaceVocbaseCol));
|
||||
rt->Set(StatusFuncName, v8::FunctionTemplate::New(JS_StatusVocbaseCol));
|
||||
rt->Set(TruncateDatafileFuncName, v8::FunctionTemplate::New(JS_TruncateDatafileVocbaseCol));
|
||||
rt->Set(UnloadFuncName, v8::FunctionTemplate::New(JS_UnloadVocbaseCol));
|
||||
|
||||
rt->Set(SaveFuncName, v8::FunctionTemplate::New(JS_SaveEdgesCol));

@@ -55,12 +55,6 @@ TRI_sim_collection_t* TRI_ExtractAndUseSimpleCollection (v8::Arguments const& ar

void TRI_ReleaseCollection (TRI_vocbase_col_t const* collection);

////////////////////////////////////////////////////////////////////////////////
/// @brief creates an error in a javascript object
////////////////////////////////////////////////////////////////////////////////

v8::Handle<v8::Object> TRI_CreateErrorObject (int errorNumber, std::string const& message);

////////////////////////////////////////////////////////////////////////////////
/// @brief parse document or document handle
////////////////////////////////////////////////////////////////////////////////

@ -79,8 +79,97 @@ static void InitCollection (TRI_vocbase_t* vocbase,
|
|||
TRI_InitVectorString(&collection->_indexFiles, TRI_UNKNOWN_MEM_ZONE);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief scans a collection and locates all files
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static TRI_col_file_structure_t ScanCollectionDirectory (char const* path) {
|
||||
TRI_col_file_structure_t structure;
|
||||
TRI_vector_string_t files;
|
||||
regex_t re;
|
||||
size_t i;
|
||||
size_t n;
|
||||
|
||||
// check files within the directory
|
||||
files = TRI_FilesDirectory(path);
|
||||
n = files._length;
|
||||
|
||||
regcomp(&re, "^(journal|datafile|index|compactor)-([0-9][0-9]*)\\.(db|json)$", REG_ICASE | REG_EXTENDED);
|
||||
|
||||
TRI_InitVectorString(&structure._journals, TRI_CORE_MEM_ZONE);
|
||||
TRI_InitVectorString(&structure._compactors, TRI_CORE_MEM_ZONE);
|
||||
TRI_InitVectorString(&structure._datafiles, TRI_CORE_MEM_ZONE);
|
||||
TRI_InitVectorString(&structure._indexes, TRI_CORE_MEM_ZONE);
|
||||
|
||||
for (i = 0; i < n; ++i) {
|
||||
char const* file = files._buffer[i];
|
||||
regmatch_t matches[4];
|
||||
|
||||
if (regexec(&re, file, sizeof(matches) / sizeof(matches[0]), matches, 0) == 0) {
|
||||
char const* first = file + matches[1].rm_so;
|
||||
size_t firstLen = matches[1].rm_eo - matches[1].rm_so;
|
||||
|
||||
char const* third = file + matches[3].rm_so;
|
||||
size_t thirdLen = matches[3].rm_eo - matches[3].rm_so;
|
||||
|
||||
// .............................................................................
|
||||
// file is an index
|
||||
// .............................................................................
|
||||
|
||||
if (TRI_EqualString2("index", first, firstLen) && TRI_EqualString2("json", third, thirdLen)) {
|
||||
char* filename;
|
||||
|
||||
filename = TRI_Concatenate2File(path, file);
|
||||
TRI_PushBackVectorString(&structure._indexes, filename);
|
||||
}
|
||||
|
||||
// .............................................................................
|
||||
// file is a journal or datafile
|
||||
// .............................................................................
|
||||
|
||||
else if (TRI_EqualString2("db", third, thirdLen)) {
|
||||
char* filename;
|
||||
|
||||
filename = TRI_Concatenate2File(path, file);
|
||||
|
||||
// file is a journal
|
||||
if (TRI_EqualString2("journal", first, firstLen)) {
|
||||
TRI_PushBackVectorString(&structure._journals, filename);
|
||||
}
|
||||
|
||||
// file is a compactor file
|
||||
else if (TRI_EqualString2("compactor", first, firstLen)) {
|
||||
TRI_PushBackVectorString(&structure._compactors, filename);
|
||||
}
|
||||
|
||||
// file is a datafile
|
||||
else if (TRI_EqualString2("datafile", first, firstLen)) {
|
||||
TRI_PushBackVectorString(&structure._datafiles, filename);
|
||||
}
|
||||
|
||||
// ups, what kind of file is that
|
||||
else {
|
||||
LOG_ERROR("unknown datafile '%s'", file);
|
||||
TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
|
||||
}
|
||||
}
|
||||
else {
|
||||
LOG_ERROR("unknown datafile '%s'", file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TRI_DestroyVectorString(&files);
|
||||
|
||||
regfree(&re);
|
||||
|
||||
return structure;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a collection
|
||||
///
|
||||
/// TODO: Use ScanCollectionDirectory
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static bool CheckCollection (TRI_collection_t* collection) {
|
||||
|
@ -130,7 +219,6 @@ static bool CheckCollection (TRI_collection_t* collection) {
|
|||
char* filename;
|
||||
|
||||
filename = TRI_Concatenate2File(collection->_directory, file);
|
||||
// TODO: memory allocation might fail
|
||||
TRI_PushBackVectorString(&collection->_indexFiles, filename);
|
||||
}
|
||||
|
||||
|
@ -246,11 +334,8 @@ static bool CheckCollection (TRI_collection_t* collection) {
|
|||
datafile = sealed._buffer[i];
|
||||
|
||||
number = TRI_StringUInt32(datafile->_fid);
|
||||
// TODO: memory allocation might fail
|
||||
dname = TRI_Concatenate3String("datafile-", number, ".db");
|
||||
// TODO: memory allocation might fail
|
||||
filename = TRI_Concatenate2File(collection->_directory, dname);
|
||||
// TODO: memory allocation might fail
|
||||
|
||||
TRI_FreeString(TRI_CORE_MEM_ZONE, dname);
|
||||
TRI_FreeString(TRI_CORE_MEM_ZONE, number);
|
||||
|
@ -938,6 +1023,25 @@ int TRI_CloseCollection (TRI_collection_t* collection) {
  return TRI_ERROR_NO_ERROR;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief returns information about the collection files
////////////////////////////////////////////////////////////////////////////////

TRI_col_file_structure_t TRI_FileStructureCollectionDirectory (char const* path) {
  return ScanCollectionDirectory(path);
}

////////////////////////////////////////////////////////////////////////////////
/// @brief frees the information
////////////////////////////////////////////////////////////////////////////////

void TRI_DestroyFileStructureCollection (TRI_col_file_structure_t* info) {
  TRI_DestroyVectorString(&info->_journals);
  TRI_DestroyVectorString(&info->_compactors);
  TRI_DestroyVectorString(&info->_datafiles);
  TRI_DestroyVectorString(&info->_indexes);
}

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

@ -110,6 +110,18 @@ extern "C" {
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief collection file structure
////////////////////////////////////////////////////////////////////////////////

typedef struct TRI_col_file_structure_s {
  TRI_vector_string_t _journals;
  TRI_vector_string_t _compactors;
  TRI_vector_string_t _datafiles;
  TRI_vector_string_t _indexes;
}
TRI_col_file_structure_t;

////////////////////////////////////////////////////////////////////////////////
/// @brief state of the datafile
////////////////////////////////////////////////////////////////////////////////

@ -340,6 +352,20 @@ TRI_collection_t* TRI_OpenCollection (TRI_vocbase_t*,

int TRI_CloseCollection (TRI_collection_t*);

////////////////////////////////////////////////////////////////////////////////
/// @brief returns information about the collection files
///
/// Note that the collection must not be loaded
////////////////////////////////////////////////////////////////////////////////

TRI_col_file_structure_t TRI_FileStructureCollectionDirectory (char const* path);

////////////////////////////////////////////////////////////////////////////////
/// @brief frees the information
////////////////////////////////////////////////////////////////////////////////

void TRI_DestroyFileStructureCollection (TRI_col_file_structure_t*);

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

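As a usage note for the declarations above: the returned structure owns four string vectors that the caller has to release again via TRI_DestroyFileStructureCollection. A minimal sketch of a caller follows; the include path, the "collection-123" directory and the _length/_buffer members of TRI_vector_string_t are assumptions on my part, not spelled out in this hunk.

/* Hypothetical caller of the directory-scan API declared above; the include
   path, the directory name and the vector members are assumed, not taken
   from this commit. */
#include <stdio.h>
#include "VocBase/collection.h"

static void ListJournals (void) {
  TRI_col_file_structure_t info = TRI_FileStructureCollectionDirectory("collection-123");
  size_t i;

  for (i = 0;  i < info._journals._length;  ++i) {
    printf("journal: %s\n", info._journals._buffer[i]);
  }

  TRI_DestroyFileStructureCollection(&info);   /* frees the four string vectors */
}
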
@ -83,6 +83,225 @@ static void InitDatafile (TRI_datafile_t* datafile,
  datafile->_nWritten = 0;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief truncates a datafile
///
/// Creates a truncated copy of the datafile, seals it and renames the old file.
////////////////////////////////////////////////////////////////////////////////

static int TruncateDatafile (TRI_datafile_t* datafile, TRI_voc_size_t size) {
  char* filename;
  char* oldname;
  char zero;
  int fd;
  int res;
  size_t maximalSize;
  size_t offset;
  void* data;

  // use multiples of page-size
  maximalSize = ((size + sizeof(TRI_df_footer_marker_t) + PageSize - 1) / PageSize) * PageSize;

  // sanity check
  if (sizeof(TRI_df_header_marker_t) + sizeof(TRI_df_footer_marker_t) > maximalSize) {
    LOG_ERROR("cannot create datafile '%s', maximal size '%u' is too small", datafile->_filename, (unsigned int) maximalSize);
    return TRI_set_errno(TRI_ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL);
  }

  // open the file
  filename = TRI_Concatenate2String(datafile->_filename, ".new");

  fd = TRI_CREATE(filename, O_CREAT | O_EXCL | O_RDWR, S_IRUSR | S_IWUSR);

  if (fd < 0) {
    LOG_ERROR("cannot create datafile '%s': '%s'", filename, TRI_last_error());
    return TRI_set_errno(TRI_ERROR_SYS_ERROR);
  }

  // create sparse file
  offset = lseek(fd, maximalSize - 1, SEEK_SET);

  if (offset == (off_t) -1) {
    TRI_set_errno(TRI_ERROR_SYS_ERROR);
    close(fd);

    // remove empty file
    TRI_UnlinkFile(filename);

    LOG_ERROR("cannot seek in datafile '%s': '%s'", filename, TRI_last_error());
    return TRI_ERROR_SYS_ERROR;
  }

  zero = 0;
  res = write(fd, &zero, 1);

  if (res < 0) {
    TRI_set_errno(TRI_ERROR_SYS_ERROR);
    close(fd);

    // remove empty file
    TRI_UnlinkFile(filename);

    LOG_ERROR("cannot create sparse datafile '%s': '%s'", filename, TRI_last_error());
    return TRI_ERROR_SYS_ERROR;
  }

  // memory map the data
  data = mmap(0, maximalSize, PROT_WRITE | PROT_READ, MAP_SHARED, fd, 0);

  if (data == MAP_FAILED) {
    if (errno == ENOMEM) {
      TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY_MMAP);
    }
    else {
      TRI_set_errno(TRI_ERROR_SYS_ERROR);
    }

    close(fd);

    // remove empty file
    TRI_UnlinkFile(filename);

    LOG_ERROR("cannot memory map file '%s': '%s'", filename, TRI_last_error());
    return TRI_errno();
  }

  // copy the data
  memcpy(data, datafile->_data, size);

  // patch the datafile structure
  res = munmap(datafile->_data, datafile->_maximalSize);

  if (res < 0) {
    LOG_ERROR("munmap failed with: %s", TRI_last_error());
    return res;
  }

  close(datafile->_fd);

  datafile->_data = data;
  datafile->_next = data + size;
  datafile->_maximalSize = maximalSize;

  // rename files
  oldname = TRI_Concatenate2String(datafile->_filename, ".corrupted");

  res = TRI_RenameFile(datafile->_filename, oldname);

  if (res != TRI_ERROR_NO_ERROR) {
    TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
    TRI_FreeString(TRI_CORE_MEM_ZONE, oldname);

    return res;
  }

  res = TRI_RenameFile(filename, datafile->_filename);

  if (res != TRI_ERROR_NO_ERROR) {
    TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
    TRI_FreeString(TRI_CORE_MEM_ZONE, oldname);

    return res;
  }

  TRI_SealDatafile(datafile);
  return TRI_ERROR_NO_ERROR;
}
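
The rounding at the top of TruncateDatafile pads the requested size plus the footer marker up to a whole number of pages. A quick, self-contained check of that arithmetic (a sketch only; the 4096-byte page and 16-byte footer below are assumed example values, not the real PageSize or sizeof(TRI_df_footer_marker_t)):

/* Hypothetical check of the round-up-to-page formula used above.
   PAGE and FOOTER arguments are assumed example values. */
#include <assert.h>
#include <stddef.h>

static size_t RoundToPage (size_t size, size_t footer, size_t page) {
  return ((size + footer + page - 1) / page) * page;   /* same formula as above */
}

int main () {
  assert(RoundToPage(10000, 16, 4096) == 12288);   /* 10016 bytes need three 4k pages */
  assert(RoundToPage(4080, 16, 4096) == 4096);     /* fits exactly into one page */
  return 0;
}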
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief scans a datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static TRI_df_scan_t ScanDatafile (TRI_datafile_t const* datafile) {
|
||||
TRI_df_scan_t scan;
|
||||
TRI_df_scan_entry_t entry;
|
||||
|
||||
TRI_voc_size_t currentSize;
|
||||
char* end;
|
||||
char* ptr;
|
||||
|
||||
ptr = datafile->_data;
|
||||
end = datafile->_data + datafile->_currentSize;
|
||||
currentSize = 0;
|
||||
|
||||
TRI_InitVector(&scan._entries, TRI_CORE_MEM_ZONE, sizeof(TRI_df_scan_entry_t));
|
||||
|
||||
scan._currentSize = datafile->_currentSize;
|
||||
scan._maximalSize = datafile->_maximalSize;
|
||||
scan._numberMarkers = 0;
|
||||
scan._status = 1;
|
||||
|
||||
if (datafile->_currentSize == 0) {
|
||||
end = datafile->_data + datafile->_maximalSize;
|
||||
}
|
||||
|
||||
while (ptr < end) {
|
||||
TRI_df_marker_t* marker = (TRI_df_marker_t*) ptr;
|
||||
bool ok;
|
||||
size_t size;
|
||||
|
||||
memset(&entry, 0, sizeof(entry));
|
||||
|
||||
entry._position = ptr - datafile->_data;
|
||||
entry._size = marker->_size;
|
||||
entry._tick = marker->_tick;
|
||||
entry._type = marker->_type;
|
||||
entry._status = 1;
|
||||
|
||||
if (marker->_size == 0 && marker->_crc == 0 && marker->_type == 0 && marker->_tick == 0) {
|
||||
entry._status = 2;
|
||||
|
||||
scan._endPosition = currentSize;
|
||||
|
||||
TRI_PushBackVector(&scan._entries, &entry);
|
||||
return scan;
|
||||
}
|
||||
|
||||
++scan._numberMarkers;
|
||||
|
||||
if (marker->_size == 0) {
|
||||
entry._status = 3;
|
||||
|
||||
scan._status = 2;
|
||||
scan._endPosition = currentSize;
|
||||
|
||||
TRI_PushBackVector(&scan._entries, &entry);
|
||||
return scan;
|
||||
}
|
||||
|
||||
if (marker->_size < sizeof(TRI_df_marker_t)) {
|
||||
entry._status = 4;
|
||||
|
||||
scan._endPosition = currentSize;
|
||||
scan._status = 3;
|
||||
|
||||
TRI_PushBackVector(&scan._entries, &entry);
|
||||
return scan;
|
||||
}
|
||||
|
||||
ok = TRI_CheckCrcMarkerDatafile(marker);
|
||||
|
||||
if (! ok) {
|
||||
entry._status = 5;
|
||||
scan._status = 4;
|
||||
}
|
||||
|
||||
TRI_PushBackVector(&scan._entries, &entry);
|
||||
|
||||
size = ((marker->_size + TRI_DF_BLOCK_ALIGN - 1) / TRI_DF_BLOCK_ALIGN) * TRI_DF_BLOCK_ALIGN;
|
||||
currentSize += size;
|
||||
|
||||
if (marker->_type == TRI_DF_MARKER_FOOTER) {
|
||||
scan._endPosition = currentSize;
|
||||
return scan;
|
||||
}
|
||||
|
||||
ptr += size;
|
||||
}
|
||||
|
||||
return scan;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -949,6 +1168,62 @@ int TRI_SealDatafile (TRI_datafile_t* datafile) {
  return ok ? TRI_ERROR_NO_ERROR : datafile->_lastError;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief truncates a datafile and seals it
////////////////////////////////////////////////////////////////////////////////

int TRI_TruncateDatafile (char const* path, TRI_voc_size_t position) {
  TRI_datafile_t* datafile;
  int res;

  datafile = OpenDatafile(path, true);

  if (datafile == NULL) {
    return TRI_ERROR_ARANGO_DATAFILE_UNREADABLE;
  }

  res = TruncateDatafile(datafile, position);
  TRI_CloseDatafile(datafile);

  return res;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief returns information about the datafile
////////////////////////////////////////////////////////////////////////////////

TRI_df_scan_t TRI_ScanDatafile (char const* path) {
  TRI_df_scan_t scan;
  TRI_datafile_t* datafile;

  datafile = OpenDatafile(path, true);

  if (datafile != 0) {
    scan = ScanDatafile(datafile);
    TRI_CloseDatafile(datafile);
  }
  else {
    scan._currentSize = 0;
    scan._maximalSize = 0;
    scan._endPosition = 0;
    scan._numberMarkers = 0;

    TRI_InitVector(&scan._entries, TRI_CORE_MEM_ZONE, sizeof(TRI_df_scan_entry_t));

    scan._status = 5;
  }

  return scan;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief destroys information about the datafile
////////////////////////////////////////////////////////////////////////////////

void TRI_DestroyDatafileScan (TRI_df_scan_t* scan) {
  TRI_DestroyVector(&scan->_entries);
}

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

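Taken together, the three public entry points above form a small repair loop: scan the datafile, truncate it back to the last good position, and seal it. A minimal sketch of a caller, assuming the datafile header declared in the hunk below; the include path, the function name InspectAndRepair and the idea of truncating straight at _endPosition are my assumptions, not part of the commit. This mirrors in C what the interactive arango-dfdb script added later in this commit does from JavaScript.

/* Hypothetical caller of the scan/truncate API added above; the include path
   and the repair policy are assumptions, not taken from this commit. */
#include "VocBase/datafile.h"

static void InspectAndRepair (char const* path) {
  TRI_df_scan_t scan = TRI_ScanDatafile(path);

  if (scan._status != 1) {                       /* 1 means the datafile is OK */
    /* cut the datafile back to the last good byte and seal it */
    TRI_TruncateDatafile(path, scan._endPosition);
  }

  TRI_DestroyDatafileScan(&scan);
}
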
@ -182,6 +182,37 @@ typedef uint32_t TRI_df_version_t;
|
|||
|
||||
typedef uint32_t TRI_df_flag_t;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief scan result
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
typedef struct TRI_df_scan_s {
|
||||
TRI_voc_size_t _currentSize;
|
||||
TRI_voc_size_t _maximalSize;
|
||||
TRI_voc_size_t _endPosition;
|
||||
TRI_voc_size_t _numberMarkers;
|
||||
|
||||
TRI_vector_t _entries;
|
||||
|
||||
uint32_t _status;
|
||||
}
|
||||
TRI_df_scan_t;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief scan result entry
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
typedef struct TRI_df_scan_entry_s {
|
||||
TRI_voc_size_t _position;
|
||||
TRI_voc_size_t _size;
|
||||
TRI_voc_tick_t _tick;
|
||||
|
||||
TRI_df_marker_type_t _type;
|
||||
|
||||
uint32_t _status;
|
||||
}
|
||||
TRI_df_scan_entry_t;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -486,6 +517,24 @@ int TRI_SealDatafile (TRI_datafile_t* datafile);
|
|||
|
||||
bool TRI_RenameDatafile (TRI_datafile_t* datafile, char const* filename);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief truncates a datafile and seals it
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int TRI_TruncateDatafile (char const* path, TRI_voc_size_t position);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief returns information about the datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_df_scan_t TRI_ScanDatafile (char const* path);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief destroys information about the datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void TRI_DestroyDatafileScan (TRI_df_scan_t* scan);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -1346,6 +1346,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
|
||||
// update the datafile info
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, datafile->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberAlive += 1;
|
||||
dfi->_sizeAlive += header->_document._data.length;
|
||||
|
@ -1369,6 +1370,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
|
||||
// update the datafile info
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, found->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberAlive -= 1;
|
||||
dfi->_sizeAlive -= found->_document._data.length;
|
||||
|
@ -1378,6 +1380,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
}
|
||||
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, datafile->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberAlive += 1;
|
||||
dfi->_sizeAlive += update._document._data.length;
|
||||
|
@ -1390,6 +1393,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
// it is a stale update
|
||||
else {
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, datafile->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberDead += 1;
|
||||
dfi->_sizeDead += found->_document._data.length;
|
||||
|
@ -1430,6 +1434,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
|
||||
// update the datafile info
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, datafile->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberDeletion += 1;
|
||||
}
|
||||
|
@ -1445,6 +1450,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
|
||||
// update the datafile info
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, found->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberAlive -= 1;
|
||||
dfi->_sizeAlive -= found->_document._data.length;
|
||||
|
@ -1453,6 +1459,7 @@ static bool OpenIterator (TRI_df_marker_t const* marker, void* data, TRI_datafil
|
|||
dfi->_sizeDead += found->_document._data.length;
|
||||
}
|
||||
dfi = TRI_FindDatafileInfoDocCollection(&collection->base, datafile->_fid);
|
||||
|
||||
if (dfi != NULL) {
|
||||
dfi->_numberDeletion += 1;
|
||||
}
|
||||
|
@ -2901,40 +2908,6 @@ TRI_vector_pointer_t* TRI_IndexesSimCollection (TRI_sim_collection_t* sim) {
|
|||
return vector;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief returns a description of an index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_index_t* TRI_IndexSimCollection (TRI_sim_collection_t* sim, TRI_idx_iid_t iid) {
|
||||
TRI_index_t* idx = NULL;
|
||||
size_t n;
|
||||
size_t i;
|
||||
|
||||
// .............................................................................
|
||||
// inside read-lock
|
||||
// .............................................................................
|
||||
|
||||
TRI_READ_LOCK_DOCUMENTS_INDEXES_SIM_COLLECTION(sim);
|
||||
|
||||
n = sim->_indexes._length;
|
||||
|
||||
for (i = 0; i < n; ++i) {
|
||||
idx = sim->_indexes._buffer[i];
|
||||
|
||||
if (idx->_iid == iid) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
TRI_READ_UNLOCK_DOCUMENTS_INDEXES_SIM_COLLECTION(sim);
|
||||
|
||||
// .............................................................................
|
||||
// outside read-lock
|
||||
// .............................................................................
|
||||
|
||||
return i < n ? idx : NULL;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief drops an index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -4407,10 +4380,7 @@ TRI_index_t* TRI_EnsurePriorityQueueIndexSimCollection(TRI_sim_collection_t* sim
|
|||
|
||||
TRI_WRITE_LOCK_DOCUMENTS_INDEXES_SIM_COLLECTION(sim);
|
||||
|
||||
// .............................................................................
|
||||
// Given the list of attributes (as strings)
|
||||
// .............................................................................
|
||||
|
||||
idx = CreatePriorityQueueIndexSimCollection(sim, attributes, 0, unique, created);
|
||||
|
||||
if (idx == NULL) {
|
||||
|
|
|
@ -351,12 +351,6 @@ int TRI_CloseSimCollection (TRI_sim_collection_t* collection);
|
|||
|
||||
TRI_vector_pointer_t* TRI_IndexesSimCollection (TRI_sim_collection_t*);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief returns a description of an index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_index_t* TRI_IndexSimCollection (TRI_sim_collection_t*, TRI_idx_iid_t);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief drops an index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -0,0 +1,20 @@
#!/bin/bash

# Placeholders starting with @ will be replaced by make

ARANGOD="@SBINDIR@/arangod"
DATABASE="@DATABASE@"
SCRIPT="@STATICFILES@/js/server/arango-dfdb.js"

if [ "$1" == "--database" ] || [ "$1" == "--database.directory" ] ; then
  shift
  DATABASE="$1"
  shift
fi

if test ! -d "$DATABASE"; then
  echo "$0: database directory '$DATABASE' does not exist"
  exit 1
fi

$ARANGOD -c none --database.directory "$DATABASE" --javascript.script "$SCRIPT"
|
build.sh
|
@ -52,6 +52,20 @@ echo
|
|||
|
||||
case $TRI_OS_LONG in
|
||||
|
||||
Linux-ArchLinux*)
|
||||
echo "Using configuration for Arch Linux"
|
||||
OPTIONS="$OPTIONS --enable-all-in-one --enable-mruby"
|
||||
LDD_INFO="yes"
|
||||
RESULTS="$RESULTS arangoirb"
|
||||
;;
|
||||
|
||||
Linux-LinuxMint-13*)
|
||||
echo "Using configuration for LinuxMint 13"
|
||||
OPTIONS="$OPTIONS --enable-all-in-one --enable-mruby"
|
||||
LDD_INFO="yes"
|
||||
RESULTS="$RESULTS arangoirb"
|
||||
;;
|
||||
|
||||
Linux-openSUSE-12*)
|
||||
echo "Using configuration for openSuSE 12"
|
||||
OPTIONS="$OPTIONS --enable-all-in-one --enable-mruby"
|
||||
|
|
|
@ -21,6 +21,11 @@ elif [ "${OS}" = "Linux" ] ; then
|
|||
|
||||
# use "lsb_release"
|
||||
DIST=$(lsb_release -d 2>/dev/null| awk '{ print $2 }')
|
||||
DIST2=$(lsb_release -d 2>/dev/null| awk '{ print $3 }')
|
||||
if [ "x${DIST2}" = "xMint" ] ; then
|
||||
DIST="LinuxMint"
|
||||
fi
|
||||
|
||||
RELEASE=$(lsb_release -r 2>/dev/null | awk '{ print $2 }')
|
||||
CODENAME=$(lsb_release -c 2>/dev/null | awk '{ print $2 }')
|
||||
|
||||
|
@ -45,6 +50,15 @@ elif [ "${OS}" = "Linux" ] ; then
|
|||
elif [ -f /etc/debian_version ] ; then
|
||||
DIST="Debian"
|
||||
RELEASE=`cat /etc/debian_version`
|
||||
|
||||
elif [ -f /etc/os-release ] ; then
|
||||
ID=$(cat /etc/os-release | tr "\n" ' ' | sed s/.*ID=// | awk '{ print $1}')
|
||||
if [ "${ID}" = "arch" ] ; then
|
||||
DIST='ArchLinux'
|
||||
RELEASE="current"
|
||||
CODENAME="arch"
|
||||
fi
|
||||
|
||||
fi
|
||||
fi
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#! /bin/sh
|
||||
# Guess values for system-dependent variables and create Makefiles.
|
||||
# Generated by GNU Autoconf 2.68 for triAGENS ArangoDB 1.0.alpha3.
|
||||
# Generated by GNU Autoconf 2.68 for triAGENS ArangoDB 1.0.beta1.
|
||||
#
|
||||
# Report bugs to <info@triagens.de>.
|
||||
#
|
||||
|
@ -560,8 +560,8 @@ MAKEFLAGS=
|
|||
# Identity of this package.
|
||||
PACKAGE_NAME='triAGENS ArangoDB'
|
||||
PACKAGE_TARNAME='arangodb'
|
||||
PACKAGE_VERSION='1.0.alpha3'
|
||||
PACKAGE_STRING='triAGENS ArangoDB 1.0.alpha3'
|
||||
PACKAGE_VERSION='1.0.beta1'
|
||||
PACKAGE_STRING='triAGENS ArangoDB 1.0.beta1'
|
||||
PACKAGE_BUGREPORT='info@triagens.de'
|
||||
PACKAGE_URL='http://www.arangodb.org'
|
||||
|
||||
|
@ -1411,7 +1411,7 @@ if test "$ac_init_help" = "long"; then
|
|||
# Omit some internal or obsolete options to make the list less imposing.
|
||||
# This message is too long to be a string in the A/UX 3.1 sh.
|
||||
cat <<_ACEOF
|
||||
\`configure' configures triAGENS ArangoDB 1.0.alpha3 to adapt to many kinds of systems.
|
||||
\`configure' configures triAGENS ArangoDB 1.0.beta1 to adapt to many kinds of systems.
|
||||
|
||||
Usage: $0 [OPTION]... [VAR=VALUE]...
|
||||
|
||||
|
@ -1482,7 +1482,7 @@ fi
|
|||
|
||||
if test -n "$ac_init_help"; then
|
||||
case $ac_init_help in
|
||||
short | recursive ) echo "Configuration of triAGENS ArangoDB 1.0.alpha3:";;
|
||||
short | recursive ) echo "Configuration of triAGENS ArangoDB 1.0.beta1:";;
|
||||
esac
|
||||
cat <<\_ACEOF
|
||||
|
||||
|
@ -1637,7 +1637,7 @@ fi
|
|||
test -n "$ac_init_help" && exit $ac_status
|
||||
if $ac_init_version; then
|
||||
cat <<\_ACEOF
|
||||
triAGENS ArangoDB configure 1.0.alpha3
|
||||
triAGENS ArangoDB configure 1.0.beta1
|
||||
generated by GNU Autoconf 2.68
|
||||
|
||||
Copyright (C) 2010 Free Software Foundation, Inc.
|
||||
|
@ -2102,7 +2102,7 @@ cat >config.log <<_ACEOF
|
|||
This file contains any messages produced by compilers while
|
||||
running configure, to aid debugging if configure makes a mistake.
|
||||
|
||||
It was created by triAGENS ArangoDB $as_me 1.0.alpha3, which was
|
||||
It was created by triAGENS ArangoDB $as_me 1.0.beta1, which was
|
||||
generated by GNU Autoconf 2.68. Invocation command line was
|
||||
|
||||
$ $0 $@
|
||||
|
@ -3241,7 +3241,7 @@ fi
|
|||
|
||||
# Define the identity of the package.
|
||||
PACKAGE='arangodb'
|
||||
VERSION='1.0.alpha3'
|
||||
VERSION='1.0.beta1'
|
||||
|
||||
|
||||
cat >>confdefs.h <<_ACEOF
|
||||
|
@ -10267,7 +10267,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
|
|||
# report actual input values of CONFIG_FILES etc. instead of their
|
||||
# values after options handling.
|
||||
ac_log="
|
||||
This file was extended by triAGENS ArangoDB $as_me 1.0.alpha3, which was
|
||||
This file was extended by triAGENS ArangoDB $as_me 1.0.beta1, which was
|
||||
generated by GNU Autoconf 2.68. Invocation command line was
|
||||
|
||||
CONFIG_FILES = $CONFIG_FILES
|
||||
|
@ -10334,7 +10334,7 @@ _ACEOF
|
|||
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
|
||||
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
|
||||
ac_cs_version="\\
|
||||
triAGENS ArangoDB config.status 1.0.alpha3
|
||||
triAGENS ArangoDB config.status 1.0.beta1
|
||||
configured by $0, generated by GNU Autoconf 2.68,
|
||||
with options \\"\$ac_cs_config\\"
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ dnl ============================================================================
|
|||
dnl --SECTION-- triAGENS GmbH Build Environment
|
||||
dnl ============================================================================
|
||||
|
||||
AC_INIT([triAGENS ArangoDB], [1.0.alpha3], [info@triagens.de], [arangodb], [http://www.arangodb.org])
|
||||
AC_INIT([triAGENS ArangoDB], [1.0.beta1], [info@triagens.de], [arangodb], [http://www.arangodb.org])
|
||||
|
||||
dnl ----------------------------------------------------------------------------
|
||||
dnl auxillary directory for install-sh and missing
|
||||
|
|
|
@ -42,6 +42,7 @@
|
|||
"ERROR_ARANGO_UNKNOWN_COLLECTION_TYPE" : { "code" : 1003, "message" : "unknown type" },
|
||||
"ERROR_ARANGO_READ_ONLY" : { "code" : 1004, "message" : "ready only" },
|
||||
"ERROR_ARANGO_DUPLICATE_IDENTIFIER" : { "code" : 1005, "message" : "duplicate identifier" },
|
||||
"ERROR_ARANGO_DATAFILE_UNREADABLE" : { "code" : 1006, "message" : "datafile unreadable" },
|
||||
"ERROR_ARANGO_CORRUPTED_DATAFILE" : { "code" : 1100, "message" : "corrupted datafile" },
|
||||
"ERROR_ARANGO_ILLEGAL_PARAMETER_FILE" : { "code" : 1101, "message" : "illegal parameter file" },
|
||||
"ERROR_ARANGO_CORRUPTED_COLLECTION" : { "code" : 1102, "message" : "corrupted collection" },
|
||||
|
@ -68,6 +69,7 @@
|
|||
"ERROR_ARANGO_INDEX_HANDLE_BAD" : { "code" : 1214, "message" : "illegal index handle" },
|
||||
"ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED" : { "code" : 1215, "message" : "cap constraint already defined" },
|
||||
"ERROR_ARANGO_DOCUMENT_TOO_LARGE" : { "code" : 1216, "message" : "document too large" },
|
||||
"ERROR_ARANGO_COLLECTION_NOT_UNLOADED" : { "code" : 1217, "message" : "collection must be unloaded" },
|
||||
"ERROR_ARANGO_DATAFILE_FULL" : { "code" : 1300, "message" : "datafile full" },
|
||||
"ERROR_QUERY_KILLED" : { "code" : 1500, "message" : "query killed" },
|
||||
"ERROR_QUERY_PARSE" : { "code" : 1501, "message" : "%s" },
|
||||
|
|
|
@ -43,6 +43,7 @@ static string JS_common_bootstrap_errors =
|
|||
" \"ERROR_ARANGO_UNKNOWN_COLLECTION_TYPE\" : { \"code\" : 1003, \"message\" : \"unknown type\" }, \n"
|
||||
" \"ERROR_ARANGO_READ_ONLY\" : { \"code\" : 1004, \"message\" : \"ready only\" }, \n"
|
||||
" \"ERROR_ARANGO_DUPLICATE_IDENTIFIER\" : { \"code\" : 1005, \"message\" : \"duplicate identifier\" }, \n"
|
||||
" \"ERROR_ARANGO_DATAFILE_UNREADABLE\" : { \"code\" : 1006, \"message\" : \"datafile unreadable\" }, \n"
|
||||
" \"ERROR_ARANGO_CORRUPTED_DATAFILE\" : { \"code\" : 1100, \"message\" : \"corrupted datafile\" }, \n"
|
||||
" \"ERROR_ARANGO_ILLEGAL_PARAMETER_FILE\" : { \"code\" : 1101, \"message\" : \"illegal parameter file\" }, \n"
|
||||
" \"ERROR_ARANGO_CORRUPTED_COLLECTION\" : { \"code\" : 1102, \"message\" : \"corrupted collection\" }, \n"
|
||||
|
@ -69,6 +70,7 @@ static string JS_common_bootstrap_errors =
|
|||
" \"ERROR_ARANGO_INDEX_HANDLE_BAD\" : { \"code\" : 1214, \"message\" : \"illegal index handle\" }, \n"
|
||||
" \"ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED\" : { \"code\" : 1215, \"message\" : \"cap constraint already defined\" }, \n"
|
||||
" \"ERROR_ARANGO_DOCUMENT_TOO_LARGE\" : { \"code\" : 1216, \"message\" : \"document too large\" }, \n"
|
||||
" \"ERROR_ARANGO_COLLECTION_NOT_UNLOADED\" : { \"code\" : 1217, \"message\" : \"collection must be unloaded\" }, \n"
|
||||
" \"ERROR_ARANGO_DATAFILE_FULL\" : { \"code\" : 1300, \"message\" : \"datafile full\" }, \n"
|
||||
" \"ERROR_QUERY_KILLED\" : { \"code\" : 1500, \"message\" : \"query killed\" }, \n"
|
||||
" \"ERROR_QUERY_PARSE\" : { \"code\" : 1501, \"message\" : \"%s\" }, \n"
|
||||
|
@ -144,7 +146,7 @@ static string JS_common_bootstrap_errors =
|
|||
" \"WARNING_ARANGO_INDEX_BITARRAY_DOCUMENT_ATTRIBUTE_MISSING\" : { \"code\" : 3400, \"message\" : \"bitarray index insertion warning - attribute missing in document\" }, \n"
|
||||
" \"WARNING_ARANGO_INDEX_BITARRAY_UPDATE_ATTRIBUTE_MISSING\" : { \"code\" : 3402, \"message\" : \"bitarray index update warning - attribute missing in revised document\" }, \n"
|
||||
" \"WARNING_ARANGO_INDEX_BITARRAY_REMOVE_ITEM_MISSING\" : { \"code\" : 3411, \"message\" : \"bitarray index remove failure - item missing in index\" }, \n"
|
||||
" \"ERROR_ARANGO_INDEX_BITARRAY_INSERT_ITEM_UNSUPPORTED_VALUE\" : { \"code\" : 3313, \"message\" : \"bitarray index insert failure - document attribute value unsupported in index\" }, \n"
|
||||
" \"ERROR_ARANGO_INDEX_BITARRAY_INSERT_ITEM_UNSUPPORTED_VALUE\" : { \"code\" : 3413, \"message\" : \"bitarray index insert failure - document attribute value unsupported in index\" }, \n"
|
||||
"};\n"
|
||||
"}());\n"
|
||||
"\n"
|
||||
|
|
|
@ -7,7 +7,8 @@ static string JS_common_bootstrap_modules =
|
|||
" white: true,\n"
|
||||
" plusplus: true */\n"
|
||||
"/*global require, module, ModuleCache, SYS_EXECUTE, CONSOLE_ERROR,\n"
|
||||
" FS_EXISTS, SYS_LOAD, SYS_LOG, SYS_LOG_LEVEL, SYS_OUTPUT,\n"
|
||||
" FS_MOVE, FS_REMOVE, FS_EXISTS, \n"
|
||||
" SYS_LOAD, SYS_LOG, SYS_LOG_LEVEL, SYS_OUTPUT,\n"
|
||||
" SYS_PROCESS_STAT, SYS_READ, SYS_SPRINTF, SYS_TIME,\n"
|
||||
" SYS_START_PAGER, SYS_STOP_PAGER, ARANGO_QUIET, MODULES_PATH,\n"
|
||||
" COLOR_OUTPUT, COLOR_OUTPUT_RESET, COLOR_BRIGHT, PRETTY_PRINT */\n"
|
||||
|
@ -240,6 +241,8 @@ static string JS_common_bootstrap_modules =
|
|||
" var fs = ModuleCache[\"/fs\"].exports;\n"
|
||||
"\n"
|
||||
" fs.exists = FS_EXISTS;\n"
|
||||
" fs.move = FS_MOVE;\n"
|
||||
" fs.remove = FS_REMOVE;\n"
|
||||
"}());\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
white: true,
|
||||
plusplus: true */
|
||||
/*global require, module, ModuleCache, SYS_EXECUTE, CONSOLE_ERROR,
|
||||
FS_EXISTS, SYS_LOAD, SYS_LOG, SYS_LOG_LEVEL, SYS_OUTPUT,
|
||||
FS_MOVE, FS_REMOVE, FS_EXISTS,
|
||||
SYS_LOAD, SYS_LOG, SYS_LOG_LEVEL, SYS_OUTPUT,
|
||||
SYS_PROCESS_STAT, SYS_READ, SYS_SPRINTF, SYS_TIME,
|
||||
SYS_START_PAGER, SYS_STOP_PAGER, ARANGO_QUIET, MODULES_PATH,
|
||||
COLOR_OUTPUT, COLOR_OUTPUT_RESET, COLOR_BRIGHT, PRETTY_PRINT */
|
||||
|
@ -239,6 +240,8 @@ ModuleCache["/fs"] = new Module("/fs");
|
|||
var fs = ModuleCache["/fs"].exports;
|
||||
|
||||
fs.exists = FS_EXISTS;
|
||||
fs.move = FS_MOVE;
|
||||
fs.remove = FS_REMOVE;
|
||||
}());
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -0,0 +1,385 @@
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief arango datafile debugger
|
||||
///
|
||||
/// @file
|
||||
///
|
||||
/// DISCLAIMER
|
||||
///
|
||||
/// Copyright 2004-2012 triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
/// you may not use this file except in compliance with the License.
|
||||
/// You may obtain a copy of the License at
|
||||
///
|
||||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||||
///
|
||||
/// Unless required by applicable law or agreed to in writing, software
|
||||
/// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
/// See the License for the specific language governing permissions and
|
||||
/// limitations under the License.
|
||||
///
|
||||
/// Copyright holder is triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// @author Dr. Frank Celler
|
||||
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var internal = require("internal");
|
||||
var console = require("console");
|
||||
var fs = require("fs");
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @addtogroup ArangoDB
|
||||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief C-like printf to stdout
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function printf () {
|
||||
var text = internal.sprintf.apply(internal.sprintf, arguments);
|
||||
|
||||
internal.output(text);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief remove datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function RemoveDatafile (collection, type, datafile) {
|
||||
var backup = datafile + ".corrupt";
|
||||
|
||||
fs.move(datafile, backup);
|
||||
|
||||
printf("Removed %s at %s\n", type, datafile);
|
||||
printf("Backup is in %s\n", backup);
|
||||
printf("\n");
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief wipe entries
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function WipeDatafile (collection, type, datafile, lastGoodPos) {
|
||||
collection.truncateDatafile(datafile, lastGoodPos);
|
||||
|
||||
printf("Truncated and sealed datafile\n");
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a journal
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function DeepCheckJournal (collection, type, datafile, scan, lastGoodPos) {
|
||||
var entries = scan.entries;
|
||||
|
||||
if (entries.length == 0) {
|
||||
printf("WARNING: The journal is empty. Even the header is missing. Going\n");
|
||||
printf(" to remove the datafile.\n");
|
||||
printf("\n");
|
||||
|
||||
RemoveDatafile(collection, type, datafile);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (entries.length === lastGoodPos + 3 && entries[lastGoodPos + 2].status === 2) {
|
||||
printf("WARNING: The journal was not closed properly, the last entry is corrupted.\n");
|
||||
printf("         This might happen if ArangoDB was killed and the last entry was not\n");
|
||||
printf(" fully written to disk. Going to remove the last entry.\n");
|
||||
printf("\n");
|
||||
}
|
||||
else {
|
||||
printf("WARNING: The journal was not closed properly, the last entries are corrupted.\n");
|
||||
printf("         This might happen if ArangoDB was killed and the last entries were not\n");
|
||||
printf(" fully written to disk.\n");
|
||||
printf("\n");
|
||||
|
||||
printf("Wipe the last entries (Y/N)? ");
|
||||
var line = console.getline();
|
||||
|
||||
if (line !== "yes" && line !== "YES" && line !== "y" && line !== "Y") {
|
||||
printf("ABORTING\n");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
var entry = entries[lastGoodPos];
|
||||
|
||||
WipeDatafile(collection, type, datafile, entry.position + entry.size);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function DeepCheckDatafile (collection, type, datafile, scan, lastGoodPos) {
|
||||
var entries = scan.entries;
|
||||
|
||||
if (entries.length == 0) {
|
||||
printf("WARNING: The datafile is empty. Even the header is missing. Going\n");
|
||||
printf(" to remove the datafile. This should never happen. Datafiles\n");
|
||||
printf(" are append-only. Make sure your hard disk does not contain\n");
|
||||
printf(" any hardware errors.\n");
|
||||
printf("\n");
|
||||
|
||||
RemoveDatafile(collection, type, datafile);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
printf("WARNING: The datafile contains corrupt entries. This should never happen.\n");
|
||||
printf(" Datafiles are append-only. Make sure your hard disk does not contain\n");
|
||||
printf(" any hardware errors.\n");
|
||||
printf("\n");
|
||||
|
||||
printf("Wipe the last entries (Y/N)? ");
|
||||
var line = console.getline();
|
||||
|
||||
if (line !== "yes" && line !== "YES" && line !== "y" && line !== "Y") {
|
||||
printf("ABORTING\n");
|
||||
return;
|
||||
}
|
||||
|
||||
var entry = entries[lastGoodPos];
|
||||
|
||||
WipeDatafile(collection, type, datafile, entry.position + entry.size);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a datafile deeply
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function DeepCheckDatafile (collection, type, datafile, scan) {
|
||||
var entries = scan.entries;
|
||||
|
||||
printf("Entries\n");
|
||||
|
||||
var lastGood = 0;
|
||||
var lastGoodPos = 0;
|
||||
var stillGood = true;
|
||||
|
||||
for (var i = 0; i < entries.length; ++i) {
|
||||
var entry = entries[i];
|
||||
var s = "unknown";
|
||||
|
||||
switch (entry.status) {
|
||||
case 1: s = "OK"; break;
|
||||
case 2: s = "OK (end)"; break;
|
||||
case 3: s = "FAILED (empty)"; break;
|
||||
case 4: s = "FAILED (too small)"; break;
|
||||
case 5: s = "FAILED (crc mismatch)"; break;
|
||||
}
|
||||
|
||||
printf(" %d: status %s type %d size %d\n", i, s, entry.type, entry.size);
|
||||
|
||||
if (entry.status === 1 || entry.status === 2) {
|
||||
if (stillGood) {
|
||||
lastGood = entry;
|
||||
lastGoodPos = i;
|
||||
}
|
||||
}
|
||||
else {
|
||||
stillGood = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (! stillGood) {
|
||||
printf(" Last good position: %d\n", lastGood.position + lastGood.size);
|
||||
printf("\n");
|
||||
|
||||
if (type === "journal" || type === "compactor") {
|
||||
DeepCheckJournal(collection, type, datafile, scan, lastGoodPos);
|
||||
}
|
||||
else {
|
||||
DeepCheckDatafile(collection, type, datafile, scan, lastGoodPos);
|
||||
}
|
||||
}
|
||||
|
||||
printf("\n");
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a datafile
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function CheckDatafile (collection, type, datafile) {
|
||||
printf("Datafile\n");
|
||||
printf(" path: %s\n", datafile);
|
||||
printf(" type: %s\n", type);
|
||||
|
||||
var scan = collection.datafileScan(datafile);
|
||||
|
||||
printf(" current size: %d\n", scan.currentSize);
|
||||
printf(" maximal size: %d\n", scan.maximalSize);
|
||||
printf(" total used: %d\n", scan.endPosition);
|
||||
printf(" # of entries: %d\n", scan.numberMarkers);
|
||||
|
||||
switch (scan.status) {
|
||||
case 1:
|
||||
printf(" status: OK\n");
|
||||
break;
|
||||
|
||||
case 2:
|
||||
printf(" status: NOT OK (reached empty marker)\n");
|
||||
break;
|
||||
|
||||
case 3:
|
||||
printf(" status: FATAL (reached corrupt marker)\n");
|
||||
break;
|
||||
|
||||
case 4:
|
||||
printf(" status: FATAL (crc failed)\n");
|
||||
break;
|
||||
|
||||
case 5:
|
||||
printf(" status: FATAL (cannot open datafile or too small)\n");
|
||||
break;
|
||||
|
||||
default:
|
||||
printf(" status: UNKNOWN (%d)\n", scan.status);
|
||||
break;
|
||||
}
|
||||
|
||||
printf("\n");
|
||||
|
||||
if (scan.numberMarkers === 0) {
|
||||
printf("WARNING: datafile contains no entries!\n");
|
||||
RemoveDatafile(collection, type, datafile);
|
||||
return;
|
||||
}
|
||||
|
||||
if (scan.entries[0].type !== 1000) {
|
||||
printf("WARNING: datafile contains no header marker!\n");
|
||||
RemoveDatafile(collection, type, datafile);
|
||||
return;
|
||||
}
|
||||
|
||||
if (scan.status === 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
DeepCheckDatafile(collection, type, datafile, scan);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks a collection
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function CheckCollection (collection) {
|
||||
printf("Database\n");
|
||||
printf(" path: %s\n", internal.db._path);
|
||||
printf("\n");
|
||||
|
||||
printf("Collection\n");
|
||||
printf(" name: %s\n", collection.name());
|
||||
printf(" identifier: %s\n", collection._id);
|
||||
printf("\n");
|
||||
|
||||
var datafiles = collection.datafiles();
|
||||
|
||||
printf("Datafiles\n");
|
||||
printf(" # of journals: %d\n", datafiles.journals.length);
|
||||
printf(" # of compactors: %d\n", datafiles.compactors.length);
|
||||
printf(" # of datafiles: %d\n", datafiles.datafiles.length);
|
||||
printf("\n");
|
||||
|
||||
for (var i = 0; i < datafiles.journals.length; ++i) {
|
||||
CheckDatafile(collection, "journal", datafiles.journals[i]);
|
||||
}
|
||||
|
||||
for (var i = 0; i < datafiles.datafiles.length; ++i) {
|
||||
CheckDatafile(collection, "datafiles", datafiles.datafiles[i]);
|
||||
}
|
||||
|
||||
for (var i = 0; i < datafiles.compactors.length; ++i) {
|
||||
CheckDatafile(collection, "compactor", datafiles.compactors[i]);
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief select and check a collection
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function main (argv) {
|
||||
var argc = argv.length;
|
||||
var collections = internal.db._collections();
|
||||
var i;
|
||||
|
||||
printf("%s\n", " ___ _ __ _ _ ___ ___ ___ ");
|
||||
printf("%s\n", " / \\__ _| |_ __ _ / _(_) | ___ / \\/ __\\ / _ \\");
|
||||
printf("%s\n", " / /\\ / _` | __/ _` | |_| | |/ _ \\ / /\\ /__\\// / /_\\/");
|
||||
printf("%s\n", " / /_// (_| | || (_| | _| | | __/ / /_// \\/ \\/ /_\\\\ ");
|
||||
printf("%s\n", "/___,' \\__,_|\\__\\__,_|_| |_|_|\\___| /___,'\\_____/\\____/ ");
|
||||
printf("\n");
|
||||
|
||||
printf("Available collections:\n");
|
||||
|
||||
for (i = 0; i < collections.length; ++i) {
|
||||
printf(" %d: %s\n", i, collections[i].name());
|
||||
}
|
||||
|
||||
printf(" *: all\n");
|
||||
|
||||
printf("\n");
|
||||
|
||||
printf("Collection to check: ");
|
||||
var a = [];
|
||||
|
||||
while (true) {
|
||||
var line = console.getline();
|
||||
|
||||
if (line === "*") {
|
||||
for (i = 0; i < collections.length; ++i) {
|
||||
a.push(i);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
else {
|
||||
var l = parseInt(line);
|
||||
|
||||
if (l < 0 || l >= collections.length) {
|
||||
printf("Please select a number between 0 and %d: ", collections.length - 1);
|
||||
}
|
||||
else {
|
||||
a.push(l);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0; i < a.length; ++i) {
|
||||
var collection = collections[a[i]];
|
||||
|
||||
printf("Checking collection #%d: %s\n", a[i], collection.name());
|
||||
|
||||
var last = Math.round(internal.time());
|
||||
|
||||
while (collection.status() !== 2) {
|
||||
collection.unload();
|
||||
|
||||
var next = Math.round(internal.time());
|
||||
|
||||
if (next != last) {
|
||||
printf("Trying to unload collection '%s'\n", collection.name());
|
||||
last = next;
|
||||
}
|
||||
}
|
||||
|
||||
printf("\n");
|
||||
|
||||
CheckCollection(collection);
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Local Variables:
|
||||
// mode: outline-minor
|
||||
// outline-regexp: "^\\(/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @}\\)"
|
||||
// End:
|
|
@ -25,6 +25,15 @@
|
|||
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @addtogroup ArangoDB
|
||||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief creates or changes the passwords
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function main (argv) {
|
||||
var argc = argv.length;
|
||||
var internal = require("internal");
|
||||
|
@ -83,3 +92,12 @@ function main (argv) {
|
|||
|
||||
console.info("password hash '%s'", hash);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Local Variables:
|
||||
// mode: outline-minor
|
||||
// outline-regexp: "^\\(/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @}\\)"
|
||||
// End:
|
||||
|
|
|
@ -84,6 +84,10 @@ static string JS_server_server =
|
|||
"/// @{\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"/// @brief simple-query module\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"\n"
|
||||
"(function () {\n"
|
||||
" var console = require(\"console\");\n"
|
||||
"\n"
|
||||
|
@ -108,6 +112,10 @@ static string JS_server_server =
|
|||
"/// @{\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"/// @brief monkeypatches module\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
"\n"
|
||||
"(function () {\n"
|
||||
" var console = require(\"console\");\n"
|
||||
"\n"
|
||||
|
@ -155,7 +163,7 @@ static string JS_server_server =
|
|||
"}());\n"
|
||||
"\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"// --SECTION-- ArangoError\n"
|
||||
"// --SECTION-- ArangoError\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
|
@ -195,7 +203,7 @@ static string JS_server_server =
|
|||
"}());\n"
|
||||
"\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"// --SECTION-- ArangoDatabase\n"
|
||||
"// --SECTION-- ArangoDatabase\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
|
@ -434,7 +442,7 @@ static string JS_server_server =
|
|||
"}());\n"
|
||||
"\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"// --SECTION-- ArangoCollection\n"
|
||||
"// --SECTION-- ArangoCollection\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
|
@ -600,7 +608,7 @@ static string JS_server_server =
|
|||
"}());\n"
|
||||
"\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"// --SECTION-- ArangoEdgesCollection\n"
|
||||
"// --SECTION-- ArangoEdgesCollection\n"
|
||||
"// -----------------------------------------------------------------------------\n"
|
||||
"\n"
|
||||
"////////////////////////////////////////////////////////////////////////////////\n"
|
||||
|
|
|
@ -83,6 +83,10 @@
|
|||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief simple-query module
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
(function () {
|
||||
var console = require("console");
|
||||
|
||||
|
@ -107,6 +111,10 @@
|
|||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief monkeypatches module
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
(function () {
|
||||
var console = require("console");
|
||||
|
||||
|
@ -154,7 +162,7 @@
|
|||
}());
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- ArangoError
|
||||
// --SECTION-- ArangoError
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -194,7 +202,7 @@
|
|||
}());
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- ArangoDatabase
|
||||
// --SECTION-- ArangoDatabase
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -433,7 +441,7 @@
|
|||
}());
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- ArangoCollection
|
||||
// --SECTION-- ArangoCollection
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -599,7 +607,7 @@
|
|||
}());
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- ArangoEdgesCollection
|
||||
// --SECTION-- ArangoEdgesCollection
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -46,6 +46,7 @@ ERROR_ARANGO_DATAFILE_SEALED,1002,"datafile sealed","Internal error that will be
|
|||
ERROR_ARANGO_UNKNOWN_COLLECTION_TYPE,1003,"unknown type","Internal error that will be raised when an unknown collection type is encountered."
|
||||
ERROR_ARANGO_READ_ONLY,1004,"ready only","Internal error that will be raised when trying to write to a read-only datafile or collection."
|
||||
ERROR_ARANGO_DUPLICATE_IDENTIFIER,1005,"duplicate identifier","Internal error that will be raised when a identifier duplicate is detected."
|
||||
ERROR_ARANGO_DATAFILE_UNREADABLE,1006,"datafile unreadable","Internal error that will be raised when the datafile is unreadable."
|
||||
|
||||
################################################################################
|
||||
## ArangoDB storage errors
|
||||
|
@ -86,6 +87,7 @@ ERROR_ARANGO_CROSS_COLLECTION_REQUEST,1213,"cross collection request not allowed
|
|||
ERROR_ARANGO_INDEX_HANDLE_BAD,1214,"illegal index handle","Will be raised when a index handle is corrupt."
|
||||
ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED,1215,"cap constraint already defined","Will be raised when a cap constraint was already defined."
|
||||
ERROR_ARANGO_DOCUMENT_TOO_LARGE,1216,"document too large","Will be raised when the document cannot fit into any datafile because of it is too large."
|
||||
ERROR_ARANGO_COLLECTION_NOT_UNLOADED,1217,"collection must be unloaded","Will be raised when a collection should be unloaded, but has a different status."
|
||||
|
||||
################################################################################
|
||||
## ArangoDB storage errors
|
||||
|
|
|
@ -38,6 +38,7 @@ void TRI_InitialiseErrorMessages (void) {
|
|||
REG_ERROR(ERROR_ARANGO_UNKNOWN_COLLECTION_TYPE, "unknown type");
|
||||
REG_ERROR(ERROR_ARANGO_READ_ONLY, "ready only");
|
||||
REG_ERROR(ERROR_ARANGO_DUPLICATE_IDENTIFIER, "duplicate identifier");
|
||||
REG_ERROR(ERROR_ARANGO_DATAFILE_UNREADABLE, "datafile unreadable");
|
||||
REG_ERROR(ERROR_ARANGO_CORRUPTED_DATAFILE, "corrupted datafile");
|
||||
REG_ERROR(ERROR_ARANGO_ILLEGAL_PARAMETER_FILE, "illegal parameter file");
|
||||
REG_ERROR(ERROR_ARANGO_CORRUPTED_COLLECTION, "corrupted collection");
|
||||
|
@ -64,6 +65,7 @@ void TRI_InitialiseErrorMessages (void) {
|
|||
REG_ERROR(ERROR_ARANGO_INDEX_HANDLE_BAD, "illegal index handle");
|
||||
REG_ERROR(ERROR_ARANGO_CAP_CONSTRAINT_ALREADY_DEFINED, "cap constraint already defined");
|
||||
REG_ERROR(ERROR_ARANGO_DOCUMENT_TOO_LARGE, "document too large");
|
||||
REG_ERROR(ERROR_ARANGO_COLLECTION_NOT_UNLOADED, "collection must be unloaded");
|
||||
REG_ERROR(ERROR_ARANGO_DATAFILE_FULL, "datafile full");
|
||||
REG_ERROR(ERROR_QUERY_KILLED, "query killed");
|
||||
REG_ERROR(ERROR_QUERY_PARSE, "%s");
|
||||
|
|
|
@ -69,6 +69,8 @@ extern "C" {
|
|||
/// - 1005: @CODE{duplicate identifier}
|
||||
/// Internal error that will be raised when a identifier duplicate is
|
||||
/// detected.
|
||||
/// - 1006: @CODE{datafile unreadable}
|
||||
/// Internal error that will be raised when the datafile is unreadable.
|
||||
/// - 1100: @CODE{corrupted datafile}
|
||||
/// Will be raised when a corruption is detected in a datafile.
|
||||
/// - 1101: @CODE{illegal parameter file}
|
||||
|
@ -128,6 +130,9 @@ extern "C" {
|
|||
/// - 1216: @CODE{document too large}
|
||||
/// Will be raised when the document cannot fit into any datafile because of
|
||||
/// it is too large.
|
||||
/// - 1217: @CODE{collection must be unloaded}
|
||||
/// Will be raised when a collection should be unloaded, but has a different
|
||||
/// status.
|
||||
/// - 1300: @CODE{datafile full}
|
||||
/// Will be raised when the datafile reaches its limit.
|
||||
/// - 1500: @CODE{query killed}
|
||||
|
@ -610,6 +615,16 @@ void TRI_InitialiseErrorMessages (void);
|
|||
|
||||
#define TRI_ERROR_ARANGO_DUPLICATE_IDENTIFIER (1005)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief 1006: ERROR_ARANGO_DATAFILE_UNREADABLE
|
||||
///
|
||||
/// datafile unreadable
|
||||
///
|
||||
/// Internal error that will be raised when the datafile is unreadable.
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#define TRI_ERROR_ARANGO_DATAFILE_UNREADABLE (1006)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief 1100: ERROR_ARANGO_CORRUPTED_DATAFILE
|
||||
///
|
||||
|
@ -875,6 +890,17 @@ void TRI_InitialiseErrorMessages (void);
|
|||
|
||||
#define TRI_ERROR_ARANGO_DOCUMENT_TOO_LARGE (1216)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief 1217: ERROR_ARANGO_COLLECTION_NOT_UNLOADED
|
||||
///
|
||||
/// collection must be unloaded
|
||||
///
|
||||
/// Will be raised when a collection should be unloaded, but has a different
|
||||
/// status.
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#define TRI_ERROR_ARANGO_COLLECTION_NOT_UNLOADED (1217)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief 1300: ERROR_ARANGO_DATAFILE_FULL
|
||||
///
|
||||
|
|
|
@@ -50,7 +50,7 @@ using namespace triagens::admin;
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

@@ -69,7 +69,7 @@ string ApplicationUserManager::optionUserDatabase;
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

@@ -97,7 +97,7 @@ ApplicationUserManager::~ApplicationUserManager () {
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

@@ -46,6 +46,11 @@ namespace triagens {
// --SECTION-- class ApplicationUserManager
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief application simple user and session management feature
////////////////////////////////////////////////////////////////////////////////

@@ -63,7 +68,7 @@ namespace triagens {
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

@@ -90,7 +95,7 @@ namespace triagens {
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////

@@ -168,7 +173,7 @@ namespace triagens {
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup RestServer
/// @addtogroup ApplicationServer
/// @{
////////////////////////////////////////////////////////////////////////////////
@@ -580,6 +580,28 @@ static v8::Handle<v8::Value> JS_Execute (v8::Arguments const& argv) {
  }
}

////////////////////////////////////////////////////////////////////////////////
/// @brief checks if a file of any type or directory exists
///
/// @FUN{fs.exists(@FA{filename})}
///
/// Returns true if a file (of any type) or a directory exists at a given
/// path. If the file is a broken symbolic link, returns false.
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_Exists (v8::Arguments const& argv) {
  v8::HandleScope scope;

  // extract the argument
  if (argv.Length() != 1) {
    return scope.Close(v8::ThrowException(v8::String::New("usage: exists(<filename>)")));
  }

  string filename = TRI_ObjectToString(argv[0]);

  return scope.Close(TRI_ExistsFile(filename.c_str()) ? v8::True() : v8::False());
}

////////////////////////////////////////////////////////////////////////////////
/// @brief reads in a line from stdin
///
@@ -727,6 +749,36 @@ static v8::Handle<v8::Value> JS_LogLevel (v8::Arguments const& argv) {
  return scope.Close(v8::String::New(TRI_LogLevelLogging()));
}

////////////////////////////////////////////////////////////////////////////////
/// @brief renames a file
///
/// @FUN{fs.move(@FA{source}, @FA{destination})}
///
/// Moves @FA{source} to @FA{destination}. An exception is thrown if the file
/// cannot be moved, or if @FA{destination} is a directory while @FA{source} is
/// a file.
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_Move (v8::Arguments const& argv) {
  v8::HandleScope scope;

  // extract the two arguments
  if (argv.Length() != 2) {
    return scope.Close(v8::ThrowException(v8::String::New("usage: move(<source>, <destination>)")));
  }

  string source = TRI_ObjectToString(argv[0]);
  string destination = TRI_ObjectToString(argv[1]);

  int res = TRI_RenameFile(source.c_str(), destination.c_str());

  if (res != TRI_ERROR_NO_ERROR) {
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(res, "cannot move file")));
  }

  return scope.Close(v8::Undefined());
}

////////////////////////////////////////////////////////////////////////////////
/// @brief outputs the arguments
///
@@ -853,6 +905,35 @@ static v8::Handle<v8::Value> JS_Read (v8::Arguments const& argv) {
  return scope.Close(result);
}

////////////////////////////////////////////////////////////////////////////////
/// @brief removes a file
///
/// @FUN{fs.remove(@FA{filename})}
///
/// Removes the file @FA{filename} at the given path. Throws an exception if the
/// path corresponds to anything that is not a file or a symbolic link. If
/// @FA{filename} refers to a symbolic link, only the symbolic link is removed.
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_Remove (v8::Arguments const& argv) {
  v8::HandleScope scope;

  // extract the single argument
  if (argv.Length() != 1) {
    return scope.Close(v8::ThrowException(v8::String::New("usage: remove(<filename>)")));
  }

  string filename = TRI_ObjectToString(argv[0]);

  int res = TRI_UnlinkFile(filename.c_str());

  if (res != TRI_ERROR_NO_ERROR) {
    return scope.Close(v8::ThrowException(TRI_CreateErrorObject(res, "cannot remove file")));
  }

  return scope.Close(v8::Undefined());
}

////////////////////////////////////////////////////////////////////////////////
/// @brief formats the arguments
///
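JS_Move and JS_Remove above repeat the same error-propagation step: a non-zero TRI result code is mapped to a thrown error object. Purely as a sketch, and not something this commit introduces, that step could be factored into a small helper:

    // hypothetical helper, shown only to illustrate the shared pattern
    static v8::Handle<v8::Value> ThrowFilesystemError (int res, char const* message) {
      return v8::ThrowException(TRI_CreateErrorObject(res, message));
    }

    // usage inside a binding such as JS_Remove above:
    //   int res = TRI_UnlinkFile(filename.c_str());
    //   if (res != TRI_ERROR_NO_ERROR) {
    //     return scope.Close(ThrowFilesystemError(res, "cannot remove file"));
    //   }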
@@ -1022,38 +1103,6 @@ static v8::Handle<v8::Value> JS_Time (v8::Arguments const& argv) {
  return scope.Close(v8::Number::New(TRI_microtime()));
}

////////////////////////////////////////////////////////////////////////////////
/// @brief checks if a file of any type or directory exists
///
/// @FUN{fs.exists(@FA{filename})}
///
/// Returns true if a file (of any type) or a directory exists at a given
/// path. If the file is a broken symbolic link, returns false.
////////////////////////////////////////////////////////////////////////////////

static v8::Handle<v8::Value> JS_Exists (v8::Arguments const& argv) {
  v8::HandleScope scope;

  // extract arguments
  if (argv.Length() != 1) {
    return scope.Close(v8::ThrowException(v8::String::New("exists: execute(<filename>)")));
  }

  v8::Handle<v8::Value> filename = argv[0];

  if (! filename->IsString()) {
    return scope.Close(v8::ThrowException(v8::String::New("<filename> must be a string")));
  }

  v8::String::Utf8Value name(filename);

  if (*name == 0) {
    return scope.Close(v8::ThrowException(v8::String::New("<filename> must be an UTF8 string")));
  }

  return scope.Close(TRI_ExistsFile(*name) ? v8::True() : v8::False());;
}

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
@@ -1325,6 +1374,38 @@ v8::Handle<v8::Value> TRI_ExecuteJavaScriptString (v8::Handle<v8::Context> conte
  }
}

////////////////////////////////////////////////////////////////////////////////
/// @brief creates a JavaScript error object
////////////////////////////////////////////////////////////////////////////////

v8::Handle<v8::Object> TRI_CreateErrorObject (int errorNumber, string const& message) {
  TRI_v8_global_t* v8g;
  v8::HandleScope scope;

  v8g = (TRI_v8_global_t*) v8::Isolate::GetCurrent()->GetData();

  string msg;
  if (message.size()) {
    msg = message;
  }
  else {
    msg = TRI_errno_string(errorNumber);
  }
  v8::Handle<v8::String> errorMessage = v8::String::New(msg.c_str());

  v8::Handle<v8::Object> errorObject = v8::Exception::Error(errorMessage)->ToObject();
  v8::Handle<v8::Value> proto = v8g->ErrorTempl->NewInstance();

  errorObject->Set(v8::String::New("errorNum"), v8::Number::New(errorNumber));
  errorObject->Set(v8::String::New("errorMessage"), errorMessage);

  if (! proto.IsEmpty()) {
    errorObject->SetPrototype(proto);
  }

  return errorObject;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief stores the V8 utils functions inside the global variable
////////////////////////////////////////////////////////////////////////////////
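To show what callers get back, a small sketch of consuming TRI_CreateErrorObject the way the new bindings do; it assumes an entered V8 context on a thread whose isolate data (the TRI_v8_global_t with ErrorTempl) has already been initialised, since the function reads it via v8::Isolate::GetCurrent():

    // illustrative fragment, not part of this commit
    static void InspectErrorObject () {
      v8::HandleScope scope;

      v8::Handle<v8::Object> err =
        TRI_CreateErrorObject(TRI_ERROR_ARANGO_CORRUPTED_DATAFILE, "header checksum mismatch");

      // both properties are attached by TRI_CreateErrorObject above
      v8::Handle<v8::Value> num = err->Get(v8::String::New("errorNum"));      // 1100
      v8::Handle<v8::Value> msg = err->Get(v8::String::New("errorMessage"));  // the message text

      (void) num;
      (void) msg;
    }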
@@ -1375,6 +1456,10 @@ void TRI_InitV8Utils (v8::Handle<v8::Context> context, string const& path) {
                         v8::FunctionTemplate::New(JS_Execute)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("FS_EXISTS"),
                         v8::FunctionTemplate::New(JS_Exists)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("SYS_GETLINE"),
                         v8::FunctionTemplate::New(JS_Getline)->GetFunction(),
                         v8::ReadOnly);

@@ -1391,6 +1476,14 @@ void TRI_InitV8Utils (v8::Handle<v8::Context> context, string const& path) {
                         v8::FunctionTemplate::New(JS_LogLevel)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("FS_MOVE"),
                         v8::FunctionTemplate::New(JS_Move)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("FS_REMOVE"),
                         v8::FunctionTemplate::New(JS_Remove)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("SYS_OUTPUT"),
                         v8::FunctionTemplate::New(JS_Output)->GetFunction(),
                         v8::ReadOnly);

@@ -1415,10 +1508,6 @@ void TRI_InitV8Utils (v8::Handle<v8::Context> context, string const& path) {
                         v8::FunctionTemplate::New(JS_Time)->GetFunction(),
                         v8::ReadOnly);

  context->Global()->Set(v8::String::New("FS_EXISTS"),
                         v8::FunctionTemplate::New(JS_Exists)->GetFunction(),
                         v8::ReadOnly);

  // .............................................................................
  // create the global variables
  // .............................................................................
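If further file-system helpers were added later, they would presumably be exported through the same read-only global pattern shown above. FS_IS_DIRECTORY and JS_IsDirectory below are hypothetical names, not functions added by this commit, and the snippet would belong inside TRI_InitV8Utils:

    // hypothetical registration, mirroring the FS_MOVE / FS_REMOVE entries above
    context->Global()->Set(v8::String::New("FS_IS_DIRECTORY"),
                           v8::FunctionTemplate::New(JS_IsDirectory)->GetFunction(),
                           v8::ReadOnly);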
@@ -138,6 +138,12 @@ v8::Handle<v8::Value> TRI_ExecuteJavaScriptString (v8::Handle<v8::Context> conte
                                                    v8::Handle<v8::Value> name,
                                                    bool printResult);

////////////////////////////////////////////////////////////////////////////////
/// @brief creates a JavaScript error object
////////////////////////////////////////////////////////////////////////////////

v8::Handle<v8::Object> TRI_CreateErrorObject (int errorNumber, std::string const& message);

////////////////////////////////////////////////////////////////////////////////
/// @brief stores the V8 utils functions inside the global variable
////////////////////////////////////////////////////////////////////////////////
packetize.sh
@@ -51,6 +51,11 @@ echo

case $TRI_OS_LONG in

  Linux-ArchLinux-*)
    echo "Packetize for ArchLinux is not supported."
    exit 0
    ;;

  Linux-openSUSE*)
    echo "Using configuration for openSuSE"
    package_type="rpm"

@@ -90,7 +95,21 @@ case $TRI_OS_LONG in
    echo "Using configuration for Ubuntu"
    package_type="deb"
    START_SCRIPT="rc.arangodb.Ubuntu"
    runlevels="runlevel(0235)"
    runlevels="runlevel(02345)"

    if [ ${TRI_MACH} == "x86_64" ] ; then
      TRI_MACH="amd64"
    fi

    # export "insserv" for the epm configuration file
    export insserv="true"
    ;;

  Linux-LinuxMint-*)
    echo "Using configuration for LinuxMint"
    package_type="deb"
    START_SCRIPT="rc.arangodb.Ubuntu"
    runlevels="runlevel(02345)"

    if [ ${TRI_MACH} == "x86_64" ] ; then
      TRI_MACH="amd64"

@@ -296,6 +315,14 @@ case $TRI_OS_LONG in
    remove_package="sudo dpkg --purge $product_name"
    ;;

  Linux-LinuxMint-*)
    start_server=""
    stop_server="sudo /etc/init.d/arango stop"

    install_package="sudo dpkg -i ${sfolder_name}/${package_name}"
    remove_package="sudo dpkg --purge $product_name"
    ;;

  Darwin*)
    start_server=""
    stop_server="sudo launchctl unload /Library/LaunchDaemons/org.arangodb.plist"