
merge with SVN

Jan Steemann 2012-03-20 17:18:18 +01:00
parent 2631c3cea3
commit 4fbb2ae126
28 changed files with 907 additions and 414 deletions

View File

@ -63,7 +63,7 @@ namespace triagens {
logFacility("-"),
logLevel("info"),
logFormat(),
logSeverity("all"),
logSeverity("human"),
logFile("+"),
logPrefix(),
logSyslog(),

View File

@ -93,6 +93,22 @@ static void Reserve (TRI_string_buffer_t * self, size_t size) {
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief create a new string buffer and initialise it
////////////////////////////////////////////////////////////////////////////////
TRI_string_buffer_t* TRI_CreateStringBuffer (void) {
TRI_string_buffer_t* self = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
if (!self) {
return NULL;
}
TRI_InitStringBuffer(self);
return self;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief initialises the string buffer
///
@ -135,6 +151,18 @@ void TRI_AnnihilateStringBuffer (TRI_string_buffer_t * self) {
}
}
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the string buffer and the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeStringBuffer (TRI_string_buffer_t * self) {
if (self->_buffer != NULL) {
TRI_Free(self->_buffer);
}
TRI_Free(self);
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

View File

@ -67,13 +67,19 @@ TRI_string_buffer_t;
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief create a new string buffer and initialise it
////////////////////////////////////////////////////////////////////////////////
TRI_string_buffer_t* TRI_CreateStringBuffer (void);
////////////////////////////////////////////////////////////////////////////////
/// @brief initialises the string buffer
///
/// @warning You must call initialise before using the string buffer.
////////////////////////////////////////////////////////////////////////////////
void TRI_InitStringBuffer (TRI_string_buffer_t * self);
void TRI_InitStringBuffer (TRI_string_buffer_t *);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the string buffer
@ -81,7 +87,7 @@ void TRI_InitStringBuffer (TRI_string_buffer_t * self);
/// @warning You must call free or destroy after using the string buffer.
////////////////////////////////////////////////////////////////////////////////
void TRI_DestroyStringBuffer (TRI_string_buffer_t * self);
void TRI_DestroyStringBuffer (TRI_string_buffer_t *);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the string buffer and cleans the buffer
@ -89,7 +95,13 @@ void TRI_DestroyStringBuffer (TRI_string_buffer_t * self);
/// @warning You must call free or destroy after using the string buffer.
////////////////////////////////////////////////////////////////////////////////
void TRI_AnnihilateStringBuffer (TRI_string_buffer_t * self);
void TRI_AnnihilateStringBuffer (TRI_string_buffer_t *);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the string buffer and the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeStringBuffer (TRI_string_buffer_t *);
////////////////////////////////////////////////////////////////////////////////
/// @}
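
The header now offers two lifecycle variants for string buffers: a caller-owned struct managed with TRI_InitStringBuffer / TRI_DestroyStringBuffer (or TRI_AnnihilateStringBuffer), and a heap-allocated one managed with the new TRI_CreateStringBuffer / TRI_FreeStringBuffer pair, which replaces the manual TRI_Allocate + TRI_InitStringBuffer and TRI_DestroyStringBuffer + TRI_Free sequences in the query-language files further down. A minimal usage sketch; the include path and the append helper are assumptions, not part of this diff:

```c
#include <stdio.h>
#include "string-buffer.h"                     /* assumed include path */

void StringBufferExample (void) {
  TRI_string_buffer_t* heapBuffer;
  TRI_string_buffer_t stackBuffer;

  /* heap variant: allocate and initialise in one call */
  heapBuffer = TRI_CreateStringBuffer();
  if (heapBuffer == NULL) {
    return;                                    /* out of memory */
  }

  /* TRI_AppendStringStringBuffer is assumed here; any append helper will do */
  TRI_AppendStringStringBuffer(heapBuffer, "users.name");
  printf("%s\n", heapBuffer->_buffer);

  /* frees the internal character buffer AND the struct itself */
  TRI_FreeStringBuffer(heapBuffer);

  /* caller-owned variant: only the internal character buffer is managed */
  TRI_InitStringBuffer(&stackBuffer);
  TRI_AppendStringStringBuffer(&stackBuffer, "users.age");
  TRI_DestroyStringBuffer(&stackBuffer);       /* struct itself stays on the stack */
}
```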

View File

@ -216,6 +216,7 @@ avocsh_SOURCES = \
################################################################################
JAVASCRIPT_HEADER = \
js/common/bootstrap/js-errors.h \
js/common/bootstrap/js-modules.h \
js/common/bootstrap/js-print.h \
js/common/bootstrap/js-errors.h \

View File

@ -762,6 +762,7 @@ avocsh_SOURCES = \
################################################################################
################################################################################
JAVASCRIPT_HEADER = \
js/common/bootstrap/js-errors.h \
js/common/bootstrap/js-modules.h \
js/common/bootstrap/js-print.h \
js/common/bootstrap/js-errors.h \

View File

@ -549,8 +549,7 @@ bool QLAstQueryAddGeoRestriction (QL_ast_query_t* query,
restriction->_compareLat._collection =
TRI_DuplicateString(valueNode->_lhs->_value._stringValue);
restriction->_compareLat._field = TRI_DuplicateString(fieldName->_buffer);
TRI_DestroyStringBuffer(fieldName);
TRI_Free(fieldName);
TRI_FreeStringBuffer(fieldName);
}
else {
QLAstQueryFreeRestriction(restriction);
@ -571,8 +570,7 @@ bool QLAstQueryAddGeoRestriction (QL_ast_query_t* query,
restriction->_compareLon._collection =
TRI_DuplicateString(valueNode->_lhs->_value._stringValue);
restriction->_compareLon._field = TRI_DuplicateString(fieldName->_buffer);
TRI_DestroyStringBuffer(fieldName);
TRI_Free(fieldName);
TRI_FreeStringBuffer(fieldName);
}
else {
QLAstQueryFreeRestriction(restriction);
@ -613,13 +611,11 @@ TRI_string_buffer_t* QLAstQueryGetMemberNameString (TRI_query_node_t* node,
TRI_query_node_t *lhs, *rhs;
TRI_string_buffer_t* buffer;
buffer = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
buffer = TRI_CreateStringBuffer();
if (!buffer) {
return NULL;
}
TRI_InitStringBuffer(buffer);
if (includeCollection) {
// add collection part
lhs = node->_lhs;

View File

@ -1373,8 +1373,7 @@ static QL_optimize_range_t* QLOptimizeCreateRange (TRI_query_node_t* memberNode,
range = (QL_optimize_range_t*) TRI_Allocate(sizeof(QL_optimize_range_t));
if (!range) {
// clean up
TRI_DestroyStringBuffer(name);
TRI_Free(name);
TRI_FreeStringBuffer(name);
return NULL;
}
@ -1411,8 +1410,7 @@ static QL_optimize_range_t* QLOptimizeCreateRange (TRI_query_node_t* memberNode,
range->_hash = QLAstQueryGetMemberNameHash(memberNode);
// we can now free the temporary name buffer
TRI_DestroyStringBuffer(name);
TRI_Free(name);
TRI_FreeStringBuffer(name);
if (type == TRI_QueryNodeBinaryOperatorIdentical ||
type == TRI_QueryNodeBinaryOperatorEqual) {
@ -1423,8 +1421,7 @@ static QL_optimize_range_t* QLOptimizeCreateRange (TRI_query_node_t* memberNode,
name = QLAstQueryGetMemberNameString(valueNode, false);
if (name) {
range->_refValue._field = TRI_DuplicateString(name->_buffer);
TRI_DestroyStringBuffer(name);
TRI_Free(name);
TRI_FreeStringBuffer(name);
}
}
else if (range->_valueType == RANGE_TYPE_DOUBLE) {

View File

@ -21,14 +21,14 @@ This will install AvocadoDB and all dependencies.
## First Steps
./avocado --shell
avocado> db.examples.count();
avocado> db.examples.save({ Hallo: "World" });
avocado> db.examples.select();
avocado> db.examples.count();
avocado> db.examples.all().toArray();
## Caveat
Please note that this is a very early version of AvocadoDB. There will be
bugs and we'd realy appreciate it if you
bugs and we'd really appreciate it if you
<a href="https://github.com/triAGENS/AvocadoDB/issues">report</a> them:
https://github.com/triAGENS/AvocadoDB/issues

View File

@ -237,7 +237,7 @@ bool RestDocumentHandler::createDocument () {
return false;
}
// should we create the collection
// shall we create the collection?
string createStr = request->value("createCollection", found);
bool create = found ? StringUtils::boolean(createStr) : false;
@ -263,15 +263,8 @@ bool RestDocumentHandler::createDocument () {
bool waitForSync = _documentCollection->base._waitForSync;
// note: unlocked is performed by createJson() FIXME URGENT SHOULD RETURN A DOC_MPTR NOT A POINTER!!!
TRI_doc_mptr_t const* mptr = _documentCollection->createJson(_documentCollection, TRI_DOC_MARKER_DOCUMENT, json, 0, true);
TRI_voc_did_t did = 0;
TRI_voc_rid_t rid = 0;
if (mptr != 0) {
did = mptr->_did;
rid = mptr->_rid;
}
// note: unlocked is performed by createJson()
TRI_doc_mptr_t const mptr = _documentCollection->createJson(_documentCollection, TRI_DOC_MARKER_DOCUMENT, json, 0, true);
// .............................................................................
// outside write transaction
@ -279,12 +272,12 @@ bool RestDocumentHandler::createDocument () {
TRI_FreeJson(json);
if (mptr != 0) {
if (mptr._did != 0) {
if (waitForSync) {
generateCreated(_documentCollection->base._cid, did, rid);
generateCreated(_documentCollection->base._cid, mptr._did, mptr._rid);
}
else {
generateAccepted(_documentCollection->base._cid, did, rid);
generateAccepted(_documentCollection->base._cid, mptr._did, mptr._rid);
}
return true;
@ -322,7 +315,7 @@ bool RestDocumentHandler::readDocument () {
default:
generateError(HttpResponse::BAD,
TRI_REST_ERROR_SUPERFLUOUS_SUFFICES,
"expecting GET /document/<document-handle>");
"expecting GET /document/<document-handle> or GET /document?collection=<collection-identifier>");
return false;
}
}
@ -389,8 +382,7 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
_documentCollection->beginRead(_documentCollection);
// FIXME FIXME
TRI_doc_mptr_t const* document = findDocument(did);
TRI_doc_mptr_t const document = findDocument(did);
_documentCollection->endRead(_documentCollection);
@ -398,22 +390,22 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
// outside read transaction
// .............................................................................
if (document == 0) {
if (document._did == 0) {
generateDocumentNotFound(suffix[0]);
return false;
}
TRI_voc_rid_t rid = document->_rid;
TRI_voc_rid_t rid = document._rid;
if (ifNoneRid == 0) {
if (ifRid == 0) {
generateDocument(document, generateBody);
generateDocument(&document, generateBody);
}
else if (ifRid == rid) {
generateDocument(document, generateBody);
generateDocument(&document, generateBody);
}
else {
generatePreconditionFailed(_documentCollection->base._cid, document->_did, rid);
generatePreconditionFailed(_documentCollection->base._cid, document._did, rid);
}
}
else if (ifNoneRid == rid) {
@ -424,18 +416,18 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
generateNotModified(StringUtils::itoa(rid));
}
else {
generatePreconditionFailed(_documentCollection->base._cid, document->_did, rid);
generatePreconditionFailed(_documentCollection->base._cid, document._did, rid);
}
}
else {
if (ifRid == 0) {
generateDocument(document, generateBody);
generateDocument(&document, generateBody);
}
else if (ifRid == rid) {
generateDocument(document, generateBody);
generateDocument(&document, generateBody);
}
else {
generatePreconditionFailed(_documentCollection->base._cid, document->_did, rid);
generatePreconditionFailed(_documentCollection->base._cid, document._did, rid);
}
}
@ -672,14 +664,14 @@ bool RestDocumentHandler::updateDocument () {
// unlocking is performed in updateJson()
TRI_voc_rid_t rid = 0;
TRI_doc_mptr_t const* mptr = _documentCollection->updateJson(_documentCollection, json, did, revision, &rid, policy, true);
TRI_doc_mptr_t const mptr = _documentCollection->updateJson(_documentCollection, json, did, revision, &rid, policy, true);
// .............................................................................
// outside write transaction
// .............................................................................
if (mptr != 0) {
generateUpdated(_documentCollection->base._cid, did, mptr->_rid);
if (mptr._did != 0) {
generateUpdated(_documentCollection->base._cid, did, mptr._rid);
return true;
}
else {
@ -723,7 +715,7 @@ bool RestDocumentHandler::updateDocument () {
/// body of the response contains an error document.
///
/// If an etag is supplied in the "If-Match" field, then the AvocadoDB checks
/// that the revision of the document is equal to the tag. If there is a
/// that the revision of the document is equal to the etag. If there is a
/// mismatch, then a @LIT{HTTP 412} conflict is returned and no delete is
/// performed.
///
@ -783,6 +775,13 @@ bool RestDocumentHandler::deleteDocument () {
// extract or chose the update policy
TRI_doc_update_policy_e policy = extractUpdatePolicy();
if (policy == TRI_DOC_UPDATE_ILLEGAL) {
generateError(HttpResponse::BAD,
TRI_REST_ERROR_BAD_PARAMETER,
"policy must be 'error' or 'last'");
return false;
}
// .............................................................................
// inside write transaction
// .............................................................................
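
The handler changes above switch TRI_doc_mptr_t results from pointers to by-value copies: createJson and updateJson now return the master pointer itself, failure is signalled through a zero _did instead of a NULL pointer, and deleteDocument now rejects any policy other than 'error' or 'last'. A sketch of the new calling convention against the underlying C API; the surrounding function and variable names are illustrative, the signatures follow document-collection.h later in this diff:

```c
#include <stdio.h>
#include <stdbool.h>
#include "document-collection.h"   /* assumed include, for the types used below */

/* sketch: create a document and report the outcome using the by-value convention */
static bool ExampleCreateDocument (TRI_doc_collection_t* collection, TRI_json_t* json) {
  TRI_doc_mptr_t mptr;

  /* the caller acquires the write lock; createJson with release == true
     releases it again as soon as possible */
  collection->beginWrite(collection);
  mptr = collection->createJson(collection, TRI_DOC_MARKER_DOCUMENT, json, 0, true);

  if (mptr._did == 0) {
    /* creation failed; TRI_errno() holds the reason */
    return false;
  }

  printf("created document %lu, revision %lu\n",
         (unsigned long) mptr._did,
         (unsigned long) mptr._rid);
  return true;
}
```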

View File

@ -427,6 +427,9 @@ bool RestVocbaseBaseHandler::findCollection (string const& name, bool create) {
_collection = 0;
if (name.empty()) {
generateError(HttpResponse::BAD,
TRI_REST_ERROR_CORRUPTED_JSON,
"collection identifier is empty");
return false;
}
@ -557,9 +560,12 @@ TRI_json_t* RestVocbaseBaseHandler::parseJsonBody () {
/// @brief sets the result set, needs a loaded collection
////////////////////////////////////////////////////////////////////////////////
TRI_doc_mptr_t const* RestVocbaseBaseHandler::findDocument (string const& doc) {
TRI_doc_mptr_t const RestVocbaseBaseHandler::findDocument (string const& doc) {
TRI_doc_mptr_t document;
if (_documentCollection == 0) {
return 0;
document._did = 0;
return document;
}
uint32_t id = StringUtils::uint32(doc);
@ -570,7 +576,7 @@ TRI_doc_mptr_t const* RestVocbaseBaseHandler::findDocument (string const& doc) {
_documentCollection->beginRead(_documentCollection);
TRI_doc_mptr_t const* document = _documentCollection->read(_documentCollection, id);
document = _documentCollection->read(_documentCollection, id);
// keep the oldest barrier
if (_barrier != 0) {

View File

@ -245,7 +245,7 @@ namespace triagens {
/// @brief sets the result set, needs the collection
////////////////////////////////////////////////////////////////////////////////
TRI_doc_mptr_t const* findDocument (string const& doc);
TRI_doc_mptr_t const findDocument (string const& doc);
////////////////////////////////////////////////////////////////////////////////
/// @}

View File

@ -35,20 +35,33 @@ class AvocadoDB
method = args[:method] || :get
url = args[:url]
body = args[:body]
headers = args[:headers]
result = args[:result]
response = result.parsed_response
logfile.puts '-' * 80
h_option = ""
h_sep = ""
if headers
for k in [ "if-match", "if-none-match" ] do
if headers.key?(k)
h_option = h_option + h_sep + "'-H #{k}: #{headers[k]}'"
h_sep = " "
end
end
end
if method == :get
logfile.puts "> curl -X GET --dump - http://localhost:8529#{url}"
logfile.puts "> curl -X GET #{h_option} --dump - http://localhost:8529#{url}"
logfile.puts
elsif method == :post
if body == nil
logfile.puts "> curl -X POST --dump - http://localhost:8529#{url}"
logfile.puts "> curl -X POST #{h_option} --dump - http://localhost:8529#{url}"
logfile.puts
else
logfile.puts "> curl --data @- -X POST --dump - http://localhost:8529#{url}"
logfile.puts "> curl --data @- -X POST #{h_option} --dump - http://localhost:8529#{url}"
logfile.puts body
logfile.puts
end

View File

@ -2,6 +2,8 @@ require 'rspec'
require './avocadodb.rb'
describe AvocadoDB do
prefix = "rest_create-document"
context "creating a document in a collection" do
################################################################################
@ -9,19 +11,6 @@ describe AvocadoDB do
################################################################################
context "error handling" do
it "returns an error if collection idenifier is missing" do
cmd = "/document"
doc = AvocadoDB.post(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(1202)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "rest_create-document-missing-cid")
end
it "returns an error if url contains a suffix" do
cmd = "/document/123456"
doc = AvocadoDB.post(cmd)
@ -32,7 +21,20 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "rest_create.document-superfluous-suffix")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "#{prefix}-superfluous-suffix")
end
it "returns an error if collection idenifier is missing" do
cmd = "/document"
doc = AvocadoDB.post(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(1202)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "#{prefix}-missing-cid")
end
it "returns an error if the collection identifier is unknown" do
@ -45,7 +47,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "rest_create-document-unknown-cid")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "#{prefix}-unknown-cid")
end
it "returns an error if the collection name is unknown" do
@ -58,7 +60,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "rest_create-document-unknown-name")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "#{prefix}-unknown-name")
end
it "returns an error if the JSON body is corrupted" do
@ -77,7 +79,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "rest_create-document-bad-json")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}-bad-json")
AvocadoDB.drop_collection("UnitTestsCollectionBasics")
end
end
@ -124,7 +126,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
location.should eq("/document/#{did}")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "rest_create-document")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}")
AvocadoDB.delete(location)
end
@ -172,7 +174,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
location.should eq("/document/#{did}")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "rest_create-document-accept")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}-accept")
AvocadoDB.delete(location)
end
@ -220,7 +222,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
location.should eq("/document/#{did}")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "rest_create-document-new-named-collection")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}-new-named-collection")
AvocadoDB.delete(location)
end
@ -250,7 +252,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc)
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}-unknown-collection-name")
end
it "create the collection and the document" do
@ -277,7 +279,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
location.should eq("/document/#{did}")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "rest_create-document-create-collection")
AvocadoDB.log(:method => :post, :url => cmd, :body => body, :result => doc, :output => "#{prefix}-create-collection")
AvocadoDB.delete(location)
end

View File

@ -2,6 +2,8 @@ require 'rspec'
require './avocadodb.rb'
describe AvocadoDB do
prefix = "rest_delete-document"
context "delete a document in a collection" do
################################################################################
@ -9,17 +11,236 @@ describe AvocadoDB do
################################################################################
context "error handling" do
before do
@cn = "UnitTestsCollectionBasics"
@cid = AvocadoDB.create_collection(@cn)
end
after do
AvocadoDB.drop_collection(@cn)
end
it "returns an error if document handle is missing" do
cmd = "/document"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(1202)
doc.parsed_response['errorNum'].should eq(501)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "rest_delete-document-missing-handle")
AvocadoDB.log(:method => :post, :url => cmd, :result => doc, :output => "#{prefix}-missing-handle")
end
it "returns an error if document handle is corrupted" do
cmd = "/document/123456"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(501)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-bad-handle")
end
it "returns an error if document handle is corrupted" do
cmd = "/document//123456"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(502)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-bad-handle2")
end
it "returns an error if collection identifier is unknown" do
cmd = "/document/123456/234567"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(404)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(1201)
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-unknown-cid")
end
it "returns an error if document handle is unknown" do
cmd = "/document/#{@cid}/234567"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(404)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(1200)
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-unknown-handle")
end
it "returns an error if the policy parameter is bad" do
cmd = "/document?collection=#{@cid}"
body = "{ \"Hallo\" : \"World\" }"
doc = AvocadoDB.post(cmd, :body => body)
doc.code.should eq(201)
location = doc.headers['location']
location.should be_kind_of(String)
did = doc.parsed_response['_id']
rev = doc.parsed_response['_rev']
# delete document, different revision
cmd = "/document/#{did}?policy=last-write"
hdr = { "if-match" => "\"#{rev-1}\"" }
doc = AvocadoDB.delete(cmd, :headers => hdr)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-policy-bad")
end
end
################################################################################
## deleting documents
################################################################################
context "deleting documents" do
before do
@cn = "UnitTestsCollectionBasics"
@cid = AvocadoDB.create_collection(@cn)
end
after do
AvocadoDB.drop_collection(@cn)
end
it "create a document and delete it" do
cmd = "/document?collection=#{@cid}"
body = "{ \"Hallo\" : \"World\" }"
doc = AvocadoDB.post(cmd, :body => body)
doc.code.should eq(201)
location = doc.headers['location']
location.should be_kind_of(String)
did = doc.parsed_response['_id']
rev = doc.parsed_response['_rev']
# delete document
cmd = "/document/#{did}"
doc = AvocadoDB.delete(cmd)
doc.code.should eq(200)
doc.parsed_response['error'].should eq(false)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
did2 = doc.parsed_response['_id']
did2.should be_kind_of(String)
did2.should eq(did)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
rev2.should eq(rev)
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}")
end
it "create a document and delete it, using if-match" do
cmd = "/document?collection=#{@cid}"
body = "{ \"Hallo\" : \"World\" }"
doc = AvocadoDB.post(cmd, :body => body)
doc.code.should eq(201)
location = doc.headers['location']
location.should be_kind_of(String)
did = doc.parsed_response['_id']
rev = doc.parsed_response['_rev']
# delete document, different revision
cmd = "/document/#{did}"
hdr = { "if-match" => "\"#{rev-1}\"" }
doc = AvocadoDB.delete(cmd, :headers => hdr)
doc.code.should eq(412)
doc.parsed_response['error'].should eq(true)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
did2 = doc.parsed_response['_id']
did2.should be_kind_of(String)
did2.should eq(did)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
rev2.should eq(rev)
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-match-other")
# delete document, same revision
cmd = "/document/#{did}"
hdr = { "if-match" => "\"#{rev}\"" }
doc = AvocadoDB.delete(cmd, :headers => hdr)
doc.code.should eq(200)
doc.parsed_response['error'].should eq(false)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
did2 = doc.parsed_response['_id']
did2.should be_kind_of(String)
did2.should eq(did)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
rev2.should eq(rev)
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-match")
end
it "create a document and delete it, using if-match and last-write wins" do
cmd = "/document?collection=#{@cid}"
body = "{ \"Hallo\" : \"World\" }"
doc = AvocadoDB.post(cmd, :body => body)
doc.code.should eq(201)
location = doc.headers['location']
location.should be_kind_of(String)
did = doc.parsed_response['_id']
rev = doc.parsed_response['_rev']
# delete document, different revision
cmd = "/document/#{did}?policy=last"
hdr = { "if-match" => "\"#{rev-1}\"" }
doc = AvocadoDB.delete(cmd, :headers => hdr)
doc.code.should eq(200)
doc.parsed_response['error'].should eq(false)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
did2 = doc.parsed_response['_id']
did2.should be_kind_of(String)
did2.should eq(did)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
rev2.should eq(rev)
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-match-other-last-write")
end
end

View File

@ -2,6 +2,8 @@ require 'rspec'
require './avocadodb.rb'
describe AvocadoDB do
prefix = "rest_read-document"
context "reading a document in a collection" do
################################################################################
@ -28,7 +30,20 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "rest_read-document-bad-handle")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-bad-handle")
end
it "returns an error if document handle is corrupted" do
cmd = "/document//123456"
doc = AvocadoDB.get(cmd)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['errorNum'].should eq(503)
doc.parsed_response['code'].should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-bad-handle2")
end
it "returns an error if collection identifier is unknown" do
@ -41,7 +56,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "rest_read-document-unknown-cid")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-unknown-cid")
end
it "returns an error if document handle is unknown" do
@ -54,7 +69,7 @@ describe AvocadoDB do
doc.parsed_response['code'].should eq(404)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "rest_read-document-unknown-handle")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}-unknown-handle")
end
end
@ -105,7 +120,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "rest_read-document")
AvocadoDB.log(:method => :get, :url => cmd, :result => doc, :output => "#{prefix}")
AvocadoDB.delete(location)
end
@ -135,7 +150,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "rest_read-document-if-none-match")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-none-match")
# get document, if-none-match with different rev
cmd = "/document/#{did}"
@ -163,7 +178,23 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "rest_read-document-if-none-match-other")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-none-match-other")
AvocadoDB.delete(location)
end
it "create a document and read it, use if-match" do
cmd = "/document?collection=#{@cid}"
body = "{ \"Hallo\" : \"World\" }"
doc = AvocadoDB.post(cmd, :body => body)
doc.code.should eq(201)
location = doc.headers['location']
location.should be_kind_of(String)
did = doc.parsed_response['_id']
rev = doc.parsed_response['_rev']
# get document, if-match with same rev
cmd = "/document/#{did}"
@ -186,7 +217,7 @@ describe AvocadoDB do
etag.should eq("\"#{rev}\"")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "rest_read-document-if-match")
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-match")
# get document, if-match with different rev
cmd = "/document/#{did}"
@ -194,9 +225,16 @@ describe AvocadoDB do
doc = AvocadoDB.get(cmd, :headers => hdr)
doc.code.should eq(412)
doc.headers['content-length'].should eq(0)
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "rest_read-document-if-match-other")
did2 = doc.parsed_response['_id']
did2.should be_kind_of(String)
did2.should eq(did)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
rev2.should eq(rev)
AvocadoDB.log(:method => :get, :url => cmd, :headers => hdr, :result => doc, :output => "#{prefix}-if-match-other")
AvocadoDB.delete(location)
end

View File

@ -1078,7 +1078,8 @@ static v8::Handle<v8::Value> JS_DocumentQuery (v8::Arguments const& argv) {
// get document
// .............................................................................
TRI_doc_mptr_t const* document;
TRI_doc_mptr_t document;
v8::Handle<v8::Value> result;
// .............................................................................
// inside a read transaction
@ -1087,13 +1088,12 @@ static v8::Handle<v8::Value> JS_DocumentQuery (v8::Arguments const& argv) {
collection->_collection->beginRead(collection->_collection);
document = collection->_collection->read(collection->_collection, did);
v8::Handle<v8::Value> result;
if (document != 0) {
if (document._did != 0) {
TRI_barrier_t* barrier;
barrier = TRI_CreateBarrierElement(&collection->_collection->_barrierList);
result = TRI_WrapShapedJson(collection, document, barrier);
result = TRI_WrapShapedJson(collection, &document, barrier);
}
collection->_collection->endRead(collection->_collection);
@ -1102,7 +1102,7 @@ static v8::Handle<v8::Value> JS_DocumentQuery (v8::Arguments const& argv) {
// outside a write transaction
// .............................................................................
if (document == 0) {
if (document._did == 0) {
return scope.Close(v8::ThrowException(v8::String::New("document not found")));
}

View File

@ -51,31 +51,28 @@ static TRI_voc_did_t CreateLock (TRI_doc_collection_t* document,
TRI_df_marker_type_e type,
TRI_shaped_json_t const* json,
void const* data) {
TRI_doc_mptr_t const* result;
document->beginWrite(document);
result = document->create(document, type, json, data, true);
return result == NULL ? 0 : result->_did;
return document->create(document, type, json, data, true)._did;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a new document in the collection from json
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* CreateJson (TRI_doc_collection_t* collection,
static TRI_doc_mptr_t const CreateJson (TRI_doc_collection_t* collection,
TRI_df_marker_type_e type,
TRI_json_t const* json,
void const* data,
bool release) {
TRI_shaped_json_t* shaped;
TRI_doc_mptr_t const* result;
TRI_doc_mptr_t result;
shaped = TRI_ShapedJsonJson(collection->_shaper, json);
if (shaped == 0) {
collection->base._lastError = TRI_set_errno(TRI_VOC_ERROR_SHAPER_FAILED);
return false;
result._did = 0;
return result;
}
result = collection->create(collection, type, shaped, data, release);
@ -95,19 +92,15 @@ static bool UpdateLock (TRI_doc_collection_t* document,
TRI_voc_rid_t rid,
TRI_voc_rid_t* oldRid,
TRI_doc_update_policy_e policy) {
TRI_doc_mptr_t const* result;
document->beginWrite(document);
result = document->update(document, json, did, rid, oldRid, policy, true);
return result != NULL;
return document->update(document, json, did, rid, oldRid, policy, true)._did != 0;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief updates a document in the collection from json
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* UpdateJson (TRI_doc_collection_t* collection,
static TRI_doc_mptr_t const UpdateJson (TRI_doc_collection_t* collection,
TRI_json_t const* json,
TRI_voc_did_t did,
TRI_voc_rid_t rid,
@ -115,13 +108,14 @@ static TRI_doc_mptr_t const* UpdateJson (TRI_doc_collection_t* collection,
TRI_doc_update_policy_e policy,
bool release) {
TRI_shaped_json_t* shaped;
TRI_doc_mptr_t const* result;
TRI_doc_mptr_t result;
shaped = TRI_ShapedJsonJson(collection->_shaper, json);
if (shaped == 0) {
collection->base._lastError = TRI_set_errno(TRI_VOC_ERROR_SHAPER_FAILED);
return false;
result._did = 0;
return result;
}
result = collection->update(collection, shaped, did, rid, oldRid, policy, release);

View File

@ -114,131 +114,136 @@ TRI_doc_collection_info_t;
/// represented as @ref ShapedJson "shaped JSON objects". Each document has a
/// place in memory which is determined by the position in the memory mapped
/// file. As datafiles are compacted during garbage collection, this position
/// can change over time. Each active document also has a master pointer of
/// type @ref TRI_doc_mptr_t. This master pointer never changes and is
/// valid as long as the object is not deleted.
/// can change over time. Each active document also has a master pointer of type
/// @ref TRI_doc_mptr_t. This master pointer never changes and is valid as long
/// as the object is not deleted.
///
/// It is important to use transaction for create, read, update, and delete.
/// The functions "create", "createJson", "update", "updateJson", and "destroy"
/// are only allowed within a "beginWrite" and "endWrite". The function read is
/// only allowed with a "beginRead" and "endRead". Note that "read" returns
/// a master pointer. After the "endRead" this master pointer might no longer
/// be valid, because the document could be deleted by another thread after
/// "endRead" has been executed.
/// It is important to use locks for create, read, update, and delete. The
/// functions @FN{create}, @FN{createJson}, @FN{update}, @FN{updateJson}, and
/// @FN{destroy} are only allowed within a @FN{beginWrite} and
/// @FN{endWrite}. The function @FN{read} is only allowed within a
/// @FN{beginRead} and @FN{endRead}. Note that @FN{read} returns a copy of the
/// master pointer.
///
/// If a document is deleted, its master pointer becomes invalid. However, the
/// document itself exists. Executing a query and constructing its result set,
/// must be done inside a "beginRead" and
/// "endRead". However, the result set itself does not contain any master
/// pointers. Therefore, it stays valid after the "endRead" call.
/// document itself still exists. Executing a query and constructing its result
/// set, must be done inside a "beginRead" and "endRead".
///
/// <b><tt>bool beginRead (TRI_doc_collection_t*)</tt></b>
/// @FUN{bool beginRead (TRI_doc_collection_t*)}
////////////////////////////////////////////////
///
/// Starts a read transaction. Query and calls to "read" are allowed within a
/// read transaction, but not calls to "create", "update", or "destroy".
/// Returns @c true if the transaction could be started. This call might block
/// until a running write transaction is finished.
/// Starts a read transaction. Query and calls to @FN{read} are allowed within a
/// read transaction, but not calls to @FN{create}, @FN{update}, or
/// @FN{destroy}. Returns @LIT{true} if the transaction could be started. This
/// call might block until a running write transaction is finished.
///
/// <b><tt>bool endRead (TRI_doc_collection_t*)</tt></b>
/// @FUN{bool endRead (TRI_doc_collection_t*)}
/////////////////////////////////////////////////////
///
/// Ends a read transaction. Should only be called after a successful
/// "beginRead".
///
/// <b><tt>bool beginWrite (TRI_doc_collection_t*)</tt></b>
/// @FUN{bool beginWrite (TRI_doc_collection_t*)}
/////////////////////////////////////////////////
///
/// Starts a write transaction. Query and calls to "create", "read", "update",
/// and "destroy" are allowed within a write transaction. Returns @c true if the
/// transaction could be started. This call might block until a running write
/// transaction is finished.
/// Starts a write transaction. Query and calls to @FN{create}, @FN{read},
/// @FN{update}, and @FN{destroy} are allowed within a write
/// transaction. Returns @LIT{true} if the transaction could be started. This
/// call might block until a running write transaction is finished.
///
/// <b><tt>bool endWrite (TRI_doc_collection_t*)</tt></b>
/// @FUN{bool endWrite (TRI_doc_collection_t*)}
///////////////////////////////////////////////
///
/// Ends a write transaction. Should only be called after a successful
/// "beginWrite".
/// @LIT{beginWrite}.
///
/// @FUN{void createHeader (TRI_doc_collection_t* @FA{collection}, TRI_datafile_t* @FA{datafile}, TRI_df_marker_t const* @FA{marker}, size_t @FA{markerSize}, TRI_doc_mptr_t* {mptr}, void const* @FA{data})}
/// @FUN{void createHeader (TRI_doc_collection_t*, TRI_datafile_t*, TRI_df_marker_t const*, size_t @FA{markerSize}, TRI_doc_mptr_t*, void const* @FA{data})}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Creates a new header.
///
/// @FUN{void updateHeader (TRI_doc_collection_t* @FA{collection}, TRI_datafile_t* @FA{datafile}, TRI_df_marker_t const* @FA{marker}, size_t @FA{markerSize}, TRI_doc_mptr_t const* {mptr}, TRI_doc_mptr_t* @FA{update})}
/// @FUN{void updateHeader (TRI_doc_collection_t*, TRI_datafile_t*, TRI_df_marker_t const*, size_t @FA{markerSize}, TRI_doc_mptr_t const* @FA{current}, TRI_doc_mptr_t* @FA{update})}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Updates an existing header.
///
/// <b><tt>TRI_doc_mptr_t const* create (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_shaped_json_t const*, bool release)</tt></b>
/// @FUN{TRI_doc_mptr_t const create (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_shaped_json_t const*, bool @FA{release})}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Adds a new document to the collection and returns the master pointer of the
/// newly created entry. In case of an error, NULL is returned and "TRI_errno()" is
/// set accordingly. The function DOES NOT acquire or release a write lock. This
/// must be done by the caller. If release is true, it will release the write lock as
/// soon as possible.
/// newly created entry. In case of an error, the attribute @LIT{_did} of the
/// result is @LIT{0} and "TRI_errno()" is set accordingly. The function DOES
/// NOT acquire a write lock. This must be done by the caller. If @FA{release}
/// is true, it will release the write lock as soon as possible.
///
/// <b><tt>TRI_doc_mptr_t const* createJson (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_json_t const*)</tt></b>
/// @FUN{TRI_doc_mptr_t const createJson (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_json_t const*, bool @FA{release})}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Adds a new document to the collection and returns the master pointer of the
/// newly created entry. In case of an error, NULL is returned and "TRI_errno()"
/// is set accordingly. The function DOES NOT acquire or release a write lock. This
/// must be done by the caller. If release is true, it will release the write lock as
/// soon as possible.
/// As before, but instead of a shaped json a json object must be given.
///
/// <b><tt>TRI_voc_did_t createLock (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_shaped_json_t const*)</tt></b>
/// @FUN{TRI_voc_did_t createLock (TRI_doc_collection_t*, TRI_df_marker_type_e, TRI_shaped_json_t const*)}
//////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Adds a new document to the collection and returns document identifier of the
/// newly created entry. In case of an error, NULL is returned and "TRI_errno()"
/// is set accordingly. The function will acquire and release a write lock.
/// As before, but the function will acquire and release the write lock.
///
/// <b><tt>TRI_doc_mptr_t const* read (TRI_doc_collection_t*, TRI_voc_did_t did)</tt></b>
/// @FUN{TRI_doc_mptr_t const read (TRI_doc_collection_t*, TRI_voc_did_t)}
//////////////////////////////////////////////////////////////////////////
///
/// Returns the master pointer of the document with identifier @c did. If the
/// document does not exists or is deleted, then @c NULL is returned. The function
/// DOES NOT acquire or release a read lock. This must be done by the caller.
/// Returns the master pointer of the document with the given identifier. If the
/// document does not exist or is deleted, then the identifier @LIT{_did} of
/// the result is @LIT{0}. The function DOES NOT acquire or release a read
/// lock. This must be done by the caller.
///
/// <b><tt>TRI_doc_mptr_t const* update (TRI_doc_collection_t*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_doc_update_policy_e, bool release)</tt></b>
/// @FUN{TRI_doc_mptr_t const update (TRI_doc_collection_t*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t @FA{rid}, TRI_voc_rid_t* @FA{current}, TRI_doc_update_policy_e, bool @FA{release})}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Updates an existing document of the collection and returns the master
/// pointer in case of success. Otherwise, @c false is returned and the
/// "TRI_errno()" is accordingly. The function DOES NOT acquire or release a
/// write lock. This must be done by the caller. However, if release is true, it
/// will release the write lock as soon as possible.
/// Updates an existing document of the collection and returns a copy of a valid
/// master pointer in case of success. Otherwise, the attribute @LIT{_did} of
/// the result is @LIT{0} and the "TRI_errno()" is set accordingly. The function
/// DOES NOT acquire a write lock. This must be done by the caller. However, if
/// @FA{release} is true, it will release the write lock as soon as possible.
///
/// If the policy is @c TRI_DOC_UPDATE_LAST_WRITE, than the revision is ignored
/// and the update is always performed. If the policy is @c TRI_DOC_UPDATE_ERROR
/// and the revision is given (i. e. not equal 0), then the update is only
/// performed if the current revision matches the given.
/// If the policy is @ref TRI_DOC_UPDATE_LAST_WRITE, then the revision @FA{rid}
/// is ignored and the update is always performed. If the policy is @ref
/// TRI_DOC_UPDATE_ERROR and the revision @FA{rid} is given (i. e. not equal
/// 0), then the update is only performed if the current revision matches the
/// given one. In any case the current revision after the update of the document
/// is returned in @FA{current}.
///
/// <b><tt>TRI_doc_mptr_t const* updateJson (TRI_doc_collection_t*, TRI_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_doc_update_policy_e, bool release)</tt></b>
/// @FUN{TRI_doc_mptr_t const updateJson (TRI_doc_collection_t*, TRI_json_t const*, TRI_voc_did_t, TRI_voc_rid_t @FA{rid}, TRI_voc_rid_t* @FA{current}, TRI_doc_update_policy_e, bool @FA{release})}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Updates an existing document of the collection and returns the master
/// pointer in case of success. Otherwise, @c false is returned and the
/// "TRI_errno()" is accordingly. If release is true, it will release the write
/// lock as soon as possible.
/// As before, but instead of a shaped json a json object must be given.
///
/// <b><tt>bool updateLock (TRI_doc_collection_t*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_doc_update_policy_e)</tt></b>
/// @FUN{bool updateLock (TRI_doc_collection_t*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t @FA{rid}, TRI_voc_rid_t* @FA{current}, TRI_doc_update_policy_e)}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Updates an existing document of the collection and returns @c true in case
/// As before, but the function will acquire and release the write lock.
///
/// @FUN{bool destroy (TRI_doc_collection_t*, TRI_voc_did_t, TRI_voc_rid_t @FA{rid}, TRI_voc_rid_t* @FA{current}, TRI_doc_update_policy_e, bool @FA{release})}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// Deletes an existing document from the given collection and returns @c true in case
/// of success. Otherwise, @c false is returned and the "TRI_errno()" is set
/// accordingly. The function will acquire and release a write lock.
/// accordingly. The function DOES NOT acquire a write lock. However, if
/// @FA{release} is true, it will release the write lock as soon as possible.
///
/// <b><tt>bool destroy (TRI_doc_collection_t*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release)</tt></b>
/// If the policy is @ref TRI_DOC_UPDATE_ERROR and the revision is given, then
/// it must match the current revision of the document. If the delete was
/// executed, then @FA{current} contains the last valid revision of the
/// document. If the delete was aborted, then @FA{current} contains the revision
/// of the still-alive document.
///
/// Deletes an existing document of the collection and returns @c true in case
/// of success. Otherwise, @c false is returned and the "TRI_errno()" is
/// accordingly. The function DOES NOT acquire or release a write lock.
/// However, if release is true, it will release the write lock as soon as
/// possible.
/// @FUN{bool destroyLock (TRI_doc_collection_t*, TRI_voc_did_t, TRI_voc_rid_t @FA{rid}, TRI_voc_rid_t* @FA{current}, TRI_doc_update_policy_e)}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///
/// If the policy is @c TRI_DOC_UPDATE_ERROR and the revision is given, then
/// it must match the current revision of the document.
/// As before, but the function will acquire and release the write lock.
///
/// <b><tt>bool destroyLock (TRI_doc_collection_t*, TRI_voc_did_t, TRI_voc_rid_t, TRI_doc_update_policy_e)</tt></b>
/// @FUN{TRI_doc_collection_info_t* figures (TRI_doc_collection_t*)}
////////////////////////////////////////////////////////////////////
///
/// Deletes an existing document of the collection and returns @c true in case
/// of success. Otherwise, @c false is returned and the "TRI_errno()" is
/// accordingly. The function will acquire and release a write lock.
///
/// <b><tt>TRI_doc_collection_info_t* figures (TRI_doc_collection_t*)</tt></b>
///
/// Returns information about the collection. You must hold a read lock and
/// must destroy the result after usage.
/// Returns information about the collection. You must hold a read lock and must
/// destroy the result after usage.
////////////////////////////////////////////////////////////////////////////////
typedef struct TRI_doc_collection_s {
@ -257,14 +262,14 @@ typedef struct TRI_doc_collection_s {
void (*createHeader) (struct TRI_doc_collection_s*, TRI_datafile_t*, TRI_df_marker_t const*, size_t, TRI_doc_mptr_t*, void const* data);
void (*updateHeader) (struct TRI_doc_collection_s*, TRI_datafile_t*, TRI_df_marker_t const*, size_t, TRI_doc_mptr_t const*, TRI_doc_mptr_t*);
TRI_doc_mptr_t const* (*create) (struct TRI_doc_collection_s*, TRI_df_marker_type_e, TRI_shaped_json_t const*, void const*, bool release);
TRI_doc_mptr_t const* (*createJson) (struct TRI_doc_collection_s*, TRI_df_marker_type_e, TRI_json_t const*, void const*, bool release);
TRI_doc_mptr_t const (*create) (struct TRI_doc_collection_s*, TRI_df_marker_type_e, TRI_shaped_json_t const*, void const*, bool release);
TRI_doc_mptr_t const (*createJson) (struct TRI_doc_collection_s*, TRI_df_marker_type_e, TRI_json_t const*, void const*, bool release);
TRI_voc_did_t (*createLock) (struct TRI_doc_collection_s*, TRI_df_marker_type_e, TRI_shaped_json_t const*, void const*);
TRI_doc_mptr_t const* (*read) (struct TRI_doc_collection_s*, TRI_voc_did_t);
TRI_doc_mptr_t const (*read) (struct TRI_doc_collection_s*, TRI_voc_did_t);
TRI_doc_mptr_t const* (*update) (struct TRI_doc_collection_s*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release);
TRI_doc_mptr_t const* (*updateJson) (struct TRI_doc_collection_s*, TRI_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release);
TRI_doc_mptr_t const (*update) (struct TRI_doc_collection_s*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release);
TRI_doc_mptr_t const (*updateJson) (struct TRI_doc_collection_s*, TRI_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release);
bool (*updateLock) (struct TRI_doc_collection_s*, TRI_shaped_json_t const*, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e);
bool (*destroy) (struct TRI_doc_collection_s* collection, TRI_voc_did_t, TRI_voc_rid_t, TRI_voc_rid_t*, TRI_doc_update_policy_e, bool release);
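
The comment block above now documents the locking protocol together with the by-value return convention used by the function pointers just listed. A compact sketch of a read followed by a revision-guarded update under those rules; everything outside the listed struct members and documented enums is illustrative:

```c
#include <stdbool.h>
#include "document-collection.h"   /* assumed include, for the types used below */

/* sketch: guarded read-then-update against a TRI_doc_collection_t */
static bool ExampleGuardedUpdate (TRI_doc_collection_t* collection,
                                  TRI_voc_did_t did,
                                  TRI_shaped_json_t const* newBody) {
  TRI_doc_mptr_t document;
  TRI_doc_mptr_t updated;
  TRI_voc_rid_t currentRid = 0;

  /* reads require beginRead/endRead; read returns a copy of the master pointer */
  collection->beginRead(collection);
  document = collection->read(collection, did);
  collection->endRead(collection);

  if (document._did == 0) {
    return false;                   /* document unknown or already deleted */
  }

  /* TRI_DOC_UPDATE_ERROR: only update if the revision is still the one we read;
     release == true gives the write lock back as soon as possible */
  collection->beginWrite(collection);
  updated = collection->update(collection, newBody, did,
                               document._rid, &currentRid,
                               TRI_DOC_UPDATE_ERROR, true);

  return updated._did != 0;
}
```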

View File

@ -42,6 +42,113 @@
// --SECTION-- INDEX
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- private functions
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief read the fields of an index from a json structure and return them
////////////////////////////////////////////////////////////////////////////////
static TRI_vector_string_t* GetFieldsIndex (const TRI_idx_type_e indexType,
TRI_json_t* json,
TRI_index_geo_variant_e* geoVariant) {
TRI_vector_string_t* fields;
TRI_json_t* strVal;
TRI_json_t* strVal2;
char* temp1;
char* temp2;
char* temp3;
uint32_t numFields;
size_t i;
*geoVariant = INDEX_GEO_NONE;
fields = (TRI_vector_string_t*) TRI_Allocate(sizeof(TRI_vector_string_t));
if (!fields) {
return NULL;
}
TRI_InitVectorString(fields);
if (indexType == TRI_IDX_TYPE_GEO_INDEX) {
strVal = TRI_LookupArrayJson(json, "location");
if (!strVal || strVal->_type != TRI_JSON_STRING) {
strVal = TRI_LookupArrayJson(json, "latitude");
if (!strVal || strVal->_type != TRI_JSON_STRING) {
return fields;
}
strVal2 = TRI_LookupArrayJson(json, "longitude");
if (!strVal2 || strVal2->_type != TRI_JSON_STRING) {
return fields;
}
temp1 = TRI_DuplicateString(strVal->_value._string.data);
if (!temp1) {
return fields;
}
TRI_PushBackVectorString(fields, temp1);
temp1 = TRI_DuplicateString(strVal2->_value._string.data);
if (!temp1) {
return fields;
}
TRI_PushBackVectorString(fields, temp1);
*geoVariant = INDEX_GEO_INDIVIDUAL_LAT_LON;
}
else {
*geoVariant = INDEX_GEO_COMBINED_LON_LAT;
strVal2 = TRI_LookupArrayJson(json, "geoJson");
if (strVal2 && strVal2->_type == TRI_JSON_BOOLEAN) {
if (strVal2->_value._boolean) {
*geoVariant = INDEX_GEO_COMBINED_LAT_LON;
}
}
TRI_PushBackVectorString(fields, strVal->_value._string.data);
}
}
else {
// read number of fields
strVal = TRI_LookupArrayJson(json, "fieldCount");
if (!strVal || strVal->_type != TRI_JSON_NUMBER) {
return fields;
}
numFields = (uint32_t) strVal->_value._number;
if (numFields == 0) {
return fields;
}
// read field names
for (i = 0; i < numFields ; i++) {
temp1 = TRI_StringUInt32(i);
if (temp1) {
temp2 = TRI_Concatenate2String("field_", temp1);
if (temp2) {
strVal = TRI_LookupArrayJson(json, temp2);
if (strVal && strVal->_type == TRI_JSON_STRING) {
temp3 = TRI_DuplicateString(strVal->_value._string.data);
if (temp3) {
TRI_PushBackVectorString(fields, temp3);
}
}
TRI_FreeString(temp2);
}
TRI_FreeString(temp1);
}
}
}
return fields;
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
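
GetFieldsIndex above recovers the indexed attribute names from a stored JSON index definition: geo indexes carry either a single "location" attribute (with an optional "geoJson" flag that switches the coordinate order of the combined variant) or a "latitude"/"longitude" pair, while hash and skiplist indexes carry a "fieldCount" plus "field_0" .. "field_(n-1)" entries. Illustrative definitions in that shape; only the key names come from the code above, the values are made up:

```c
/* geo index over one attribute holding a coordinate pair; "geoJson" switches
   the order of the combined variant */
static const char* ExampleGeoCombined =
  "{ \"location\" : \"home\", \"geoJson\" : true }";

/* geo index over separate latitude / longitude attributes */
static const char* ExampleGeoSplit =
  "{ \"latitude\" : \"lat\", \"longitude\" : \"lon\" }";

/* hash or skiplist index: fieldCount plus field_0 .. field_(fieldCount-1) */
static const char* ExampleHashOrSkiplist =
  "{ \"fieldCount\" : 2, \"field_0\" : \"name\", \"field_1\" : \"age\" }";
```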
// -----------------------------------------------------------------------------
// --SECTION-- public functions
// -----------------------------------------------------------------------------
@ -217,97 +324,22 @@ void TRI_FreeIndexDefinitions (TRI_vector_pointer_t* definitions) {
}
////////////////////////////////////////////////////////////////////////////////
/// @brief read the fields of an index from a json structure and return them
/// @brief gets name of index type
////////////////////////////////////////////////////////////////////////////////
static TRI_vector_string_t* GetFieldsIndex (const TRI_idx_type_e indexType,
TRI_json_t* json,
TRI_index_geo_variant_e* geoVariant) {
TRI_vector_string_t* fields;
TRI_json_t* strVal;
TRI_json_t* strVal2;
char* temp1;
char* temp2;
char* temp3;
uint32_t numFields;
size_t i;
*geoVariant = INDEX_GEO_NONE;
fields = (TRI_vector_string_t*) TRI_Allocate(sizeof(TRI_vector_string_t));
if (!fields) {
return NULL;
char* TRI_GetTypeNameIndex (const TRI_index_definition_t* const indexDefinition) {
switch (indexDefinition->_type) {
case TRI_IDX_TYPE_HASH_INDEX:
return "hash";
case TRI_IDX_TYPE_SKIPLIST_INDEX:
return "skiplist";
case TRI_IDX_TYPE_GEO_INDEX:
return "geo";
case TRI_IDX_TYPE_PRIMARY_INDEX:
return "primary";
}
TRI_InitVectorString(fields);
if (indexType == TRI_IDX_TYPE_GEO_INDEX) {
strVal = TRI_LookupArrayJson(json, "location");
if (!strVal || strVal->_type != TRI_JSON_STRING) {
strVal = TRI_LookupArrayJson(json, "latitude");
if (!strVal || strVal->_type != TRI_JSON_STRING) {
return fields;
}
strVal2 = TRI_LookupArrayJson(json, "longitude");
if (!strVal2 || strVal2->_type != TRI_JSON_STRING) {
return fields;
}
temp1 = TRI_DuplicateString(strVal->_value._string.data);
if (!temp1) {
return fields;
}
TRI_PushBackVectorString(fields, temp1);
temp1 = TRI_DuplicateString(strVal2->_value._string.data);
if (!temp1) {
return fields;
}
TRI_PushBackVectorString(fields, temp1);
*geoVariant = INDEX_GEO_INDIVIDUAL_LAT_LON;
}
else {
*geoVariant = INDEX_GEO_COMBINED_LON_LAT;
strVal2 = TRI_LookupArrayJson(json, "geoJson");
if (strVal2 && strVal2->_type == TRI_JSON_BOOLEAN) {
if (strVal2->_value._boolean) {
*geoVariant = INDEX_GEO_COMBINED_LAT_LON;
}
}
TRI_PushBackVectorString(fields, strVal->_value._string.data);
}
}
else {
// read number of fields
strVal = TRI_LookupArrayJson(json, "fieldCount");
if (!strVal || strVal->_type != TRI_JSON_NUMBER) {
return fields;
}
numFields = (uint32_t) strVal->_value._number;
if (numFields == 0) {
return fields;
}
// read field names
for (i = 0; i < numFields ; i++) {
temp1 = TRI_StringUInt32(i);
if (temp1) {
temp2 = TRI_Concatenate2String("field_", temp1);
if (temp2) {
strVal = TRI_LookupArrayJson(json, temp2);
if (strVal && strVal->_type == TRI_JSON_STRING) {
temp3 = TRI_DuplicateString(strVal->_value._string.data);
if (temp3) {
TRI_PushBackVectorString(fields, temp3);
}
}
TRI_FreeString(temp2);
}
TRI_FreeString(temp1);
}
}
}
return fields;
return "unknown";
}
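A brief usage sketch for the new helper (illustrative only; LogIndexString in query-join-execute.c further below does essentially this), assuming indexDefinition points to a valid TRI_index_definition_t:
  LOG_DEBUG("index %lu is of type '%s'",
            (unsigned long) indexDefinition->_iid,
            TRI_GetTypeNameIndex(indexDefinition));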
////////////////////////////////////////////////////////////////////////////////
@ -1390,7 +1422,7 @@ static TRI_json_t* JsonHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
return NULL;
}
fieldCounter = TRI_Allocate(30);
fieldCounter = TRI_Allocate(64); // used below to store strings like "field_ddd"
if (!fieldCounter) {
TRI_Free(fieldList);
@ -2108,7 +2140,8 @@ static TRI_json_t* JsonSkiplistIndex (TRI_index_t* idx, TRI_doc_collection_t* co
return NULL;
}
fieldCounter = TRI_Allocate(30);
fieldCounter = TRI_Allocate(64);
if (!fieldCounter) {
TRI_Free(fieldList);
TRI_FreeJson(json);

View File

@ -60,7 +60,7 @@ struct TRI_sim_collection_s;
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief index indetifier
/// @brief index identifier
////////////////////////////////////////////////////////////////////////////////
typedef TRI_voc_tick_t TRI_idx_iid_t;
@ -208,6 +208,12 @@ void TRI_FreeIndexDefinition (TRI_index_definition_t*);
void TRI_FreeIndexDefinitions (TRI_vector_pointer_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief gets name of index type
////////////////////////////////////////////////////////////////////////////////
char* TRI_GetTypeNameIndex (const TRI_index_definition_t* const);
////////////////////////////////////////////////////////////////////////////////
/// @brief gets the definitions of all index files for a collection
////////////////////////////////////////////////////////////////////////////////
@ -263,7 +269,7 @@ TRI_index_t* TRI_CreateGeoIndex2 (struct TRI_doc_collection_s*,
/// @brief frees the memory allocated, but does not free the pointer
////////////////////////////////////////////////////////////////////////////////
void TRI_DestoryGeoIndex (TRI_index_t*);
void TRI_DestroyGeoIndex (TRI_index_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief frees the memory allocated and frees the pointer
@ -379,11 +385,10 @@ HashIndexElements* TRI_LookupHashIndex (TRI_index_t*, TRI_json_t*);
/// @{
////////////////////////////////////////////////////////////////////////////////
TRI_skiplist_iterator_t* TRI_LookupSkiplistIndex (TRI_index_t*, TRI_sl_operator_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a hash-index
/// @brief creates a skiplist index
////////////////////////////////////////////////////////////////////////////////
TRI_index_t* TRI_CreateSkiplistIndex (struct TRI_doc_collection_s*,

View File

@ -25,10 +25,13 @@
/// @author Copyright 2012, triagens GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#include <BasicsC/logging.h>
#include "VocBase/query-data-feeder.h"
#include "VocBase/query-join.h"
#include "V8/v8-c-utils.h"
#include "QL/optimize.h"
#include "SkipLists/sl-operator.h"
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
@ -301,7 +304,7 @@ static void InitFeederPrimaryLookup (TRI_data_feeder_t* feeder) {
// ref access
feeder->_accessType = ACCESS_REF;
buffer = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
buffer = TRI_CreateStringBuffer();
if (!buffer) {
return;
}
@ -314,8 +317,7 @@ static void InitFeederPrimaryLookup (TRI_data_feeder_t* feeder) {
TRI_AppendStringStringBuffer(buffer, "] })");
state->_context = TRI_CreateExecutionContext(buffer->_buffer);
TRI_DestroyStringBuffer(buffer);
TRI_Free(buffer);
TRI_FreeStringBuffer(buffer);
if (!state->_context) {
return;
@ -538,7 +540,7 @@ static void InitFeederHashLookup (TRI_data_feeder_t* feeder) {
if (range->_valueType == RANGE_TYPE_FIELD) {
// ref access
feeder->_accessType = ACCESS_REF;
buffer = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
buffer = TRI_CreateStringBuffer();
if (!buffer) {
return;
}
@ -557,8 +559,7 @@ static void InitFeederHashLookup (TRI_data_feeder_t* feeder) {
TRI_AppendStringStringBuffer(buffer, "] })");
state->_context = TRI_CreateExecutionContext(buffer->_buffer);
TRI_DestroyStringBuffer(buffer);
TRI_Free(buffer);
TRI_FreeStringBuffer(buffer);
if (!state->_context) {
return;
@ -771,6 +772,109 @@ static void FreeSkiplistElements (SkiplistIndexElements* elements) {
TRI_Free(elements);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief create a skiplist single-value operator
////////////////////////////////////////////////////////////////////////////////
static TRI_sl_operator_t* CreateSkipListValueOperator (const TRI_sl_operator_type_e type,
const QL_optimize_range_t* const range,
const bool useMax) {
TRI_sl_operator_t* operator;
TRI_json_t* parameters = TRI_CreateListJson();
if (!parameters) {
return NULL;
}
if (range->_valueType == RANGE_TYPE_STRING) {
if (useMax) {
TRI_PushBack2ListJson(parameters, TRI_CreateStringCopyJson(range->_maxValue._stringValue));
}
else {
TRI_PushBack2ListJson(parameters, TRI_CreateStringCopyJson(range->_minValue._stringValue));
}
}
else if (range->_valueType == RANGE_TYPE_DOUBLE) {
if (useMax) {
TRI_PushBack2ListJson(parameters, TRI_CreateNumberJson(range->_maxValue._doubleValue));
}
else {
TRI_PushBack2ListJson(parameters, TRI_CreateNumberJson(range->_minValue._doubleValue));
}
}
else if (range->_valueType == RANGE_TYPE_JSON) {
TRI_json_t* doc = TRI_JsonString(range->_minValue._stringValue);
if (!doc) {
TRI_FreeJson(parameters);
return NULL;
}
TRI_PushBackListJson(parameters, doc);
}
operator = CreateSLOperator(type, NULL, NULL, parameters, NULL, 1, NULL);
return operator;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief create a skiplist operation (complete instruction)
////////////////////////////////////////////////////////////////////////////////
static TRI_sl_operator_t* CreateSkipListOperation (TRI_data_feeder_t* feeder) {
TRI_sl_operator_t* lastOp = NULL;
size_t i;
for (i = 0; i < feeder->_ranges->_length; i++) {
TRI_sl_operator_t* op = NULL;
QL_optimize_range_t* range;
range = (QL_optimize_range_t*) feeder->_ranges->_buffer[i];
if (range->_minStatus == RANGE_VALUE_INFINITE &&
range->_maxStatus == RANGE_VALUE_INCLUDED) {
// oo .. x|
op = CreateSkipListValueOperator(TRI_SL_LE_OPERATOR, range, true);
}
else if (range->_minStatus == RANGE_VALUE_INFINITE &&
range->_maxStatus == RANGE_VALUE_EXCLUDED) {
// oo .. |x
op = CreateSkipListValueOperator(TRI_SL_LT_OPERATOR, range, true);
}
else if (range->_minStatus == RANGE_VALUE_INCLUDED &&
range->_maxStatus == RANGE_VALUE_INFINITE) {
// |x .. oo
op = CreateSkipListValueOperator(TRI_SL_GE_OPERATOR, range, false);
}
else if (range->_minStatus == RANGE_VALUE_EXCLUDED &&
range->_maxStatus == RANGE_VALUE_INFINITE) {
// x| .. oo
op = CreateSkipListValueOperator(TRI_SL_GT_OPERATOR, range, false);
}
else if (range->_minStatus == RANGE_VALUE_INCLUDED &&
range->_maxStatus == RANGE_VALUE_INCLUDED) {
// x
if ((range->_valueType == RANGE_TYPE_DOUBLE && range->_minValue._doubleValue == range->_maxValue._doubleValue) ||
(range->_valueType == RANGE_TYPE_STRING && strcmp(range->_minValue._stringValue, range->_maxValue._stringValue) == 0) ||
(range->_valueType == RANGE_TYPE_JSON)) {
op = CreateSkipListValueOperator(TRI_SL_EQ_OPERATOR, range, true);
}
}
if (op == NULL) {
continue;
}
if (lastOp != NULL) {
lastOp = CreateSLOperator(TRI_SL_AND_OPERATOR, op, lastOp, NULL, NULL, 2, NULL);
}
else {
lastOp = op;
}
}
return lastOp;
}
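To illustrate the composition above with two hypothetical ranges (range1: a >= 1 with an infinite upper bound, range2: b < 10 with an infinite lower bound), the loop emits one relational operator per range and chains them with TRI_SL_AND_OPERATOR, newest operator first:
  TRI_sl_operator_t* ge       = CreateSkipListValueOperator(TRI_SL_GE_OPERATOR, range1, false); // |1 .. oo
  TRI_sl_operator_t* lt       = CreateSkipListValueOperator(TRI_SL_LT_OPERATOR, range2, true);  // oo .. |10
  TRI_sl_operator_t* combined = CreateSLOperator(TRI_SL_AND_OPERATOR, lt, ge, NULL, NULL, 2, NULL);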
////////////////////////////////////////////////////////////////////////////////
/// @brief init skiplist data feeder
////////////////////////////////////////////////////////////////////////////////
@ -778,17 +882,13 @@ static void FreeSkiplistElements (SkiplistIndexElements* elements) {
static void InitFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
QL_optimize_range_t* range;
TRI_data_feeder_skiplist_lookup_t* state;
TRI_json_t* parameters;
TRI_json_t* doc;
TRI_string_buffer_t* buffer;
size_t i;
state = (TRI_data_feeder_skiplist_lookup_t*) feeder->_state;
state->_isEmpty = true;
state->_context = NULL;
state->_position = 0;
state->_skiplistElements = NULL;
state->_skiplistIterator = NULL;
state->_index = TRI_IndexSimCollection((TRI_sim_collection_t*) feeder->_collection,
feeder->_indexId);
if (!state->_index) {
@ -799,9 +899,12 @@ static void InitFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
range = (QL_optimize_range_t*) feeder->_ranges->_buffer[0];
if (range->_valueType == RANGE_TYPE_FIELD) {
TRI_string_buffer_t* buffer;
size_t i;
// ref access
feeder->_accessType = ACCESS_REF;
buffer = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
buffer = TRI_CreateStringBuffer();
if (!buffer) {
return;
}
@ -820,43 +923,23 @@ static void InitFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
TRI_AppendStringStringBuffer(buffer, "] })");
state->_context = TRI_CreateExecutionContext(buffer->_buffer);
TRI_DestroyStringBuffer(buffer);
TRI_Free(buffer);
TRI_FreeStringBuffer(buffer);
if (!state->_context) {
return;
}
}
else {
TRI_sl_operator_t* skipListOperation;
// const access
feeder->_accessType = ACCESS_CONST;
parameters = TRI_CreateListJson();
if (!parameters) {
skipListOperation = CreateSkipListOperation(feeder);
if (!skipListOperation) {
return;
}
for (i = 0; i < feeder->_ranges->_length; i++) {
range = (QL_optimize_range_t*) feeder->_ranges->_buffer[i];
if (range->_valueType == RANGE_TYPE_STRING) {
TRI_PushBack2ListJson(parameters,
TRI_CreateStringCopyJson(range->_minValue._stringValue));
}
else if (range->_valueType == RANGE_TYPE_DOUBLE) {
TRI_PushBack2ListJson(parameters,
TRI_CreateNumberJson(range->_minValue._doubleValue));
}
else if (range->_valueType == RANGE_TYPE_JSON) {
doc = TRI_JsonString(range->_minValue._stringValue);
if (!doc) {
TRI_FreeJson(parameters);
return;
}
TRI_PushBackListJson(parameters, doc);
}
}
state->_skiplistElements = TRI_LookupSkiplistIndex(state->_index, parameters);
// TODO: properly free parameters
TRI_FreeJson(parameters);
state->_skiplistIterator = TRI_LookupSkiplistIndex(state->_index, skipListOperation);
}
state->_isEmpty = false;
@ -874,10 +957,11 @@ static void RewindFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
state->_position = 0;
if (feeder->_accessType == ACCESS_REF) {
if (state->_skiplistElements) {
FreeSkiplistElements(state->_skiplistElements);
if (state->_skiplistIterator) {
// TODO: free skiplist iterator!
// FreeSkiplistElements(state->_skiplistElements);
}
state->_skiplistElements = NULL;
state->_skiplistIterator = NULL;
if (!state->_context) {
return;
@ -891,7 +975,8 @@ static void RewindFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
feeder->_level,
true);
if (TRI_ExecuteRefExecutionContext (state->_context, parameters)) {
state->_skiplistElements = TRI_LookupSkiplistIndex(state->_index, parameters);
// TODO: fix
// state->_skiplistIterator = TRI_LookupSkiplistIndex(state->_index, parameters);
}
TRI_FreeJson(parameters);
@ -904,24 +989,24 @@ static void RewindFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
static bool CurrentFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
TRI_data_feeder_skiplist_lookup_t* state;
SkiplistIndexElement* indexElement;
TRI_doc_mptr_t* document;
TRI_join_part_t* part;
state = (TRI_data_feeder_skiplist_lookup_t*) feeder->_state;
part = (TRI_join_part_t*) feeder->_part;
if (state->_isEmpty || !state->_skiplistElements) {
if (state->_isEmpty || !state->_skiplistIterator) {
part->_singleDocument = NULL;
return false;
}
while (state->_position < state->_skiplistElements->_numElements) {
document = (TRI_doc_mptr_t*) ((state->_skiplistElements->_elements[state->_position++]).data);
if (document && !document->_deletion) {
indexElement = (SkiplistIndexElement*) state->_skiplistIterator->_next(state->_skiplistIterator);
if (indexElement) {
document = (TRI_doc_mptr_t*) indexElement->data;
part->_singleDocument = document;
return true;
}
}
part->_singleDocument = NULL;
return false;
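A consumption sketch for the new iterator-based lookup (index and operation are hypothetical variables; freeing the iterator is still marked as a TODO in this commit):
  TRI_skiplist_iterator_t* iterator = TRI_LookupSkiplistIndex(index, operation);
  if (iterator) {
    SkiplistIndexElement* element;
    while ((element = (SkiplistIndexElement*) iterator->_next(iterator)) != NULL) {
      TRI_doc_mptr_t* document = (TRI_doc_mptr_t*) element->data;
      // process document here
    }
  }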
@ -935,8 +1020,9 @@ static void FreeFeederSkiplistLookup (TRI_data_feeder_t* feeder) {
TRI_data_feeder_skiplist_lookup_t* state;
state = (TRI_data_feeder_skiplist_lookup_t*) feeder->_state;
if (state->_skiplistElements) {
FreeSkiplistElements(state->_skiplistElements);
if (state->_skiplistIterator) {
// TODO: free iterator!!!!!!
// FreeSkiplistElements(state->_skiplistElements);
}
if (state->_context) {

View File

@ -388,7 +388,7 @@ TRI_data_feeder_t* TRI_CreateDataFeederHashLookup (TRI_query_instance_t* const,
typedef struct TRI_data_feeder_skiplist_lookup_s {
bool _isEmpty;
TRI_index_t* _index;
SkiplistIndexElements* _skiplistElements;
TRI_skiplist_iterator_t* _skiplistIterator;
TRI_js_exec_context_t _context;
size_t _position;
}

View File

@ -209,13 +209,15 @@ TRI_query_cursor_t* TRI_ExecuteQueryInstance (TRI_query_instance_t* const instan
if (instance->_query._order._type == QLQueryOrderTypeMustEvaluate) {
cursor->_result._orderContext = TRI_CreateExecutionContext(instance->_query._order._functionCode);
if (cursor->_result._orderContext) {
LOG_DEBUG("performing order by");
TRI_OrderDataQuery(&cursor->_result);
}
TRI_FreeExecutionContext(cursor->_result._orderContext);
}
}
// apply a negative limit or a limit after ordering
if (applyPostSkipLimit) {
LOG_DEBUG("applying post-order skip/limit");
TransformDataSkipLimit(&cursor->_result,
instance->_query._limit._offset,
instance->_query._limit._count);

View File

@ -87,13 +87,12 @@ TRI_query_javascript_converter_t* TRI_InitQueryJavascript (void) {
converter->_buffer = NULL;
converter->_prefix = NULL;
buffer = (TRI_string_buffer_t*) TRI_Allocate(sizeof(TRI_string_buffer_t));
buffer = TRI_CreateStringBuffer();
if (!buffer) {
TRI_Free(converter);
return NULL;
}
TRI_InitStringBuffer(buffer);
converter->_buffer = buffer;
return converter;
@ -107,8 +106,7 @@ void TRI_FreeQueryJavascript (TRI_query_javascript_converter_t* converter) {
assert(converter);
assert(converter->_buffer);
TRI_DestroyStringBuffer(converter->_buffer);
TRI_Free(converter->_buffer);
TRI_FreeStringBuffer(converter->_buffer);
TRI_Free(converter);
}
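The same allocation change recurs throughout this commit; a minimal sketch of the paired helpers it switches to, replacing the former TRI_Allocate + TRI_InitStringBuffer and TRI_DestroyStringBuffer + TRI_Free sequences:
  TRI_string_buffer_t* buffer = TRI_CreateStringBuffer();
  if (buffer) {
    TRI_AppendStringStringBuffer(buffer, "example");
    // ... use buffer->_buffer ...
    TRI_FreeStringBuffer(buffer);
  }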

View File

@ -26,6 +26,7 @@
////////////////////////////////////////////////////////////////////////////////
#include <BasicsC/logging.h>
#include <BasicsC/string-buffer.h>
#include "VocBase/query-join-execute.h"
#include "VocBase/query-join.h"
@ -35,6 +36,36 @@
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief log information about the used index
////////////////////////////////////////////////////////////////////////////////
static void LogIndexString(const TRI_index_definition_t* const indexDefinition,
const char* const collectionName) {
TRI_string_buffer_t* buffer = TRI_CreateStringBuffer();
size_t i;
if (!buffer) {
return;
}
for (i = 0; i < indexDefinition->_fields->_length; i++) {
if (i > 0) {
TRI_AppendStringStringBuffer(buffer, ", ");
}
TRI_AppendStringStringBuffer(buffer, indexDefinition->_fields->_buffer[i]);
}
LOG_DEBUG("using %s index (%s) for '%s'",
TRI_GetTypeNameIndex(indexDefinition),
buffer->_buffer,
collectionName);
TRI_FreeStringBuffer(buffer);
}
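For illustration (hypothetical attributes and alias), a skiplist index on the attributes a and b used for a part aliased 'users' would produce a debug line along the lines of:
  using skiplist index (a, b) for 'users'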
////////////////////////////////////////////////////////////////////////////////
/// @brief Determine which geo indexes to use in a query - DEPRECATED
////////////////////////////////////////////////////////////////////////////////
@ -333,7 +364,7 @@ static TRI_data_feeder_t* DetermineGeoIndexUsage (TRI_query_instance_t* const in
indexDefinition->_iid,
part->_geoRestriction);
LOG_DEBUG("using geo index for '%s'", part->_alias);
LogIndexString(indexDefinition, part->_alias);
break;
}
@ -488,7 +519,7 @@ static TRI_data_feeder_t* DetermineIndexUsage (TRI_query_instance_t* const insta
if (feeder) {
// we always exit if we can use the primary index
// the primary index guarantees uniqueness
LOG_DEBUG("using primary index for '%s'", part->_alias);
LogIndexString(indexDefinition, part->_alias);
goto EXIT;
}
}
@ -513,7 +544,7 @@ static TRI_data_feeder_t* DetermineIndexUsage (TRI_query_instance_t* const insta
indexDefinition->_iid,
TRI_CopyVectorPointer(&matches));
LOG_DEBUG("using skiplist index for '%s'", part->_alias);
LogIndexString(indexDefinition, part->_alias);
}
else {
feeder =
@ -523,7 +554,7 @@ static TRI_data_feeder_t* DetermineIndexUsage (TRI_query_instance_t* const insta
indexDefinition->_iid,
TRI_CopyVectorPointer(&matches));
LOG_DEBUG("using hash index for '%s'", part->_alias);
LogIndexString(indexDefinition, part->_alias);
}
if (!feeder) {

View File

@ -246,7 +246,7 @@ static void CreateHeader (TRI_doc_collection_t* c,
/// @brief creates a new document split into marker and body to file
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
static TRI_doc_mptr_t CreateDocument (TRI_sim_collection_t* collection,
TRI_doc_document_marker_t* marker,
size_t markerSize,
void const* body,
@ -257,6 +257,7 @@ static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
TRI_datafile_t* journal;
TRI_doc_mptr_t* header;
TRI_doc_mptr_t mptr;
TRI_voc_size_t total;
TRI_doc_datafile_info_t* dfi;
bool ok;
@ -278,7 +279,8 @@ static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
return NULL;
mptr._did = 0;
return mptr;
}
// verify the header pointer
@ -305,7 +307,9 @@ static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
// update immediate indexes
CreateImmediateIndexes(collection, header);
// release lock
mptr = *header;
// release lock, header might be invalid after this
if (release) {
collection->base.endWrite(&collection->base);
}
@ -314,7 +318,7 @@ static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
WaitSync(collection, journal, ((char const*) *result) + markerSize + bodySize);
// and return
return header;
return mptr;
}
else {
if (release) {
@ -322,7 +326,8 @@ static TRI_doc_mptr_t* CreateDocument (TRI_sim_collection_t* collection,
}
LOG_ERROR("cannot write element: %s", TRI_last_error());
return NULL;
mptr._did = 0;
return mptr;
}
}
@ -353,7 +358,7 @@ static void UpdateHeader (TRI_doc_collection_t* c,
/// @brief updates an existing document split into marker and body to file
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
TRI_doc_mptr_t const* header,
TRI_doc_document_marker_t* marker,
size_t markerSize,
@ -364,6 +369,7 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
TRI_doc_update_policy_e policy,
TRI_df_marker_t** result,
bool release) {
TRI_doc_mptr_t mptr;
TRI_datafile_t* journal;
TRI_voc_size_t total;
bool ok;
@ -383,7 +389,8 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
return NULL;
mptr._did = 0;
return mptr;
}
}
@ -399,7 +406,8 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
return NULL;
mptr._did = 0;
return mptr;
case TRI_DOC_UPDATE_ILLEGAL:
TRI_set_errno(TRI_VOC_ERROR_ILLEGAL_PARAMETER);
@ -408,7 +416,8 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
return NULL;
mptr._did = 0;
return mptr;
}
// generate a new tick
@ -425,7 +434,8 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
return NULL;
mptr._did = 0;
return mptr;
}
// generate crc
@ -459,7 +469,9 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
// update immediate indexes
UpdateImmediateIndexes(collection, header, &update);
// release lock
mptr = *header;
// release lock, header might be invalid after this
if (release) {
collection->base.endWrite(&collection->base);
}
@ -468,7 +480,7 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
WaitSync(collection, journal, ((char const*) *result) + markerSize + bodySize);
// and return
return header;
return mptr;
}
else {
if (release) {
@ -476,7 +488,8 @@ static TRI_doc_mptr_t const* UpdateDocument (TRI_sim_collection_t* collection,
}
LOG_ERROR("cannot write element");
return NULL;
mptr._did = 0;
return mptr;
}
}
@ -721,7 +734,7 @@ static void DebugHeaderSimCollection (TRI_sim_collection_t* collection) {
/// @brief creates a new document in the collection from shaped json
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* CreateShapedJson (TRI_doc_collection_t* document,
static TRI_doc_mptr_t const CreateShapedJson (TRI_doc_collection_t* document,
TRI_df_marker_type_e type,
TRI_shaped_json_t const* json,
void const* data,
@ -785,9 +798,10 @@ static TRI_doc_mptr_t const* CreateShapedJson (TRI_doc_collection_t* document,
/// @brief reads an element from the document collection
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* ReadShapedJson (TRI_doc_collection_t* document,
static TRI_doc_mptr_t const ReadShapedJson (TRI_doc_collection_t* document,
TRI_voc_did_t did) {
TRI_sim_collection_t* collection;
TRI_doc_mptr_t result;
TRI_doc_mptr_t const* header;
collection = (TRI_sim_collection_t*) document;
@ -795,10 +809,11 @@ static TRI_doc_mptr_t const* ReadShapedJson (TRI_doc_collection_t* document,
header = TRI_LookupByKeyAssociativePointer(&collection->_primaryIndex, &did);
if (header == NULL || header->_deletion != 0) {
return NULL;
result._did = 0;
return result;
}
else {
return header;
return *header;
}
}
@ -806,7 +821,7 @@ static TRI_doc_mptr_t const* ReadShapedJson (TRI_doc_collection_t* document,
/// @brief updates a document in the collection from shaped json
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const* UpdateShapedJson (TRI_doc_collection_t* document,
static TRI_doc_mptr_t const UpdateShapedJson (TRI_doc_collection_t* document,
TRI_shaped_json_t const* json,
TRI_voc_did_t did,
TRI_voc_rid_t rid,
@ -815,6 +830,7 @@ static TRI_doc_mptr_t const* UpdateShapedJson (TRI_doc_collection_t* document,
bool release) {
TRI_df_marker_t const* original;
TRI_df_marker_t* result;
TRI_doc_mptr_t mptr;
TRI_doc_mptr_t const* header;
TRI_sim_collection_t* collection;
@ -824,8 +840,13 @@ static TRI_doc_mptr_t const* UpdateShapedJson (TRI_doc_collection_t* document,
header = TRI_LookupByKeyAssociativePointer(&collection->_primaryIndex, &did);
if (header == NULL || header->_deletion != 0) {
if (release) {
document->endWrite(&collection->base);
}
TRI_set_errno(TRI_VOC_ERROR_DOCUMENT_NOT_FOUND);
return NULL;
mptr._did = 0;
return mptr;
}
original = header->_data;
@ -890,6 +911,10 @@ static TRI_doc_mptr_t const* UpdateShapedJson (TRI_doc_collection_t* document,
// do not know
else {
if (release) {
document->endWrite(&collection->base);
}
LOG_FATAL("unknown marker type %lu", (unsigned long) original->_type);
exit(EXIT_FAILURE);
}
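A caller-side sketch of the new convention in this file (hypothetical caller, not part of the commit): the master pointer is now returned by value, and a document id of 0 signals failure.
  TRI_doc_mptr_t mptr = CreateShapedJson(document, type, json, data, release);
  if (mptr._did == 0) {
    // the error code has already been set via TRI_set_errno()
    LOG_ERROR("cannot create document: %s", TRI_last_error());
  }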

configure vendored
View File

@ -563,7 +563,7 @@ PACKAGE_TARNAME='avocado'
PACKAGE_VERSION='0.3.3'
PACKAGE_STRING='triAGENS AvocadoDB 0.3.3'
PACKAGE_BUGREPORT='info@triagens.de'
PACKAGE_URL='http://www.worldofvoc.com'
PACKAGE_URL='http://www.avocadodb.org'
# Factoring default headers for most tests.
ac_includes_default="\
@ -1523,7 +1523,7 @@ Use these variables to override the choices made by `configure' or to help
it to find libraries and programs with nonstandard names/locations.
Report bugs to <info@triagens.de>.
triAGENS AvocadoDB home page: <http://www.worldofvoc.com>.
triAGENS AvocadoDB home page: <http://www.avocadodb.org>.
_ACEOF
ac_status=$?
fi
@ -9914,7 +9914,7 @@ Configuration commands:
$config_commands
Report bugs to <info@triagens.de>.
triAGENS AvocadoDB home page: <http://www.worldofvoc.com>."
triAGENS AvocadoDB home page: <http://www.avocadodb.org>."
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1

View File

@ -6,7 +6,7 @@ dnl ============================================================================
dnl PREAMBLE triAGENS GmbH Build Environment
dnl ============================================================================
AC_INIT([triAGENS AvocadoDB], [0.3.3], [info@triagens.de], [avocado], [http://www.worldofvoc.com])
AC_INIT([triAGENS AvocadoDB], [0.3.3], [info@triagens.de], [avocado], [http://www.avocadodb.org])
dnl ----------------------------------------------------------------------------
dnl auxiliary directory for install-sh and missing