
more tests, partial fix for update

Frank Celler 2012-04-02 15:19:51 +02:00
parent 20599a185c
commit abdf471b84
3 changed files with 199 additions and 100 deletions

View File

@@ -716,6 +716,10 @@ bool RestDocumentHandler::updateDocument () {
generateDocumentNotFound(cid, didStr);
return false;
case TRI_ERROR_AVOCADO_UNIQUE_CONSTRAINT_VIOLATED:
generateError(HttpResponse::CONFLICT, res, "cannot update document, unique constraint violated");
return false;
case TRI_ERROR_AVOCADO_CONFLICT:
generatePreconditionFailed(_documentCollection->base._cid, did, rid);
return false;
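
The case added above maps a unique-constraint violation during an update to HTTP 409 Conflict, alongside the existing 404 (document not found) and 412 (precondition failed) paths. A minimal, stand-alone sketch of that mapping, with illustrative enum values rather than the real TRI_ERROR_* constants:

#include <stdio.h>

/* Hypothetical sketch of the error-to-status mapping shown in the hunk
   above; the enum names are illustrative, not AvocadoDB constants. */
enum { ERR_DOCUMENT_NOT_FOUND, ERR_UNIQUE_CONSTRAINT_VIOLATED, ERR_CONFLICT };

static int http_status_for (int err) {
  switch (err) {
    case ERR_DOCUMENT_NOT_FOUND:         return 404;  /* document not found        */
    case ERR_UNIQUE_CONSTRAINT_VIOLATED: return 409;  /* new case: constraint clash */
    case ERR_CONFLICT:                   return 412;  /* revision precondition     */
    default:                             return 500;
  }
}

int main (void) {
  printf("%d\n", http_status_for(ERR_UNIQUE_CONSTRAINT_VIOLATED));  /* prints 409 */
  return 0;
}

The Ruby tests below rely on exactly this 409 response (doc.code.should eq(409)) to detect the violation.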

View File

@@ -5,7 +5,11 @@ describe AvocadoDB do
api = "/_api/index"
prefix = "api-index-unique-constraint"
context "one attribute:" do
################################################################################
## unique constraints during create
################################################################################
context "creating:" do
context "dealing with unique constraints:" do
before do
@cn = "UnitTestsCollectionIndexes"
@@ -26,9 +30,8 @@ describe AvocadoDB do
doc.parsed_response['type'].should eq("hash")
doc.parsed_response['unique'].should eq(true)
cmd1 = "/document?collection=#{@cid}"
# create a document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
@@ -40,14 +43,15 @@ describe AvocadoDB do
rev1 = doc.parsed_response['_rev']
rev1.should be_kind_of(Integer)
cmd2 = "/document/#{id1}"
# check it
cmd2 = "/document/#{id1}"
doc = AvocadoDB.log_get("#{prefix}", cmd2)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should eq(rev1)
# create a unique constraint violation
body = "{ \"a\" : 1, \"b\" : 2 }"
@@ -55,34 +59,138 @@ describe AvocadoDB do
doc.code.should eq(409)
# id2 = doc.parsed_response['_id']
# id2.should be_kind_of(String)
# rev2 = doc.parsed_response['_rev']
# rev2.should be_kind_of(Integer)
# check the old document again
# check it again
doc = AvocadoDB.log_get("#{prefix}", cmd2)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should eq(rev1)
#
# third try (make sure the rollback has not destroyed anything)
body = "{ \"a\" : 1, \"b\" : 3 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc.code.should eq(409)
# id3 = doc.parsed_response['_id']
# id3.should be_kind_of(String)
# check it again
doc = AvocadoDB.log_get("#{prefix}", cmd2)
# rev3 = doc.parsed_response['_rev']
# rev3.should be_kind_of(Integer)
body = "{ \"a\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should eq(rev1)
end
end
end
################################################################################
## unique constraints during update
################################################################################
context "updating:" do
context "dealing with unique constraints:" do
before do
@cn = "UnitTestsCollectionIndexes"
AvocadoDB.drop_collection(@cn)
@cid = AvocadoDB.create_collection(@cn)
end
after do
AvocadoDB.drop_collection(@cn)
end
it "rolls back in case of violation" do
cmd = "/_api/index/#{@cid}"
body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
doc.code.should eq(201)
doc.parsed_response['type'].should eq("hash")
doc.parsed_response['unique'].should eq(true)
# create a document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc.code.should eq(201)
id1 = doc.parsed_response['_id']
id1.should be_kind_of(String)
rev1 = doc.parsed_response['_rev']
rev1.should be_kind_of(Integer)
# check it
cmd2 = "/document/#{id1}"
doc = AvocadoDB.log_get("#{prefix}", cmd2)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should eq(rev1)
# create a second document
body = "{ \"a\" : 2, \"b\" : 2 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc.code.should eq(201)
id2 = doc.parsed_response['_id']
id2.should be_kind_of(String)
rev2 = doc.parsed_response['_rev']
rev2.should be_kind_of(Integer)
# create a unique constraint violation during update
body = "{ \"a\" : 2, \"b\" : 3 }"
doc = AvocadoDB.log_put("#{prefix}", cmd2, :body => body)
doc.code.should eq(409)
# check first document again
doc = AvocadoDB.log_get("#{prefix}", cmd2)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should_not eq(rev1)
rev3 = doc.parsed_response['_rev']
rev3.should be_kind_of(Integer)
# check second document again
cmd3 = "/document/#{id2}"
doc = AvocadoDB.log_get("#{prefix}", cmd3)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(2)
doc.parsed_response['b'].should eq(2)
doc.parsed_response['_id'].should eq(id2)
doc.parsed_response['_rev'].should eq(rev2)
# third try (make sure the rollback has not destroyed anything)
body = "{ \"a\" : 2, \"b\" : 4 }"
doc = AvocadoDB.log_put("#{prefix}", cmd2, :body => body)
doc.code.should eq(409)
# check the first document again
doc = AvocadoDB.log_get("#{prefix}", cmd2)
doc.code.should eq(200)
doc.parsed_response['a'].should eq(1)
doc.parsed_response['b'].should eq(1)
doc.parsed_response['_id'].should eq(id1)
doc.parsed_response['_rev'].should_not eq(rev1)
doc.parsed_response['_rev'].should_not eq(rev3)
end
end
end
end

View File

@@ -51,6 +51,19 @@ static int DeleteImmediateIndexes (TRI_sim_collection_t* collection,
TRI_doc_mptr_t const* header,
TRI_voc_tick_t);
static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
TRI_doc_mptr_t const* header,
TRI_doc_document_marker_t* marker,
size_t markerSize,
void const* body,
TRI_voc_size_t bodySize,
TRI_voc_rid_t rid,
TRI_voc_rid_t* oldRid,
TRI_doc_update_policy_e policy,
TRI_df_marker_t** result,
bool release,
bool allowRollback);
static int DeleteDocument (TRI_sim_collection_t* collection,
TRI_doc_deletion_marker_t* marker,
TRI_voc_rid_t rid,
@@ -433,6 +446,54 @@ static void UpdateHeader (TRI_doc_collection_t* c,
update->_document._data.data = ((char*) marker) + markerSize;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief rolls back an update
////////////////////////////////////////////////////////////////////////////////
static TRI_doc_mptr_t const RollbackUpdate (TRI_sim_collection_t* sim,
TRI_doc_mptr_t const* header,
TRI_df_marker_t const* originalMarker,
TRI_df_marker_t** result) {
TRI_doc_document_marker_t* marker;
TRI_doc_document_marker_t documentUpdate;
TRI_doc_edge_marker_t edgeUpdate;
char* data;
size_t dataLength;
size_t markerLength;
if (originalMarker->_type == TRI_DOC_MARKER_DOCUMENT) {
memcpy(&documentUpdate, originalMarker, sizeof(TRI_doc_document_marker_t));
marker = &documentUpdate;
markerLength = sizeof(TRI_doc_document_marker_t);
data = ((char*) originalMarker) + sizeof(TRI_doc_document_marker_t);
dataLength = originalMarker->_size - sizeof(TRI_doc_document_marker_t);
}
else if (originalMarker->_type == TRI_DOC_MARKER_EDGE) {
memcpy(&edgeUpdate, originalMarker, sizeof(TRI_doc_edge_marker_t));
marker = &edgeUpdate.base;
markerLength = sizeof(TRI_doc_edge_marker_t);
data = ((char*) originalMarker) + sizeof(TRI_doc_edge_marker_t);
dataLength = originalMarker->_size - sizeof(TRI_doc_edge_marker_t);
}
else {
TRI_doc_mptr_t mptr;
TRI_set_errno(TRI_ERROR_INTERNAL);
mptr._did = 0;
return mptr;
}
return UpdateDocument(sim,
header,
marker, markerLength,
data, dataLength,
header->_rid,
NULL,
TRI_DOC_UPDATE_LAST_WRITE,
result,
false,
false);
}
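
RollbackUpdate undoes a failed update by rebuilding the document's previous marker and replaying it through UpdateDocument with allowRollback set to false, so the replay can never trigger a second rollback. A minimal, self-contained sketch of that pattern, using hypothetical types rather than the AvocadoDB marker structures:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical, simplified model of the rollback-on-violation pattern:
   apply the new state, and if the unique index rejects it, replay the
   saved original state with rollback disabled. Not AvocadoDB code. */

typedef struct { int key; int value; } record_t;

static record_t doc       = { 1, 1 };   /* the document being updated       */
static int      other_key = 2;          /* key owned by some other document */

static bool update_record (record_t next, bool allow_rollback) {
  record_t original = doc;              /* remember the old "marker"        */

  doc = next;                           /* provisional update               */

  /* unique-constraint check: moving onto a key another document owns fails */
  if (next.key != original.key && next.key == other_key) {
    if (allow_rollback) {
      fprintf(stderr, "index violation during update, rolling back\n");
      update_record(original, false);   /* re-apply the original state      */
    }
    return false;                       /* caller still sees the failure    */
  }
  return true;
}

int main (void) {
  record_t conflicting = { 2, 3 };      /* collides with other_key          */

  if (!update_record(conflicting, true)) {
    printf("update rejected, document restored: key=%d value=%d\n",
           doc.key, doc.value);
  }
  return 0;
}

Passing false on the replay mirrors the guard in the real call above: a rollback must not recurse into another rollback.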
////////////////////////////////////////////////////////////////////////////////
/// @brief updates an existing document split into marker and body to file
////////////////////////////////////////////////////////////////////////////////
@@ -453,12 +514,9 @@ static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
TRI_datafile_t* journal;
TRI_df_marker_t const* originalMarker;
TRI_doc_mptr_t resUpd;
TRI_shaped_json_t originalJson;
TRI_voc_size_t total;
int originalRes;
int res;
originalJson = header->_document;
originalMarker = header->_data;
// .............................................................................
@@ -567,86 +625,16 @@ static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
// update immediate indexes
res = UpdateImmediateIndexes(collection, header, &update);
originalRes = res;
// check for constraint error
if (allowRollback && res != TRI_ERROR_NO_ERROR) {
LOG_WARNING("encountered index violating during update, rolling back");
LOG_DEBUG("encountered index violating during update, rolling back");
// .............................................................................
// rollback
// .............................................................................
resUpd = RollbackUpdate(collection, header, originalMarker, result);
if (originalMarker->_type == TRI_DOC_MARKER_DOCUMENT) {
TRI_doc_document_marker_t markerUpd;
// create an update
memset(&markerUpd, 0, sizeof(markerUpd));
markerUpd.base._size = sizeof(markerUpd) + originalJson._data.length;
markerUpd.base._type = originalMarker->_type;
markerUpd._did = header->_did;
markerUpd._sid = 0;
markerUpd._shape = originalJson._sid;
resUpd = UpdateDocument(collection,
header,
&markerUpd, sizeof(markerUpd),
originalJson._data.data, originalJson._data.length,
header->_rid,
NULL,
TRI_DOC_UPDATE_LAST_WRITE,
result,
false,
false);
if (resUpd._did == 0) {
LOG_ERROR("encountered error '%s' during rollback of update", TRI_last_error());
}
}
else if (originalMarker->_type == TRI_DOC_MARKER_EDGE) {
TRI_doc_edge_marker_t markerUpd;
TRI_doc_edge_marker_t const* originalEdge;
originalEdge = (TRI_doc_edge_marker_t*) originalMarker;
// create an update
memset(&markerUpd, 0, sizeof(markerUpd));
markerUpd.base.base._size = sizeof(markerUpd) + originalJson._data.length;
markerUpd.base.base._type = originalMarker->_type;
markerUpd.base._did = header->_did;
markerUpd.base._sid = 0;
markerUpd.base._shape = originalJson._sid;
markerUpd._fromCid = originalEdge->_fromCid;
markerUpd._fromDid = originalEdge->_fromDid;
markerUpd._toCid = originalEdge->_toCid;
markerUpd._toDid = originalEdge->_toDid;
resUpd = UpdateDocument(collection,
header,
&markerUpd.base, sizeof(markerUpd),
originalJson._data.data, originalJson._data.length,
header->_rid,
NULL,
TRI_DOC_UPDATE_LAST_WRITE,
result,
false,
false);
if (resUpd._did == 0) {
LOG_ERROR("encountered error '%s' during rollback of update", TRI_last_error());
}
}
else {
LOG_ERROR("unknown document marker type '%lu' in document '%lu:%lu' revision '%lu'",
(unsigned long) originalMarker->_type,
(unsigned long) collection->base.base._cid,
(unsigned long) header->_did,
(unsigned long) header->_rid);
if (resUpd._did == 0) {
LOG_ERROR("encountered error '%s' during rollback of update", TRI_last_error());
TRI_set_errno(res);
}
}
@@ -654,7 +642,7 @@ static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
// create result
// .............................................................................
if (originalRes == TRI_ERROR_NO_ERROR) {
if (res == TRI_ERROR_NO_ERROR) {
mptr = *header;
// release lock, header might be invalid after this
@@ -673,7 +661,6 @@ static TRI_doc_mptr_t const UpdateDocument (TRI_sim_collection_t* collection,
collection->base.endWrite(&collection->base);
}
TRI_set_errno(originalRes);
mptr._did = 0;
return mptr;
}
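
The failure path above follows the convention used throughout this file: on error the returned master pointer has _did set to 0 and the concrete error code is stored via TRI_set_errno, which is also how the resUpd._did == 0 checks in the rollback path detect a failed replay. A hypothetical sketch of that convention, with illustrative types and values:

#include <stdio.h>

/* Hypothetical sketch of the "_did == 0 means failure" convention; the
   struct and error slot are stand-ins, not the real AvocadoDB types.    */
enum { ERR_NONE = 0, ERR_UNIQUE_CONSTRAINT = 1 };

typedef struct { unsigned long _did; unsigned long _rid; } mptr_t;

static int last_error = ERR_NONE;       /* stand-in for TRI_set_errno/errno */

static mptr_t do_update (int violate_constraint) {
  mptr_t result;

  if (violate_constraint) {
    last_error = ERR_UNIQUE_CONSTRAINT;
    result._did = 0;                    /* signal failure through the handle */
    result._rid = 0;
    return result;
  }
  result._did = 4711;                   /* arbitrary example identifiers     */
  result._rid = 1;
  return result;
}

int main (void) {
  mptr_t res = do_update(1);

  if (res._did == 0) {                  /* caller-side check, as in the diff */
    printf("update failed, error %d\n", last_error);
  }
  return 0;
}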