
Let TRI_document_collection_t inherit from TRI_collection_t.

Max Neunhoeffer 2014-06-11 10:51:47 +02:00
parent af17694ec1
commit c2bddb1c19
22 changed files with 165 additions and 139 deletions
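Editor's note: in essence, the commit replaces the nested base member with public inheritance, so every document->base._xyz access in the hunks below becomes document->_xyz, and every &document->base argument becomes just document. A minimal sketch of the pattern, with the struct bodies reduced to stand-in fields (the real types carry many more members than shown here):

// sketch only -- simplified stand-in fields, real names taken from the diff
#include <cstdint>

struct TRI_vocbase_t;                                        // opaque here
struct TRI_col_info_t { std::uint64_t _cid; int _type; };    // stand-in

// before: composition -- the document collection embedded its base
struct OldCollectionSketch         { TRI_col_info_t _info; TRI_vocbase_t* _vocbase; };
struct OldDocumentCollectionSketch { OldCollectionSketch base; };
// callers wrote document->base._info._cid and passed &document->base

// after: public inheritance -- the base sub-object is the object itself
struct TRI_collection_t          { TRI_col_info_t _info; TRI_vocbase_t* _vocbase; };
struct TRI_document_collection_t : public TRI_collection_t {};

static std::uint64_t cidOf (TRI_document_collection_t* document) {
  return document->_info._cid;     // was: document->base._info._cid
}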

View File

@ -124,8 +124,8 @@ static int InitialiseCap (TRI_cap_constraint_t* cap,
TRI_voc_cid_t cid;
int res;
vocbase = document->base._vocbase;
cid = document->base._info._cid;
vocbase = document->_vocbase;
cid = document->_info._cid;
trx = TRI_CreateTransaction(vocbase, TRI_GetIdServer(), true, 0.0, false);

View File

@ -703,7 +703,7 @@ int ContinuousSyncer::changeCollection (TRI_json_t const* json) {
parameters._maximalSize = maximalSize;
parameters._waitForSync = waitForSync;
res = TRI_UpdateCollectionInfo(_vocbase, &col->_collection->base, &parameters);
res = TRI_UpdateCollectionInfo(_vocbase, col->_collection, &parameters);
TRI_ReleaseCollectionVocBase(_vocbase, col);

View File

@ -277,7 +277,7 @@ int Syncer::applyCollectionDumpMarker (TRI_transaction_collection_t* trxCollecti
if (type == MARKER_EDGE) {
// edge
if (document->base._info._type != TRI_COL_TYPE_EDGE) {
if (document->_info._type != TRI_COL_TYPE_EDGE) {
res = TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID;
}
else {
@ -306,7 +306,7 @@ int Syncer::applyCollectionDumpMarker (TRI_transaction_collection_t* trxCollecti
}
else {
// document
if (document->base._info._type != TRI_COL_TYPE_DOCUMENT) {
if (document->_info._type != TRI_COL_TYPE_DOCUMENT) {
res = TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID;
}
else {

View File

@ -331,7 +331,7 @@ bool RestDocumentHandler::createDocument () {
return false;
}
if (trx.documentCollection()->base._info._type != TRI_COL_TYPE_DOCUMENT) {
if (trx.documentCollection()->_info._type != TRI_COL_TYPE_DOCUMENT) {
// check if we are inserting with the DOCUMENT handler into a non-DOCUMENT collection
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);
generateError(HttpResponse::BAD, TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID);
@ -745,7 +745,7 @@ bool RestDocumentHandler::readAllDocuments () {
res = trx.read(ids);
TRI_col_type_e typ = trx.documentCollection()->base._info._type;
TRI_col_type_e typ = trx.documentCollection()->_info._type;
res = trx.finish(res);
@ -1327,7 +1327,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
TRI_ASSERT(document != nullptr);
TRI_shaper_t* shaper = document->_shaper;
string const cidString = StringUtils::itoa(document->base._info._planId);
string const cidString = StringUtils::itoa(document->_info._planId);
if (trx.orderBarrier(trx.trxCollection()) == nullptr) {
generateTransactionError(collection, TRI_ERROR_OUT_OF_MEMORY);

View File

@ -250,7 +250,7 @@ bool RestEdgeHandler::createDocument () {
TRI_document_collection_t* primary = trx.documentCollection();
if (primary->base._info._type != TRI_COL_TYPE_EDGE) {
if (primary->_info._type != TRI_COL_TYPE_EDGE) {
// check if we are inserting with the EDGE handler into a non-EDGE collection
generateError(HttpResponse::BAD, TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);

View File

@ -616,7 +616,7 @@ bool RestImportHandler::createFromJson (string const& type) {
}
TRI_document_collection_t* document = trx.documentCollection();
bool const isEdgeCollection = (document->base._info._type == TRI_COL_TYPE_EDGE);
bool const isEdgeCollection = (document->_info._type == TRI_COL_TYPE_EDGE);
trx.lockWrite();
@ -1069,7 +1069,7 @@ bool RestImportHandler::createFromKeyValueList () {
}
TRI_document_collection_t* document = trx.documentCollection();
bool const isEdgeCollection = (document->base._info._type == TRI_COL_TYPE_EDGE);
bool const isEdgeCollection = (document->_info._type == TRI_COL_TYPE_EDGE);
trx.lockWrite();

View File

@ -2456,7 +2456,7 @@ int RestReplicationHandler::applyCollectionDumpMarker (CollectionNameResolver co
if (type == MARKER_EDGE) {
// edge
if (document->base._info._type != TRI_COL_TYPE_EDGE) {
if (document->_info._type != TRI_COL_TYPE_EDGE) {
res = TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID;
}
else {
@ -2484,7 +2484,7 @@ int RestReplicationHandler::applyCollectionDumpMarker (CollectionNameResolver co
}
else {
// document
if (document->base._info._type != TRI_COL_TYPE_DOCUMENT) {
if (document->_info._type != TRI_COL_TYPE_DOCUMENT) {
res = TRI_ERROR_ARANGO_COLLECTION_TYPE_INVALID;
}
else {

View File

@ -2269,7 +2269,7 @@ static v8::Handle<v8::Value> JS_ChecksumCollection (v8::Arguments const& argv) {
trx.lockRead();
// get last tick
const string rid = StringUtils::itoa(document->base._info._revision);
const string rid = StringUtils::itoa(document->_info._revision);
if (withData) {
TRI_InitStringBuffer(&helper._buffer, TRI_CORE_MEM_ZONE);

View File

@ -2282,7 +2282,7 @@ static v8::Handle<v8::Value> ReplaceVocbaseCol (bool useCollection,
if (ServerState::instance()->isDBserver()) {
// compare attributes in shardKeys
const string cidString = StringUtils::itoa(document->base._info._planId);
const string cidString = StringUtils::itoa(document->_info._planId);
TRI_json_t* json = TRI_ObjectToJson(argv[1]);
@ -2702,7 +2702,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
if (ServerState::instance()->isDBserver()) {
// compare attributes in shardKeys
const string cidString = StringUtils::itoa(document->base._info._planId);
const string cidString = StringUtils::itoa(document->_info._planId);
if (shardKeysChanged(col->_dbName, cidString, old, json, true)) {
TRI_FreeJson(document->_shaper->_memoryZone, old);
@ -5557,7 +5557,7 @@ static v8::Handle<v8::Value> JS_UpgradeVocbaseCol (v8::Arguments const& argv) {
return scope.Close(v8::ThrowException(err));
}
TRI_collection_t* col = &collection->_collection->base;
TRI_collection_t* col = collection->_collection;
#ifdef TRI_ENABLE_LOGGER
const char* name = col->_info._name;
@ -7032,7 +7032,7 @@ static v8::Handle<v8::Value> JS_PropertiesVocbaseCol (v8::Arguments const& argv)
}
TRI_document_collection_t* document = collection->_collection;
TRI_collection_t* base = &document->base;
TRI_collection_t* base = document;
// check if we want to change some parameters
if (0 < argv.Length()) {
@ -7355,7 +7355,7 @@ static int GetRevision (TRI_vocbase_col_t* collection,
// READ-LOCK start
trx.lockRead();
rid = collection->_collection->base._info._revision;
rid = collection->_collection->_info._revision;
trx.finish(res);
// READ-LOCK end
@ -9709,7 +9709,7 @@ static void WeakBarrierCallback (v8::Isolate* isolate,
persistent.Clear();
// get the vocbase pointer from the barrier
TRI_vocbase_t* vocbase = barrier->base._container->_collection->base._vocbase;
TRI_vocbase_t* vocbase = barrier->base._container->_collection->_vocbase;
// mark that we don't need the barrier anymore
barrier->_usedByExternal = false;
@ -10182,7 +10182,7 @@ v8::Handle<v8::Value> TRI_WrapShapedJson (T& trx,
// increase the reference-counter for the database
TRI_ASSERT(barrier->_container != nullptr);
TRI_ASSERT(barrier->_container->_collection != nullptr);
TRI_UseVocBase(barrier->_container->_collection->base._vocbase);
TRI_UseVocBase(barrier->_container->_collection->_vocbase);
v8::Persistent<v8::Value> persistent = v8::Persistent<v8::Value>::New(isolate, v8::External::New(barrier));
result->SetInternalField(SLOT_BARRIER, persistent);

View File

@ -1142,9 +1142,14 @@ TRI_collection_t* TRI_CreateCollection (TRI_vocbase_t* vocbase,
// create collection structure
if (collection == NULL) {
collection = static_cast<TRI_collection_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_collection_t), false));
try {
collection = new TRI_collection_t();
}
catch (std::exception e) {
collection = nullptr;
}
if (collection == NULL) {
if (collection == nullptr) {
TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
LOG_ERROR("cannot create collection '%s', out of memory", path);
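Editor's note: because TRI_collection_t now has a constructor and destructor, the raw TRI_Allocate call is replaced by new with exception handling. A sketch of that pattern in isolation (hypothetical Collection stand-in; the real code keeps the nullptr check, TRI_FreeString cleanup and LOG_ERROR path shown above):

// sketch only -- the diff catches std::exception by value; a const reference
// avoids a copy, but the effect is the same: report failure as nullptr
#include <exception>

struct Collection { Collection () {} ~Collection () {} };   // default-constructible

static Collection* AllocateCollection () {
  Collection* collection = nullptr;
  try {
    collection = new Collection();      // throws std::bad_alloc on failure
  }
  catch (std::exception const&) {
    collection = nullptr;
  }
  return collection;                    // caller checks for nullptr as before
}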

View File

@ -206,6 +206,12 @@ struct TRI_collection_t {
TRI_vector_pointer_t _journals; // all journals
TRI_vector_pointer_t _compactors; // all compactor files
TRI_vector_string_t _indexFiles; // all index filenames
TRI_collection_t () {
}
~TRI_collection_t () {
}
};
// -----------------------------------------------------------------------------

View File

@ -173,7 +173,7 @@ static inline int64_t AlignedSize (TRI_df_marker_t const* marker) {
static TRI_datafile_t* CreateCompactor (TRI_document_collection_t* document,
TRI_voc_fid_t fid,
int64_t maximalSize) {
TRI_collection_t* collection = &document->base;
TRI_collection_t* collection = document;
// reserve room for one additional entry
if (TRI_ReserveVectorPointer(&collection->_compactors, 1) != TRI_ERROR_NO_ERROR) {
@ -208,7 +208,7 @@ static int CopyMarker (TRI_document_collection_t* document,
int res = TRI_ReserveElementDatafile(compactor, marker->_size, result, 0);
if (res != TRI_ERROR_NO_ERROR) {
document->base._lastError = TRI_set_errno(TRI_ERROR_ARANGO_NO_JOURNAL);
document->_lastError = TRI_set_errno(TRI_ERROR_ARANGO_NO_JOURNAL);
return TRI_ERROR_ARANGO_NO_JOURNAL;
}
@ -255,7 +255,7 @@ static void DropDatafileCallback (TRI_datafile_t* datafile, void* data) {
number = TRI_StringUInt64(fid);
name = TRI_Concatenate3String("deleted-", number, ".db");
filename = TRI_Concatenate2File(document->base._directory, name);
filename = TRI_Concatenate2File(document->_directory, name);
TRI_FreeString(TRI_CORE_MEM_ZONE, number);
TRI_FreeString(TRI_CORE_MEM_ZONE, name);
@ -358,7 +358,7 @@ static void RenameDatafileCallback (TRI_datafile_t* datafile,
// construct a suitable tempname
number = TRI_StringUInt64(datafile->_fid);
jname = TRI_Concatenate3String("temp-", number, ".db");
tempFilename = TRI_Concatenate2File(document->base._directory, jname);
tempFilename = TRI_Concatenate2File(document->_directory, jname);
TRI_FreeString(TRI_CORE_MEM_ZONE, number);
TRI_FreeString(TRI_CORE_MEM_ZONE, jname);
@ -389,7 +389,7 @@ static void RenameDatafileCallback (TRI_datafile_t* datafile,
// must acquire a write-lock as we're about to change the datafiles vector
TRI_WRITE_LOCK_DATAFILES_DOC_COLLECTION(document);
if (! LocateDatafile(&document->base._datafiles, datafile->_fid, &i)) {
if (! LocateDatafile(&document->_datafiles, datafile->_fid, &i)) {
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
LOG_ERROR("logic error: could not locate datafile");
@ -398,7 +398,7 @@ static void RenameDatafileCallback (TRI_datafile_t* datafile,
}
// put the compactor in place of the datafile
document->base._datafiles._buffer[i] = compactor;
document->_datafiles._buffer[i] = compactor;
// update dfi
dfi = TRI_FindDatafileInfoDocumentCollection(document, compactor->_fid, false);
@ -410,7 +410,7 @@ static void RenameDatafileCallback (TRI_datafile_t* datafile,
LOG_ERROR("logic error: could not find compactor file information");
}
if (! LocateDatafile(&document->base._compactors, compactor->_fid, &i)) {
if (! LocateDatafile(&document->_compactors, compactor->_fid, &i)) {
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
LOG_ERROR("logic error: could not locate compactor");
@ -419,7 +419,7 @@ static void RenameDatafileCallback (TRI_datafile_t* datafile,
}
// remove the compactor from the list of compactors
TRI_RemoveVectorPointer(&document->base._compactors, i);
TRI_RemoveVectorPointer(&document->_compactors, i);
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
@ -611,7 +611,7 @@ static int RemoveCompactor (TRI_document_collection_t* document,
TRI_WRITE_LOCK_DATAFILES_DOC_COLLECTION(document);
// remove the compactor from the list of compactors
if (! LocateDatafile(&document->base._compactors, compactor->_fid, &i)) {
if (! LocateDatafile(&document->_compactors, compactor->_fid, &i)) {
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
LOG_ERROR("logic error: could not locate compactor");
@ -619,7 +619,7 @@ static int RemoveCompactor (TRI_document_collection_t* document,
return TRI_ERROR_INTERNAL;
}
TRI_RemoveVectorPointer(&document->base._compactors, i);
TRI_RemoveVectorPointer(&document->_compactors, i);
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
@ -656,7 +656,7 @@ static int RemoveDatafile (TRI_document_collection_t* document,
// remove the datafile from the list of datafiles
TRI_WRITE_LOCK_DATAFILES_DOC_COLLECTION(document);
if (! LocateDatafile(&document->base._datafiles, df->_fid, &i)) {
if (! LocateDatafile(&document->_datafiles, df->_fid, &i)) {
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
LOG_ERROR("logic error: could not locate datafile");
@ -664,7 +664,7 @@ static int RemoveDatafile (TRI_document_collection_t* document,
return TRI_ERROR_INTERNAL;
}
TRI_RemoveVectorPointer(&document->base._datafiles, i);
TRI_RemoveVectorPointer(&document->_datafiles, i);
// update dfi
dfi = TRI_FindDatafileInfoDocumentCollection(document, df->_fid, false);
@ -804,7 +804,7 @@ static void CompactifyDatafiles (TRI_document_collection_t* document,
}
LOG_TRACE("compactify called for collection '%llu' for %d datafiles of total size %llu",
(unsigned long long) document->base._info._cid,
(unsigned long long) document->_info._cid,
(int) n,
(unsigned long long) initial._targetSize);
@ -858,7 +858,7 @@ static void CompactifyDatafiles (TRI_document_collection_t* document,
// must acquire a write-lock as we're about to change the datafiles vector
TRI_WRITE_LOCK_DATAFILES_DOC_COLLECTION(document);
if (! LocateDatafile(&document->base._compactors, compactor->_fid, &j)) {
if (! LocateDatafile(&document->_compactors, compactor->_fid, &j)) {
// not found
TRI_WRITE_UNLOCK_DATAFILES_DOC_COLLECTION(document);
@ -991,9 +991,9 @@ static bool CompactifyDocumentCollection (TRI_document_collection_t* document) {
return false;
}
n = document->base._datafiles._length;
n = document->_datafiles._length;
if (document->base._compactors._length > 0 || n == 0) {
if (document->_compactors._length > 0 || n == 0) {
// we already have created a compactor file in progress.
// if this happens, then a previous compaction attempt for this collection failed
@ -1004,7 +1004,7 @@ static bool CompactifyDocumentCollection (TRI_document_collection_t* document) {
}
// get maximum size of result file
maxSize = (uint64_t) COMPACTOR_MAX_SIZE_FACTOR * (uint64_t) document->base._info._maximalSize;
maxSize = (uint64_t) COMPACTOR_MAX_SIZE_FACTOR * (uint64_t) document->_info._maximalSize;
if (maxSize < 8 * 1024 * 1024) {
maxSize = 8 * 1024 * 1024;
}
@ -1022,7 +1022,7 @@ static bool CompactifyDocumentCollection (TRI_document_collection_t* document) {
uint64_t totalSize = 0;
bool shouldCompact;
TRI_datafile_t* df = static_cast<TRI_datafile_t*>(document->base._datafiles._buffer[i]);
TRI_datafile_t* df = static_cast<TRI_datafile_t*>(document->_datafiles._buffer[i]);
TRI_ASSERT(df != NULL);
@ -1428,7 +1428,7 @@ void TRI_CompactorVocBase (void* data) {
}
worked = false;
doCompact = document->base._info._doCompact;
doCompact = document->_info._doCompact;
// for document collection, compactify datafiles
if (collection->_status == TRI_VOC_COL_STATUS_LOADED && doCompact) {

View File

@ -201,7 +201,7 @@ static TRI_datafile_t* CreateCompactor (TRI_document_collection_t* document,
TRI_df_marker_t* position;
int res;
collection = &document->base;
collection = document;
if (collection->_info._isVolatile) {
// in-memory collection
@ -292,7 +292,7 @@ static bool CloseJournalDocumentCollection (TRI_document_collection_t* document,
TRI_vector_pointer_t* vector;
int res;
collection = &document->base;
collection = document;
// either use a journal or a compactor
if (compactor) {
@ -387,7 +387,7 @@ static TRI_voc_size_t Count (TRI_document_collection_t* document) {
static inline void SetRevision (TRI_document_collection_t* document,
TRI_voc_rid_t rid,
bool force) {
TRI_col_info_t* info = &document->base._info;
TRI_col_info_t* info = &document->_info;
if (force || rid > info->_revision) {
info->_revision = rid;
@ -601,7 +601,7 @@ static void SetIndexCleanupFlag (TRI_document_collection_t* document,
document->_cleanupIndexes = value;
LOG_DEBUG("setting cleanup indexes flag for collection '%s' to %d",
document->base._info._name,
document->_info._name,
(int) value);
}
@ -617,7 +617,7 @@ static int AddIndex (TRI_document_collection_t* document,
LOG_DEBUG("adding index of type %s for collection '%s'",
TRI_TypeNameIndex(idx->_type),
document->base._info._name);
document->_info._name);
int res = TRI_PushBackVectorPointer(&document->_allIndexes, idx);
@ -832,8 +832,8 @@ static int InsertDocumentShapedJson (TRI_transaction_collection_t* trxCollection
// document
TRI_ASSERT(edge == nullptr);
marker = new triagens::wal::DocumentMarker(document->base._vocbase->_id,
document->base._info._cid,
marker = new triagens::wal::DocumentMarker(document->_vocbase->_id,
document->_info._cid,
rid,
trxCollection->_transaction->_id,
keyString,
@ -844,8 +844,8 @@ static int InsertDocumentShapedJson (TRI_transaction_collection_t* trxCollection
// edge
TRI_ASSERT(edge != nullptr);
marker = new triagens::wal::EdgeMarker(document->base._vocbase->_id,
document->base._info._cid,
marker = new triagens::wal::EdgeMarker(document->_vocbase->_id,
document->_info._cid,
rid,
trxCollection->_transaction->_id,
keyString,
@ -1067,8 +1067,8 @@ static int UpdateDocumentShapedJson (TRI_transaction_collection_t* trxCollection
// create a WAL document marker
marker = triagens::wal::DocumentMarker::clone(original,
document->base._vocbase->_id,
document->base._info._cid,
document->_vocbase->_id,
document->_info._cid,
rid,
trxCollection->_transaction->_id,
legend,
@ -1079,8 +1079,8 @@ static int UpdateDocumentShapedJson (TRI_transaction_collection_t* trxCollection
// create a WAL edge marker
marker = triagens::wal::EdgeMarker::clone(original,
document->base._vocbase->_id,
document->base._info._cid,
document->_vocbase->_id,
document->_info._cid,
rid,
trxCollection->_transaction->_id,
legend,
@ -1122,8 +1122,8 @@ static int RemoveDocumentShapedJson (TRI_transaction_collection_t* trxCollection
TRI_document_collection_t* document = trxCollection->_collection->_collection;
triagens::wal::Marker* marker = new triagens::wal::RemoveMarker(document->base._vocbase->_id,
document->base._info._cid,
triagens::wal::Marker* marker = new triagens::wal::RemoveMarker(document->_vocbase->_id,
document->_info._cid,
rid,
trxCollection->_transaction->_id,
std::string(key));
@ -2044,8 +2044,8 @@ static bool OpenIterator (TRI_df_marker_t const* marker,
}
TRI_document_collection_t* document = static_cast<open_iterator_state_t*>(data)->_document;
if (document->base._tickMax < tick) {
document->base._tickMax = tick;
if (document->_tickMax < tick) {
document->_tickMax = tick;
}
@ -2147,7 +2147,7 @@ static TRI_doc_collection_info_t* Figures (TRI_document_collection_t* document)
}
// add the file sizes for datafiles and journals
TRI_collection_t* base = &document->base;
TRI_collection_t* base = document;
for (size_t i = 0; i < base->_datafiles._length; ++i) {
TRI_datafile_t* df = (TRI_datafile_t*) base->_datafiles._buffer[i];
@ -2264,7 +2264,7 @@ static void DestroyBaseDocumentCollection (TRI_document_collection_t* document)
TRI_DestroyBarrierList(&document->_barrierList);
TRI_DestroyCollection(&document->base);
TRI_DestroyCollection(document);
}
////////////////////////////////////////////////////////////////////////////////
@ -2281,7 +2281,7 @@ static bool InitDocumentCollection (TRI_document_collection_t* document,
int res = InitBaseDocumentCollection(document, shaper);
if (res != TRI_ERROR_NO_ERROR) {
TRI_DestroyCollection(&document->base);
TRI_DestroyCollection(document);
TRI_set_errno(res);
return false;
@ -2327,10 +2327,10 @@ static bool InitDocumentCollection (TRI_document_collection_t* document,
}
// create edges index
if (document->base._info._type == TRI_COL_TYPE_EDGE) {
if (document->_info._type == TRI_COL_TYPE_EDGE) {
TRI_index_t* edgesIndex;
edgesIndex = TRI_CreateEdgeIndex(document, document->base._info._cid);
edgesIndex = TRI_CreateEdgeIndex(document, document->_info._cid);
if (edgesIndex == nullptr) {
TRI_FreeIndex(primaryIndex);
@ -2452,7 +2452,12 @@ TRI_document_collection_t* TRI_CreateDocumentCollection (TRI_vocbase_t* vocbase,
// first create the document collection
document = static_cast<TRI_document_collection_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_document_collection_t), false));
try {
document = new TRI_document_collection_t();
}
catch (std::exception e) {
document = nullptr;
}
if (document == nullptr) {
TRI_FreeKeyGenerator(keyGenerator);
@ -2462,7 +2467,7 @@ TRI_document_collection_t* TRI_CreateDocumentCollection (TRI_vocbase_t* vocbase,
return nullptr;
}
collection = TRI_CreateCollection(vocbase, &document->base, path, parameter);
collection = TRI_CreateCollection(vocbase, document, path, parameter);
if (collection == nullptr) {
TRI_FreeKeyGenerator(keyGenerator);
@ -2615,7 +2620,7 @@ TRI_datafile_t* TRI_CreateJournalDocumentCollection (TRI_document_collection_t*
TRI_voc_fid_t fid;
int res;
collection = &document->base;
collection = document;
fid = (TRI_voc_fid_t) TRI_NewTickServer();
@ -2983,13 +2988,19 @@ TRI_document_collection_t* TRI_OpenDocumentCollection (TRI_vocbase_t* vocbase,
char const* path = col->_path;
// first open the document collection
TRI_document_collection_t* document = static_cast<TRI_document_collection_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_document_collection_t), false));
TRI_document_collection_t* document;
try {
document = new TRI_document_collection_t();
}
catch (std::exception e) {
document = nullptr;
}
if (document == nullptr) {
return nullptr;
}
collection = TRI_OpenCollection(vocbase, &document->base, path);
collection = TRI_OpenCollection(vocbase, document, path);
if (collection == nullptr) {
TRI_Free(TRI_UNKNOWN_MEM_ZONE, document);
@ -3073,7 +3084,7 @@ TRI_document_collection_t* TRI_OpenDocumentCollection (TRI_vocbase_t* vocbase,
int TRI_CloseDocumentCollection (TRI_document_collection_t* document) {
// closes all open compactors, journals, datafiles
int res = TRI_CloseCollection(&document->base);
int res = TRI_CloseCollection(document);
TRI_FreeVocShaper(document->_shaper);
document->_shaper = nullptr;
@ -3234,7 +3245,7 @@ static bool DropIndex (TRI_document_collection_t* document,
TRI_index_t* found = nullptr;
TRI_vocbase_t* vocbase = document->base._vocbase;
TRI_vocbase_t* vocbase = document->_vocbase;
TRI_ReadLockReadWriteLock(&vocbase->_inventoryLock);
// .............................................................................
@ -3283,8 +3294,8 @@ static bool DropIndex (TRI_document_collection_t* document,
// it is safe to use _name as we hold a read-lock on the collection status
TRI_LogDropIndexReplication(vocbase,
document->base._info._cid,
document->base._info._name,
document->_info._cid,
document->_info._name,
iid,
generatingServer);
}
@ -3329,7 +3340,7 @@ static int FillIndex (TRI_document_collection_t* document,
if (res != TRI_ERROR_NO_ERROR) {
LOG_WARNING("failed to insert document '%llu/%s' for index %llu",
(unsigned long long) document->base._info._cid,
(unsigned long long) document->_info._cid,
(char*) TRI_EXTRACT_MARKER_KEY(mptr), // ONLY IN INDEX
(unsigned long long) idx->_iid);
@ -3342,7 +3353,7 @@ static int FillIndex (TRI_document_collection_t* document,
LOG_TRACE("indexed %llu documents of collection %llu",
(unsigned long long) (LoopSize * loops),
(unsigned long long) document->base._info._cid);
(unsigned long long) document->_info._cid);
}
}
}
@ -3729,7 +3740,7 @@ void TRI_UpdateStatisticsDocumentCollection (TRI_document_collection_t* document
SetRevision(document, rid, force);
}
if (! document->base._info._isVolatile) {
if (! document->_info._isVolatile) {
// only count logfileEntries if the collection is durable
document->_uncollectedLogfileEntries += logfileEntries;
}
@ -4054,7 +4065,7 @@ TRI_index_t* TRI_EnsureCapConstraintDocumentCollection (TRI_document_collection_
// inside write-lock
// .............................................................................
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
TRI_WRITE_LOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION(document);
@ -4076,7 +4087,7 @@ TRI_index_t* TRI_EnsureCapConstraintDocumentCollection (TRI_document_collection_
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -4459,7 +4470,7 @@ TRI_index_t* TRI_EnsureGeoIndex1DocumentCollection (TRI_document_collection_t* d
TRI_server_id_t generatingServer) {
TRI_index_t* idx;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock
@ -4485,7 +4496,7 @@ TRI_index_t* TRI_EnsureGeoIndex1DocumentCollection (TRI_document_collection_t* d
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -4504,7 +4515,7 @@ TRI_index_t* TRI_EnsureGeoIndex2DocumentCollection (TRI_document_collection_t* d
TRI_server_id_t generatingServer) {
TRI_index_t* idx;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock
@ -4530,7 +4541,7 @@ TRI_index_t* TRI_EnsureGeoIndex2DocumentCollection (TRI_document_collection_t* d
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -4700,7 +4711,7 @@ TRI_index_t* TRI_EnsureHashIndexDocumentCollection (TRI_document_collection_t* d
TRI_server_id_t generatingServer) {
TRI_index_t* idx;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock
@ -4727,7 +4738,7 @@ TRI_index_t* TRI_EnsureHashIndexDocumentCollection (TRI_document_collection_t* d
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -4888,7 +4899,7 @@ TRI_index_t* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_collection_
TRI_server_id_t generatingServer) {
TRI_index_t* idx;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock the collection
@ -4914,7 +4925,7 @@ TRI_index_t* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_collection_
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -5140,7 +5151,7 @@ TRI_index_t* TRI_EnsureFulltextIndexDocumentCollection (TRI_document_collection_
TRI_server_id_t generatingServer) {
TRI_index_t* idx;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock the collection
@ -5166,7 +5177,7 @@ TRI_index_t* TRI_EnsureFulltextIndexDocumentCollection (TRI_document_collection_
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
return idx;
}
@ -5371,7 +5382,7 @@ TRI_index_t* TRI_EnsureBitarrayIndexDocumentCollection (TRI_document_collection_
*errorCode = TRI_ERROR_NO_ERROR;
*errorStr = NULL;
TRI_ReadLockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// inside write-lock the collection
@ -5404,7 +5415,7 @@ TRI_index_t* TRI_EnsureBitarrayIndexDocumentCollection (TRI_document_collection_
// outside write-lock
// .............................................................................
TRI_ReadUnlockReadWriteLock(&document->base._vocbase->_inventoryLock);
TRI_ReadUnlockReadWriteLock(&document->_vocbase->_inventoryLock);
// .............................................................................
// Index already exists so simply return it

View File

@ -417,13 +417,11 @@ TRI_doc_collection_info_t;
/// lock is used to coordinate the read and write transactions.
////////////////////////////////////////////////////////////////////////////////
struct TRI_document_collection_t {
TRI_collection_t base;
// .............................................................................
struct TRI_document_collection_t : public TRI_collection_t {
// ...........................................................................
// this lock protects the _primaryIndex plus the _allIndexes
// and _headers attributes in derived types
// .............................................................................
// ...........................................................................
TRI_read_write_lock_t _lock;
@ -444,9 +442,9 @@ struct TRI_document_collection_t {
TRI_read_write_lock_t _compactionLock;
double _lastCompaction;
// .............................................................................
// ...........................................................................
// this condition variable protects the _journalsCondition
// .............................................................................
// ...........................................................................
TRI_condition_t _journalsCondition;
@ -481,6 +479,12 @@ struct TRI_document_collection_t {
// function that is called to garbage-collect the collection's indexes
int (*cleanupIndexes)(struct TRI_document_collection_t*);
TRI_document_collection_t () {
}
~TRI_document_collection_t () {
}
};
// -----------------------------------------------------------------------------
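Editor's note: once TRI_document_collection_t derives publicly from TRI_collection_t, call sites no longer need the &document->base indirection, because a derived pointer converts implicitly to a base pointer (as in the TRI_CreateCollection and TRI_CloseCollection calls elsewhere in this commit). A sketch with a stand-in function name:

// sketch only -- CloseCollectionSketch is hypothetical; the real signatures
// appear in the hunks above
struct TRI_collection_t {};
struct TRI_document_collection_t : public TRI_collection_t {};

static int CloseCollectionSketch (TRI_collection_t*) { return 0; }

static int CloseDocumentCollection (TRI_document_collection_t* document) {
  // before: CloseCollectionSketch(&document->base);
  // after:  the derived pointer converts implicitly to TRI_collection_t*
  return CloseCollectionSketch(document);
}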

View File

@ -48,7 +48,7 @@
static TRI_edge_index_t* FindEdgesIndex (
TRI_document_collection_t* const document) {
if (document->base._info._type == TRI_COL_TYPE_EDGE) {
if (document->_info._type == TRI_COL_TYPE_EDGE) {
size_t const n = document->_allIndexes._length;
for (size_t i = 0; i < n; ++i) {

View File

@ -341,7 +341,7 @@ bool TRI_RemoveIndexFile (TRI_document_collection_t* collection, TRI_index_t* id
return false;
}
filename = TRI_Concatenate2File(collection->base._directory, name);
filename = TRI_Concatenate2File(collection->_directory, name);
if (filename == NULL) {
TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
@ -391,12 +391,12 @@ int TRI_SaveIndex (TRI_document_collection_t* document,
// construct filename
number = TRI_StringUInt64(idx->_iid);
name = TRI_Concatenate3String("index-", number, ".json");
filename = TRI_Concatenate2File(document->base._directory, name);
filename = TRI_Concatenate2File(document->_directory, name);
TRI_FreeString(TRI_CORE_MEM_ZONE, name);
TRI_FreeString(TRI_CORE_MEM_ZONE, number);
vocbase = document->base._vocbase;
vocbase = document->_vocbase;
// and save
ok = TRI_SaveJson(filename, json, vocbase->_settings.forceSyncProperties);
@ -412,8 +412,8 @@ int TRI_SaveIndex (TRI_document_collection_t* document,
// it is safe to use _name as we hold a read-lock on the collection status
TRI_LogCreateIndexReplication(vocbase,
document->base._info._cid,
document->base._info._name,
document->_info._cid,
document->_info._name,
idx->_iid,
json,
generatingServer);

View File

@ -318,8 +318,8 @@ static TRI_vector_t GetRangeDatafiles (TRI_document_collection_t* document,
TRI_READ_LOCK_DATAFILES_DOC_COLLECTION(document);
IterateDatafiles(&document->base._datafiles, &datafiles, dataMin, dataMax, false);
IterateDatafiles(&document->base._journals, &datafiles, dataMin, dataMax, true);
IterateDatafiles(&document->_datafiles, &datafiles, dataMin, dataMax, false);
IterateDatafiles(&document->_journals, &datafiles, dataMin, dataMax, true);
TRI_READ_UNLOCK_DATAFILES_DOC_COLLECTION(document);
@ -607,7 +607,7 @@ static int DumpCollection (TRI_replication_dump_t* dump,
bool ignoreMarkers;
LOG_TRACE("dumping collection %llu, tick range %llu - %llu, chunk size %llu",
(unsigned long long) document->base._info._cid,
(unsigned long long) document->_info._cid,
(unsigned long long) dataMin,
(unsigned long long) dataMax,
(unsigned long long) chunkSize);
@ -805,7 +805,7 @@ static int DumpLog (TRI_replication_dump_t* dump,
bool bufferFull;
LOG_TRACE("dumping collection %llu, tick range %llu - %llu, chunk size %llu",
(unsigned long long) document->base._info._cid,
(unsigned long long) document->_info._cid,
(unsigned long long) dataMin,
(unsigned long long) dataMax,
(unsigned long long) chunkSize);

View File

@ -245,7 +245,7 @@ static size_t BufferSize = 256;
static TRI_replication_operation_e TranslateDocumentOperation (TRI_voc_document_operation_e type,
TRI_document_collection_t const* document) {
if (type == TRI_VOC_DOCUMENT_OPERATION_INSERT || type == TRI_VOC_DOCUMENT_OPERATION_UPDATE) {
const bool isEdgeCollection = (document->base._info._type == TRI_COL_TYPE_EDGE);
const bool isEdgeCollection = (document->_info._type == TRI_COL_TYPE_EDGE);
return isEdgeCollection ? MARKER_EDGE : MARKER_DOCUMENT;
}
@ -753,7 +753,7 @@ static bool StringifyDocumentOperation (TRI_replication_logger_t* logger,
APPEND_CHAR(buffer, '{');
if (withCid) {
if (! StringifyCollection(buffer, document->base._info._cid, document->base._info._name)) {
if (! StringifyCollection(buffer, document->_info._cid, document->_info._name)) {
return false;
}
APPEND_CHAR(buffer, ',');
@ -878,10 +878,10 @@ static bool StringifyMetaTransaction (TRI_string_buffer_t* buffer,
}
APPEND_STRING(buffer, "{\"cid\":\"");
APPEND_UINT64(buffer, (uint64_t) document->base._info._cid);
APPEND_UINT64(buffer, (uint64_t) document->_info._cid);
APPEND_STRING(buffer, "\",\"name\":\"");
// no escaping needed for collection name
APPEND_STRING(buffer, document->base._info._name);
APPEND_STRING(buffer, document->_info._name);
APPEND_STRING(buffer, "\",\"operations\":");
APPEND_UINT64(buffer, (uint64_t) trxCollection->_operations->size());
APPEND_CHAR(buffer, '}');
@ -1103,7 +1103,7 @@ static int GetStateInactive (TRI_replication_logger_t* logger,
TRI_document_collection_t* document = col->_collection;
dst->_lastLogTick = document->base._info._revision;
dst->_lastLogTick = document->_info._revision;
dst->_totalEvents = 0;
dst->_active = false;
@ -2183,7 +2183,7 @@ int TRI_LogDocumentReplication (TRI_vocbase_t* vocbase,
char* name;
int res;
name = document->base._info._name;
name = document->_info._name;
if (TRI_ExcludeCollectionReplication(name)) {
return TRI_ERROR_NO_ERROR;

View File

@ -174,7 +174,7 @@ static void FreeOperations (TRI_transaction_t* trx) {
}
if (mustRollback) {
document->base._info._revision = trxCollection->_originalRevision;
document->_info._revision = trxCollection->_originalRevision;
}
delete trxCollection->_operations;
@ -418,7 +418,7 @@ static int UseCollections (TRI_transaction_t* trx,
}
// store the waitForSync property
trxCollection->_waitForSync = trxCollection->_collection->_collection->base._info._waitForSync;
trxCollection->_waitForSync = trxCollection->_collection->_collection->_info._waitForSync;
}
TRI_ASSERT(trxCollection->_collection != nullptr);
@ -434,7 +434,7 @@ static int UseCollections (TRI_transaction_t* trx,
if (trxCollection->_accessType == TRI_TRANSACTION_WRITE && trxCollection->_originalRevision == 0) {
// store original revision at transaction start
trxCollection->_originalRevision = trxCollection->_collection->_collection->base._info._revision;
trxCollection->_originalRevision = trxCollection->_collection->_collection->_info._revision;
}
shouldLock = ((trx->_hints & (TRI_transaction_hint_t) TRI_TRANSACTION_HINT_LOCK_ENTIRELY) != 0);

View File

@ -178,7 +178,7 @@ static TRI_shape_aid_t FindOrCreateAttributeByName (TRI_shaper_t* shaper,
TRI_document_collection_t* document = s->_collection;
triagens::wal::AttributeMarker marker(document->base._vocbase->_id, document->base._info._cid, aid, std::string(name));
triagens::wal::AttributeMarker marker(document->_vocbase->_id, document->_info._cid, aid, std::string(name));
// lock the index and check that the element is still missing
{
@ -323,7 +323,7 @@ static TRI_shape_t const* FindShape (TRI_shaper_t* shaper,
shape->_sid = sid;
TRI_document_collection_t* document = s->_collection;
triagens::wal::ShapeMarker marker(document->base._vocbase->_id, document->base._info._cid, shape);
triagens::wal::ShapeMarker marker(document->_vocbase->_id, document->_info._cid, shape);
TRI_shape_t const* result;
@ -722,7 +722,7 @@ int TRI_InsertAttributeVocShaper (TRI_shaper_t* s,
void* f = TRI_InsertKeyAssociativeSynced(&shaper->_attributeNames, p, m, false);
if (f != nullptr) {
char const* name = shaper->_collection->base._info._name;
char const* name = shaper->_collection->_info._name;
#ifdef TRI_ENABLE_MAINTAINER_MODE
LOG_WARNING("found duplicate attribute name '%s' in collection '%s'", p, name);
@ -734,7 +734,7 @@ int TRI_InsertAttributeVocShaper (TRI_shaper_t* s,
f = TRI_InsertKeyAssociativeSynced(&shaper->_attributeIds, &m->_aid, m, false);
if (f != nullptr) {
char const* name = shaper->_collection->base._info._name;
char const* name = shaper->_collection->_info._name;
#ifdef TRI_ENABLE_MAINTAINER_MODE
LOG_WARNING("found duplicate attribute id '%llu' in collection '%s'", (unsigned long long) m->_aid, name);

View File

@ -597,7 +597,7 @@ static TRI_vocbase_col_t* CreateCollection (TRI_vocbase_t* vocbase,
return NULL;
}
col = &document->base;
col = document;
// add collection container
collection = AddCollection(vocbase,
@ -626,7 +626,7 @@ static TRI_vocbase_col_t* CreateCollection (TRI_vocbase_t* vocbase,
collection->_status = TRI_VOC_COL_STATUS_LOADED;
collection->_collection = document;
TRI_CopyString(collection->_path,
document->base._directory,
document->_directory,
sizeof(collection->_path) - 1);
json = TRI_CreateJsonCollectionInfo(&col->_info);
@ -725,7 +725,7 @@ static int RenameCollection (TRI_vocbase_t* vocbase,
collection->_status == TRI_VOC_COL_STATUS_UNLOADING ||
collection->_status == TRI_VOC_COL_STATUS_LOADING) {
res = TRI_RenameCollection(&collection->_collection->base, newName);
res = TRI_RenameCollection(collection->_collection, newName);
if (res != TRI_ERROR_NO_ERROR) {
TRI_WRITE_UNLOCK_COLLECTIONS_VOCBASE(vocbase);
@ -1115,7 +1115,7 @@ static int LoadCollectionVocBase (TRI_vocbase_t* vocbase,
collection->_collection = document;
collection->_status = TRI_VOC_COL_STATUS_LOADED;
TRI_CopyString(collection->_path, document->base._directory, sizeof(collection->_path) - 1);
TRI_CopyString(collection->_path, document->_directory, sizeof(collection->_path) - 1);
// release the WRITE lock and try again
TRI_WRITE_UNLOCK_STATUS_VOCBASE_COL(collection);
@ -1969,7 +1969,7 @@ int TRI_UnloadCollectionVocBase (TRI_vocbase_t* vocbase,
// added callback for unload
TRI_CreateBarrierUnloadCollection(&collection->_collection->_barrierList,
&collection->_collection->base,
collection->_collection,
UnloadCollectionCallback,
collection);
@ -2100,9 +2100,9 @@ int TRI_DropCollectionVocBase (TRI_vocbase_t* vocbase,
// .............................................................................
else if (collection->_status == TRI_VOC_COL_STATUS_LOADED || collection->_status == TRI_VOC_COL_STATUS_UNLOADING) {
collection->_collection->base._info._deleted = true;
collection->_collection->_info._deleted = true;
res = TRI_UpdateCollectionInfo(vocbase, &collection->_collection->base, nullptr);
res = TRI_UpdateCollectionInfo(vocbase, collection->_collection, nullptr);
if (res != TRI_ERROR_NO_ERROR) {
TRI_WRITE_UNLOCK_STATUS_VOCBASE_COL(collection);
@ -2122,7 +2122,7 @@ int TRI_DropCollectionVocBase (TRI_vocbase_t* vocbase,
// added callback for dropping
TRI_CreateBarrierDropCollection(&collection->_collection->_barrierList,
&collection->_collection->base,
collection->_collection,
DropCollectionCallback,
collection);

View File

@ -490,13 +490,13 @@ int CollectorThread::processCollectionOperations (CollectorCache* cache) {
// try to acquire the write lock on the collection
if (! TRI_TRY_WRITE_LOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION(document)) {
LOG_TRACE("wal collector couldn't acquire write lock for collection '%llu'", (unsigned long long) document->base._info._cid);
LOG_TRACE("wal collector couldn't acquire write lock for collection '%llu'", (unsigned long long) document->_info._cid);
return TRI_ERROR_LOCK_TIMEOUT;
}
// now we have the write lock on the collection
LOG_TRACE("wal collector processing operations for collection '%s'", document->base._info._name);
LOG_TRACE("wal collector processing operations for collection '%s'", document->_info._name);
for (auto it = cache->operations->begin(); it != cache->operations->end(); ++it) {
auto operation = (*it);
@ -582,7 +582,7 @@ int CollectorThread::processCollectionOperations (CollectorCache* cache) {
}
// finally update all datafile statistics
LOG_TRACE("updating datafile statistics for collection '%s'", document->base._info._name);
LOG_TRACE("updating datafile statistics for collection '%s'", document->_info._name);
updateDatafileStatistics(document, cache);
// TODO: the following assertion is only true in a running system
@ -595,7 +595,7 @@ int CollectorThread::processCollectionOperations (CollectorCache* cache) {
TRI_WRITE_UNLOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION(document);
LOG_TRACE("wal collector successfully processed operations for collection '%s'", document->base._info._name);
LOG_TRACE("wal collector successfully processed operations for collection '%s'", document->_info._name);
return TRI_ERROR_NO_ERROR;
}
@ -732,7 +732,7 @@ int CollectorThread::transferMarkers (Logfile* logfile,
TRI_vocbase_col_t* collection = collectionGuard.collection();
TRI_ASSERT(collection != nullptr);
if (collection->_collection->base._info._isVolatile) {
if (collection->_collection->_info._isVolatile) {
// don't need to collect data for volatile collections
return TRI_ERROR_NO_ERROR;
}
@ -781,7 +781,7 @@ int CollectorThread::executeTransferMarkers (TRI_document_collection_t* document
CollectorCache* cache,
OperationsType const& operations) {
TRI_voc_tick_t const minTransferTick = document->base._tickMax;
TRI_voc_tick_t const minTransferTick = document->_tickMax;
for (auto it2 = operations.begin(); it2 != operations.end(); ++it2) {
TRI_df_marker_t const* source = (*it2);
@ -1039,7 +1039,7 @@ int CollectorThread::updateDatafileStatistics (TRI_document_collection_t* docume
////////////////////////////////////////////////////////////////////////////////
int CollectorThread::syncDatafileCollection (TRI_document_collection_t* document) {
TRI_collection_t* collection = &document->base;
TRI_collection_t* collection = document;
int res = TRI_ERROR_NO_ERROR;
TRI_LOCK_JOURNAL_ENTRIES_DOC_COLLECTION(document);
@ -1088,7 +1088,7 @@ char* CollectorThread::nextFreeMarkerPosition (TRI_document_collection_t* docume
TRI_df_marker_type_e type,
TRI_voc_size_t size,
CollectorCache* cache) {
TRI_collection_t* collection = &document->base;
TRI_collection_t* collection = document;
size = TRI_DF_ALIGN_BLOCK(size);
char* dst = nullptr;
@ -1096,7 +1096,7 @@ char* CollectorThread::nextFreeMarkerPosition (TRI_document_collection_t* docume
TRI_LOCK_JOURNAL_ENTRIES_DOC_COLLECTION(document);
// start with configured journal size
TRI_voc_size_t targetSize = document->base._info._maximalSize;
TRI_voc_size_t targetSize = document->_info._maximalSize;
while (collection->_state == TRI_COL_STATE_WRITE) {
size_t const n = collection->_journals._length;
@ -1195,8 +1195,8 @@ void CollectorThread::finishMarker (char const* walPosition,
crc = TRI_BlockCrc32(crc, const_cast<char*>(datafilePosition), marker->_size);
marker->_crc = TRI_FinalCrc32(crc);
TRI_ASSERT(document->base._tickMax < tick);
document->base._tickMax = tick;
TRI_ASSERT(document->_tickMax < tick);
document->_tickMax = tick;
cache->operations->emplace_back(CollectorOperation(datafilePosition, walPosition, cache->lastFid));
}