1
0
Fork 0

added restrictions for export API

This commit is contained in:
Jan Steemann 2015-04-17 00:24:43 +02:00
parent d9add3a493
commit 6e23c5330f
8 changed files with 450 additions and 30 deletions

View File

@ -50,6 +50,61 @@ describe ArangoDB do
doc.parsed_response['code'].should eq(404)
doc.parsed_response['errorNum'].should eq(1600)
end
# Passing "restrict" as a string (instead of an object) must be rejected
# with HTTP 400 and errorNum 17 (type error).
# Fix: the log prefix was the copy-pasted "missing-restrict-type", which
# collided with the next test's prefix; use a distinct "invalid-restrict".
it "returns an error if restrict has an invalid type" do
cmd = api + "?collection=whatever"
doc = ArangoDB.log_post("#{prefix}-invalid-restrict", cmd, :body => "{ \"restrict\" : \"foo\" }")
doc.code.should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(17)
end
# A "restrict" object without a "type" attribute must be rejected
# with HTTP 400 and errorNum 10 (bad parameter).
it "returns an error if restrict type is missing" do
cmd = api + "?collection=whatever"
doc = ArangoDB.log_post("#{prefix}-missing-restrict-type", cmd, :body => "{ \"restrict\" : { \"fields\" : [ ] } }")
doc.code.should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(10)
end
# A "restrict.type" value other than "include" or "exclude" must be
# rejected with HTTP 400 and errorNum 10 (bad parameter).
it "returns an error if restrict type is invalid" do
cmd = "#{api}?collection=whatever"
payload = '{ "restrict" : { "type" : "foo", "fields" : [ ] } }'
doc = ArangoDB.log_post("#{prefix}-invalid-restrict-type", cmd, :body => payload)
doc.code.should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(10)
end
# A "restrict" object without a "fields" attribute must be rejected
# with HTTP 400 and errorNum 10 (bad parameter).
it "returns an error if restrict fields are missing" do
cmd = api + "?collection=whatever"
doc = ArangoDB.log_post("#{prefix}-missing-restrict-fields", cmd, :body => "{ \"restrict\" : { \"type\" : \"include\" } }")
doc.code.should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(10)
end
# A "restrict.fields" value that is not an array (here: a string) must be
# rejected with HTTP 400 and errorNum 10 (bad parameter).
it "returns an error if restrict fields are invalid" do
cmd = api + "?collection=whatever"
doc = ArangoDB.log_post("#{prefix}-invalid-restrict-fields", cmd, :body => "{ \"restrict\" : { \"type\" : \"include\", \"fields\" : \"foo\" } }")
doc.code.should eq(400)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(10)
end
end
@ -494,7 +549,7 @@ describe ArangoDB do
doc.parsed_response['errorNum'].should eq(1600)
doc.parsed_response['code'].should eq(404)
end
it "consumes a cursor, while compaction is running" do
cmd = api + "?collection=#{@cn}"
body = "{ \"count\" : true, \"batchSize\" : 700, \"flush\" : true }"
@ -565,5 +620,186 @@ describe ArangoDB do
end
################################################################################
## using restrictions
################################################################################
context "handling restrictions:" do
# Re-create the "users" test collection and seed it (server-side, via
# /_admin/execute) with 2000 documents, each carrying the top-level
# attributes a, b and c.
before do
@cn = "users"
ArangoDB.drop_collection(@cn)
@cid = ArangoDB.create_collection(@cn, false)
ArangoDB.post("/_admin/execute", :body => "var db = require('internal').db, c = db.#{@cn}; for (var i = 0; i < 2000; ++i) { c.save({ a: i, b: i, c: i }); }")
end
# Drop the test collection after each example.
after do
ArangoDB.drop_collection(@cn)
end
# restrict type "include" with a single field: every exported document
# must contain exactly that one attribute ('b') and nothing else
# (system attributes are filtered out as well).
it "includes a single attribute" do
cmd = api + "?collection=#{@cn}"
body = "{ \"count\" : true, \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"include\", \"fields\" : [ \"b\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-include-single", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['count'].should eq(2000)
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(1)
oneDoc.should have_key('b')
}
end
# restrict type "include" with several fields, including the system
# attributes _id and _rev: exactly the four listed attributes survive;
# c and the unlisted system attribute _key are filtered out.
it "includes a few attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"include\", \"fields\" : [ \"b\", \"_id\", \"_rev\", \"a\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-include-few", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(4)
oneDoc.should have_key('a')
oneDoc.should have_key('b')
oneDoc.should_not have_key('c')
oneDoc.should_not have_key('_key')
oneDoc.should have_key('_id')
oneDoc.should have_key('_rev')
}
end
# Including attributes that do not exist in the documents ('xxxx', and
# 'A' — matching is case-sensitive) is harmless: only the existing
# attribute 'c' remains in the result.
it "includes non-existing attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"include\", \"fields\" : [ \"c\", \"xxxx\", \"A\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-include-non-existing", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(1)
oneDoc.should have_key('c')
}
end
# restrict type "include" with an empty fields array is valid and
# yields 2000 empty objects.
it "includes no attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"include\", \"fields\" : [ ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-include-none", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(0)
}
end
# restrict type "exclude" with a single field: 'b' is removed, the
# remaining five attributes (a, c, _id, _key, _rev) are kept.
it "excludes a single attribute" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"exclude\", \"fields\" : [ \"b\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-exclude-single", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(5)
oneDoc.should_not have_key('b')
oneDoc.should have_key('a')
oneDoc.should have_key('c')
oneDoc.should have_key('_id')
oneDoc.should have_key('_key')
oneDoc.should have_key('_rev')
}
end
# Excluding a, b, _id and _rev (system attributes can be excluded too)
# leaves exactly two attributes per document: c and _key.
it "excludes a few attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"exclude\", \"fields\" : [ \"b\", \"_id\", \"_rev\", \"a\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-exclude-few", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(2)
oneDoc.should have_key('c')
oneDoc.should have_key('_key')
}
end
# Excluding attributes that do not exist ('xxxx', 'A') has no effect;
# only the existing attribute 'c' is removed, leaving 5 attributes.
it "excludes non-existing attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"exclude\", \"fields\" : [ \"c\", \"xxxx\", \"A\" ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-exclude-non-existing", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(5)
oneDoc.should_not have_key('c')
}
end
# restrict type "exclude" with an empty fields array removes nothing:
# all 6 attributes (a, b, c, _id, _key, _rev) are returned.
it "excludes no attributes" do
cmd = api + "?collection=#{@cn}"
body = "{ \"batchSize\" : 2000, \"restrict\" : { \"type\" : \"exclude\", \"fields\" : [ ] }, \"flush\" : true }"
doc = ArangoDB.log_post("#{prefix}-exclude-none", cmd, :body => body)
doc.code.should eq(201)
doc.headers['content-type'].should eq("application/json; charset=utf-8")
doc.parsed_response['error'].should eq(false)
doc.parsed_response['code'].should eq(201)
doc.parsed_response['id'].should be_nil
doc.parsed_response['hasMore'].should eq(false)
doc.parsed_response['result'].length.should eq(2000)
doc.parsed_response['result'].each{|oneDoc|
oneDoc.size.should eq(6)
}
end
end
end
end

View File

@ -48,7 +48,8 @@ using namespace triagens::rest;
////////////////////////////////////////////////////////////////////////////////
RestExportHandler::RestExportHandler (HttpRequest* request)
: RestVocbaseBaseHandler(request) {
: RestVocbaseBaseHandler(request),
_restrictions() {
}
@ -61,6 +62,13 @@ RestExportHandler::RestExportHandler (HttpRequest* request)
////////////////////////////////////////////////////////////////////////////////
HttpHandler::status_t RestExportHandler::execute () {
if (ServerState::instance()->isCoordinator()) {
generateError(HttpResponse::NOT_IMPLEMENTED,
TRI_ERROR_CLUSTER_UNSUPPORTED,
"'/_api/export' is not yet supported in a cluster");
return status_t(HANDLER_DONE);
}
// extract the sub-request type
HttpRequest::HttpRequestType type = _request->requestType();
@ -105,7 +113,7 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
options.set("batchSize", triagens::basics::Json(TRI_IsNumberJson(attribute) ? attribute->_value._number : 1000.0));
if (TRI_IsNumberJson(attribute) && static_cast<size_t>(attribute->_value._number) == 0) {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_TYPE_ERROR, "expecting non-zero value for <batchSize>");
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_TYPE_ERROR, "expecting non-zero value for 'batchSize'");
}
attribute = getAttribute("flush");
@ -115,6 +123,49 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
attribute = getAttribute("ttl");
options.set("ttl", triagens::basics::Json(TRI_IsNumberJson(attribute) ? attribute->_value._number : 30.0));
}
attribute = getAttribute("flushWait");
options.set("flushWait", triagens::basics::Json(TRI_IsNumberJson(attribute) ? attribute->_value._number : 10.0));
// handle "restrict" parameter
attribute = getAttribute("restrict");
if (attribute != nullptr) {
if (! TRI_IsObjectJson(attribute)) {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_TYPE_ERROR, "expecting object for 'restrict'");
}
// "restrict"."type"
auto type = TRI_LookupObjectJson(attribute, "type");
if (! TRI_IsStringJson(type)) {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_BAD_PARAMETER, "expecting string for 'restrict.type'");
}
std::string typeString = std::string(type->_value._string.data, type->_value._string.length - 1);
if (typeString == "include") {
_restrictions.type = CollectionExport::Restrictions::RESTRICTION_INCLUDE;
}
else if (typeString == "exclude") {
_restrictions.type = CollectionExport::Restrictions::RESTRICTION_EXCLUDE;
}
else {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_BAD_PARAMETER, "expecting either 'include' or 'exclude' for 'restrict.type'");
}
// "restrict"."fields"
auto fields = TRI_LookupObjectJson(attribute, "fields");
if (! TRI_IsArrayJson(fields)) {
THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_BAD_PARAMETER, "expecting array for 'restrict.fields'");
}
size_t const n = TRI_LengthArrayJson(fields);
for (size_t i = 0; i < n; ++i) {
auto name = TRI_LookupArrayJson(fields, i);
if (TRI_IsStringJson(name)) {
_restrictions.fields.emplace(std::string(name->_value._string.data, name->_value._string.length - 1));
}
}
}
return options;
}
@ -146,24 +197,31 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
/// object's *hasMore* attribute will be set to *true*, and the *id* attribute
/// of the result will contain a cursor id.
///
/// By default, only those documents from the collection will be returned that are
/// stored in the collection's datafiles. Documents that are present in the write-ahead
/// log (WAL) only will not be exported. To force an export of these documents, too,
/// there is a *flush* option. This will trigger a WAL flush so documents get copied
/// from the WAL to the collection datafiles.
///
/// The order in which the documents are returned is not specified.
///
/// The following attributes can be used inside the JSON request object:
/// By default, only those documents from the collection will be returned that are
/// stored in the collection's datafiles. Documents that are present in the write-ahead
/// log (WAL) at the time the export is run will not be exported.
///
/// To export these documents as well, the caller can issue a WAL flush request
/// before calling the export API or set the *flush* attribute. Setting the *flush*
/// option will trigger a WAL flush before the export so documents get copied from
/// the WAL to the collection datafiles.
///
/// The following attributes can be used inside the JSON request object to control
/// the export behavior:
///
/// - *flush*: if set to *true*, a WAL flush operation will be executed prior to the
/// export. The flush operation will ensure all documents have been copied from the
/// WAL to the collection's datafiles. There will be an additional wait time of up
/// to 10 seconds after the flush to allow the WAL collector to change adjust
/// document meta-data to point to the datafiles, too.
/// export. The flush operation will start copying documents from the WAL to the
/// collection's datafiles. There will be an additional wait time of up
/// to *flushWait* seconds after the flush to allow the WAL collector to adjust
/// the document meta-data to point into the datafiles, too.
/// The default value is *false* (i.e. no flush) so most recently inserted or updated
/// documents from the collection might be missing in the export.
///
/// - *flushWait*: maximum wait time in seconds after a flush operation. The default
/// value is 10. This option only has an effect when *flush* is set to *true*.
///
/// - *count*: boolean flag that indicates whether the number of documents
/// in the result set should be returned in the "count" attribute of the result (optional).
/// Calculating the "count" attribute might in the future have a performance
@ -179,6 +237,16 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
/// is useful to ensure garbage collection of cursors that are not fully fetched
/// by clients. If not set, a server-defined value will be used.
///
/// - *restrict*: an optional object containing an array of attribute names that will be
/// included or excluded when returning result documents. If specified, *fields* must
/// be an object and contain a *type* attribute which must be set to either *include*
/// or *exclude*. It must also contain a *fields* attribute containing an array of
/// attribute names to include or exclude. Matching of attribute names for inclusion
/// or exclusion will be done on the top level only. Specifying names of nested attributes
/// is not supported at the moment.
///
/// Not specifying *restrict* will by default return all attributes of each document.
///
/// If the result set can be created by the server, the server will respond with
/// *HTTP 201*. The body of the response will contain a JSON object with the
/// result set.
@ -214,12 +282,14 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
///
/// - *errorMessage*: a descriptive error message
///
/// Note: clients should always delete a cursor result as early as possible because a
/// lingering export cursor will prevent the underlying collection from being being
/// Clients should always delete an export cursor result as early as possible because a
/// lingering export cursor will prevent the underlying collection from being
/// compacted or unloaded. By default, unused cursors will be deleted automatically
/// after a server-defined idle time, and clients can adjust this idle time by setting
/// the *ttl* value.
///
/// Note: this API is currently not supported on cluster coordinators.
///
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{201}
@ -236,6 +306,10 @@ triagens::basics::Json RestExportHandler::buildOptions (TRI_json_t const* json)
/// @RESTRETURNCODE{405}
/// The server will respond with *HTTP 405* if an unsupported HTTP method is used.
///
/// @RESTRETURNCODE{501}
/// The server will respond with *HTTP 501* if this API is called on a cluster
/// coordinator.
///
/// @endDocuBlock
////////////////////////////////////////////////////////////////////////////////
@ -262,6 +336,10 @@ void RestExportHandler::createCursor () {
try {
std::unique_ptr<TRI_json_t> json(parseJsonBody());
if (json.get() == nullptr) {
return;
}
triagens::basics::Json options;
@ -289,11 +367,13 @@ void RestExportHandler::createCursor () {
THROW_ARANGO_EXCEPTION(res);
}
waitTime = 10 * 1000 * 1000; // wait at most 10 seconds for full logfile collection
double flushWait = triagens::basics::JsonHelper::getNumericValue<double>(options.json(), "flushWait", 10.0);
waitTime = static_cast<uint64_t>(flushWait * 1000 * 1000); // flushWait is specified in s, but we need ns
}
// this may throw!
std::unique_ptr<CollectionExport> collectionExport(new CollectionExport(_vocbase, name));
std::unique_ptr<CollectionExport> collectionExport(new CollectionExport(_vocbase, name, _restrictions));
collectionExport->run(waitTime);
{

View File

@ -32,6 +32,7 @@
#include "Basics/Common.h"
#include "Basics/Mutex.h"
#include "Utils/CollectionExport.h"
#include "RestHandler/RestVocbaseBaseHandler.h"
// -----------------------------------------------------------------------------
@ -41,7 +42,6 @@
namespace triagens {
namespace arango {
class Cursor;
////////////////////////////////////////////////////////////////////////////////
/// @brief document request handler
@ -109,6 +109,12 @@ namespace triagens {
private:
////////////////////////////////////////////////////////////////////////////////
/// @brief restrictions for export
////////////////////////////////////////////////////////////////////////////////
CollectionExport::Restrictions _restrictions;
};
}
}

View File

@ -364,7 +364,7 @@ int RestImportHandler::handleSingleDocument (RestImportTransaction& trx,
///
/// @RESTBODYPARAM{documents,string,required}
/// The body must either be a JSON-encoded array of objects or a string with
/// multiple JSON object separated by newlines.
/// multiple JSON objects separated by newlines.
///
/// @RESTQUERYPARAMETERS
///
@ -451,6 +451,8 @@ int RestImportHandler::handleSingleDocument (RestImportTransaction& trx,
/// contain a `details` attribute which is an array with more detailed
/// information about which documents could not be inserted.
///
/// Note: this API is currently not supported on cluster coordinators.
///
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{201}
@ -473,6 +475,10 @@ int RestImportHandler::handleSingleDocument (RestImportTransaction& trx,
/// is returned if the server cannot auto-generate a document key (out of keys
/// error) for a document with no user-defined key.
///
/// @RESTRETURNCODE{501}
/// The server will respond with *HTTP 501* if this API is called on a cluster
/// coordinator.
///
/// @EXAMPLES
///
/// Importing documents with heterogeneous attributes from a JSON array:
@ -1001,6 +1007,8 @@ bool RestImportHandler::createFromJson (string const& type) {
/// contain a `details` attribute which is an array with more detailed
/// information about which documents could not be inserted.
///
/// Note: this API is currently not supported on cluster coordinators.
///
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{201}
@ -1023,6 +1031,10 @@ bool RestImportHandler::createFromJson (string const& type) {
/// is returned if the server cannot auto-generate a document key (out of keys
/// error) for a document with no user-defined key.
///
/// @RESTRETURNCODE{501}
/// The server will respond with *HTTP 501* if this API is called on a cluster
/// coordinator.
///
/// @EXAMPLES
///
/// Importing two documents, with attributes `_key`, `value1` and `value2` each. One

View File

@ -31,6 +31,7 @@
#include "Basics/JsonHelper.h"
#include "Utils/CollectionGuard.h"
#include "Utils/CollectionReadLocker.h"
#include "Utils/transactions.h"
#include "VocBase/barrier.h"
#include "VocBase/compactor.h"
#include "VocBase/vocbase.h"
@ -46,15 +47,19 @@ using namespace triagens::arango;
// -----------------------------------------------------------------------------
CollectionExport::CollectionExport (TRI_vocbase_t* vocbase,
std::string const& name)
std::string const& name,
Restrictions const& restrictions)
: _guard(nullptr),
_document(nullptr),
_barrier(nullptr),
_name(name),
_resolver(vocbase),
_restrictions(restrictions),
_documents(nullptr) {
// prevent the collection from being unloaded while the export is ongoing
// this may throw
_guard = new triagens::arango::CollectionGuard(vocbase, name.c_str(), false);
_guard = new triagens::arango::CollectionGuard(vocbase, _name.c_str(), false);
_document = _guard->collection()->_collection;
TRI_ASSERT(_document != nullptr);
@ -75,15 +80,13 @@ CollectionExport::~CollectionExport () {
// -----------------------------------------------------------------------------
void CollectionExport::run (uint64_t maxWaitTime) {
// create a fake transaction for iterating over the collection
TransactionBase trx(true);
// try to acquire the exclusive lock on the compaction
while (! TRI_CheckAndLockCompactorVocBase(_document->_vocbase)) {
// didn't get it. try again...
usleep(5000);
}
// create a barrier under the compaction lock
_barrier = TRI_CreateBarrierElement(&_document->_barrierList);
// release the lock
@ -112,13 +115,18 @@ void CollectionExport::run (uint64_t maxWaitTime) {
}
}
// RAII read-lock
{
triagens::arango::CollectionReadLocker lock(_document, true);
SingleCollectionReadOnlyTransaction trx(new StandaloneTransactionContext(), _document->_vocbase, _name);
int res = trx.begin();
if (res != TRI_ERROR_NO_ERROR) {
THROW_ARANGO_EXCEPTION(res);
}
size_t const n = _document->_primaryIndex._nrAlloc;
_documents->reserve(n);
_documents->reserve(_document->_primaryIndex._nrUsed);
for (size_t i = 0; i < n; ++i) {
auto ptr = _document->_primaryIndex._table[i];
@ -132,6 +140,8 @@ void CollectionExport::run (uint64_t maxWaitTime) {
}
}
}
trx.finish(res);
}
}

View File

@ -51,13 +51,32 @@ namespace triagens {
friend class ExportCursor;
public:
////////////////////////////////////////////////////////////////////////////////
/// @brief attribute restrictions applied to exported documents
////////////////////////////////////////////////////////////////////////////////
struct Restrictions {
// whether the named fields are included, excluded, or no filtering applies
enum Type {
RESTRICTION_NONE,
RESTRICTION_INCLUDE,
RESTRICTION_EXCLUDE
};
// default: no restrictions, export all attributes
Restrictions ()
: fields(),
type(RESTRICTION_NONE) {
}
// top-level attribute names to include or exclude
std::unordered_set<std::string> fields;
Type type;
};
public:
CollectionExport (CollectionExport const&) = delete;
CollectionExport& operator= (CollectionExport const&) = delete;
CollectionExport (TRI_vocbase_s*,
std::string const&);
std::string const&,
Restrictions const&);
~CollectionExport ();
@ -78,7 +97,9 @@ namespace triagens {
triagens::arango::CollectionGuard* _guard;
struct TRI_document_collection_t* _document;
struct TRI_barrier_s* _barrier;
std::string const _name;
triagens::arango::CollectionNameResolver _resolver;
Restrictions _restrictions;
std::vector<void const*>* _documents;
};

View File

@ -276,6 +276,7 @@ void ExportCursor::dump (triagens::basics::StringBuffer& buffer) {
TRI_ASSERT(_ex != nullptr);
TRI_shaper_t* shaper = _ex->_document->getShaper();
auto const restrictionType = _ex->_restrictions.type;
buffer.appendText("\"result\":[");
@ -303,6 +304,7 @@ void ExportCursor::dump (triagens::basics::StringBuffer& buffer) {
std::string id(_ex->_resolver.getCollectionName(_ex->_document->_info._cid));
id.push_back('/');
id.append(key);
json(TRI_VOC_ATTRIBUTE_ID, triagens::basics::Json(id));
json(TRI_VOC_ATTRIBUTE_REV, triagens::basics::Json(std::to_string(TRI_EXTRACT_MARKER_RID(marker))));
json(TRI_VOC_ATTRIBUTE_KEY, triagens::basics::Json(key));
@ -321,6 +323,58 @@ void ExportCursor::dump (triagens::basics::StringBuffer& buffer) {
json(TRI_VOC_ATTRIBUTE_TO, triagens::basics::Json(to));
}
if (restrictionType == CollectionExport::Restrictions::RESTRICTION_INCLUDE ||
restrictionType == CollectionExport::Restrictions::RESTRICTION_EXCLUDE) {
// only include the specified fields
// for this we'll modify the JSON that we already have, in place
// we'll scan through the JSON attributs from left to right and
// keep all those that we want to keep. we'll overwrite existing
// other values in the JSON
TRI_json_t* obj = json.json();
TRI_ASSERT(TRI_IsObjectJson(obj));
size_t const n = obj->_value._objects._length;
size_t j = 0;
for (size_t i = 0; i < n; i += 2) {
auto key = static_cast<TRI_json_t const*>(TRI_AtVector(&obj->_value._objects, i));
if (! TRI_IsStringJson(key)) {
continue;
}
bool const keyContainedInRestrictions = (_ex->_restrictions.fields.find(key->_value._string.data) != _ex->_restrictions.fields.end());
if ((restrictionType == CollectionExport::Restrictions::RESTRICTION_INCLUDE && keyContainedInRestrictions) ||
(restrictionType == CollectionExport::Restrictions::RESTRICTION_EXCLUDE && ! keyContainedInRestrictions)) {
// include the field
if (i != j) {
// steal the key and the value
void* src = TRI_AddressVector(&obj->_value._objects, i);
void* dst = TRI_AddressVector(&obj->_value._objects, j);
memcpy(dst, src, 2 * sizeof(TRI_json_t));
}
j += 2;
}
else {
// do not include the field
// key
auto src = static_cast<TRI_json_t*>(TRI_AddressVector(&obj->_value._objects, i));
TRI_DestroyJson(TRI_UNKNOWN_MEM_ZONE, src);
// value
TRI_DestroyJson(TRI_UNKNOWN_MEM_ZONE, src + 1);
}
}
// finally adjust the length of the patched JSON so the NULL fields at
// the end will not be dumped
obj->_value._objects._length = j;
}
else {
// no restrictions
TRI_ASSERT(restrictionType == CollectionExport::Restrictions::RESTRICTION_NONE);
}
int res = TRI_StringifyJson(buffer.stringBuffer(), json.json());
if (res != TRI_ERROR_NO_ERROR) {

View File

@ -204,6 +204,7 @@ HttpResponse::HttpResponseCode HttpResponse::responseCode (int code) {
case TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID:
case TRI_ERROR_CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES:
case TRI_ERROR_CLUSTER_MUST_NOT_SPECIFY_KEY:
case TRI_ERROR_TYPE_ERROR:
return BAD;
case TRI_ERROR_ARANGO_READ_ONLY: