mirror of https://gitee.com/bigwinds/arangodb
Bug fix/permission tests (#8890)
This commit is contained in:
  parent 4837c7b864
  commit f3a2eaf6d9
@@ -266,6 +266,7 @@ void V8DealerFeature::validateOptions(std::shared_ptr<ProgramOptions> options) {
   ctx->normalizePath(_appPath, "javascript.app-path", false);
   v8security->addToInternalWhitelist(_appPath, FSAccessType::READ);
   v8security->addToInternalWhitelist(_appPath, FSAccessType::WRITE);
+  v8security->dumpAccessLists();
 
   // use a minimum of 1 second for GC
   if (_gcFrequency < 1) {
@@ -1067,6 +1067,8 @@ void V8ShellFeature::initGlobals() {
     v8security->addToInternalWhitelist(FileUtils::currentDirectory().result(), FSAccessType::READ);
   }
 
+  v8security->dumpAccessLists();
+
   // we take the last entry in _startupDirectory as global path;
   // all the other entries are only used for the modules
@@ -48,7 +48,6 @@ const RESET = require('internal').COLORS.COLOR_RESET;
 
 const functionsDocumentation = {
   'arangosh': 'arangosh exit codes tests',
-  'permissions': 'arangosh javascript access permissions'
 };
 const optionsDocumentation = [
   ' - `skipShebang`: if set, the shebang tests are skipped.'
@@ -56,7 +55,6 @@ const optionsDocumentation = [
 
 const testPaths = {
   'arangosh': [],
-  'permissions': [tu.pathForTesting('client/permissions')]
 };
 
 // //////////////////////////////////////////////////////////////////////////////
@@ -250,47 +248,10 @@ function arangosh (options) {
   return ret;
 }
 
-function permissions(options) {
-  let res = {};
-  let filtered = {};
-  let rootDir = fs.join(fs.getTempPath(), 'permissions');
-  const tests = tu.scanTestPaths(testPaths.permissions);
-
-  fs.makeDirectoryRecursive(rootDir);
-
-  tests.forEach(function (f, i) {
-    if (tu.filterTestcaseByOptions(f, options, filtered)) {
-      let content = fs.read(f);
-      content = `(function(){ const getOptions = true; ${content}
-}())`; // DO NOT JOIN WITH THE LINE ABOVE -- because of content could contain '//' at the very EOF
-
-      let testOptions = executeScript(content, true, f);
-      res[f] = tu.runInArangosh(options,
-                                {
-                                  endpoint: 'tcp://127.0.0.1:8888',
-                                  rootDir: rootDir
-                                },
-                                f,
-                                testOptions
-                               );
-    } else {
-      if (options.extremeVerbosity) {
-        print('Skipped ' + f + ' because of ' + filtered.filter);
-      }
-    }
-  });
-  return res;
-}
-
 exports.setup = function (testFns, defaultFns, opts, fnDocs, optionsDoc, allTestPaths) {
   Object.assign(allTestPaths, testPaths);
   testFns['arangosh'] = arangosh;
-  testFns['permissions'] = permissions;
-
   defaultFns.push('arangosh');
-  defaultFns.push('permissions');
-
   opts['skipShebang'] = false;
-
   for (var attrname in functionsDocumentation) { fnDocs[attrname] = functionsDocumentation[attrname]; }
@@ -0,0 +1,110 @@
+/* jshint strict: false, sub: true */
+/* global print */
+'use strict';
+
+// //////////////////////////////////////////////////////////////////////////////
+// / DISCLAIMER
+// /
+// / Copyright 2016 ArangoDB GmbH, Cologne, Germany
+// / Copyright 2014 triagens GmbH, Cologne, Germany
+// /
+// / Licensed under the Apache License, Version 2.0 (the "License")
+// / you may not use this file except in compliance with the License.
+// / You may obtain a copy of the License at
+// /
+// /     http://www.apache.org/licenses/LICENSE-2.0
+// /
+// / Unless required by applicable law or agreed to in writing, software
+// / distributed under the License is distributed on an "AS IS" BASIS,
+// / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// / See the License for the specific language governing permissions and
+// / limitations under the License.
+// /
+// / Copyright holder is ArangoDB GmbH, Cologne, Germany
+// /
+// / @author Max Neunhoeffer
+// //////////////////////////////////////////////////////////////////////////////
+
+const _ = require('lodash');
+const time = require('internal').time;
+const fs = require('fs');
+const yaml = require('js-yaml');
+
+const pu = require('@arangodb/process-utils');
+const tu = require('@arangodb/test-utils');
+
+const toArgv = require('internal').toArgv;
+const executeScript = require('internal').executeScript;
+const executeExternalAndWait = require('internal').executeExternalAndWait;
+
+const platform = require('internal').platform;
+
+// const BLUE = require('internal').COLORS.COLOR_BLUE;
+const CYAN = require('internal').COLORS.COLOR_CYAN;
+const GREEN = require('internal').COLORS.COLOR_GREEN;
+const RED = require('internal').COLORS.COLOR_RED;
+const RESET = require('internal').COLORS.COLOR_RESET;
+// const YELLOW = require('internal').COLORS.COLOR_YELLOW;
+
+const functionsDocumentation = {
+  'arangosh': 'arangosh exit codes tests',
+  'permissions': 'arangosh javascript access permissions'
+};
+const optionsDocumentation = [
+  ' - `skipShebang`: if set, the shebang tests are skipped.'
+];
+
+const testPaths = {
+  'permissions': [tu.pathForTesting('client/permissions')]
+};
+
+function permissions(options) {
+  let res = {};
+  let filtered = {};
+  let rootDir = fs.join(fs.getTempPath(), 'permissions');
+  const tests = tu.scanTestPaths(testPaths.permissions);
+
+  fs.makeDirectoryRecursive(rootDir);
+
+  tests.forEach(function (f, i) {
+    if (tu.filterTestcaseByOptions(f, options, filtered)) {
+      let t = f.split(fs.pathSeparator);
+      let testName = t[t.length - 1].replace(/\.js/, '');
+      let instanceRoot = fs.join(rootDir, testName);
+      let testResultJson = fs.join(instanceRoot, 'testresult.json');
+      process.env['RESULT'] = testResultJson;
+      fs.makeDirectoryRecursive(instanceRoot);
+      pu.cleanupDBDirectoriesAppend(instanceRoot);
+
+      let content = fs.read(f);
+      content = `(function(){ const getOptions = true; ${content}
+}())`; // DO NOT JOIN WITH THE LINE ABOVE -- because of content could contain '//' at the very EOF
+      let testOptions = executeScript(content, true, f);
+
+      res[f] = tu.runInArangosh(options,
+                                {
+                                  endpoint: 'tcp://127.0.0.1:8888',
+                                  rootDir: instanceRoot
+                                },
+                                f,
+                                testOptions
+                               );
+    } else {
+      if (options.extremeVerbosity) {
+        print('Skipped ' + f + ' because of ' + filtered.filter);
+      }
+    }
+  });
+  return res;
+}
+
+exports.setup = function (testFns, defaultFns, opts, fnDocs, optionsDoc, allTestPaths) {
+  Object.assign(allTestPaths, testPaths);
+  testFns['permissions'] = permissions;
+  defaultFns.push('permissions');
+  opts['skipShebang'] = false;
+
+  for (var attrname in functionsDocumentation) { fnDocs[attrname] = functionsDocumentation[attrname]; }
+  for (var i = 0; i < optionsDocumentation.length; i++) { optionsDoc.push(optionsDocumentation[i]); }
+};
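Note: the driver above relies on a two-pass convention for every file under client/permissions. The file is first evaluated locally with getOptions set to true, in which case it only returns the startup options (whitelists, temp path, ...) for the dedicated test instance; afterwards the same file is run as a normal jsunity test inside that instance. A minimal sketch of such a test file follows, assuming a hypothetical name test-read-temp.js; the jsunity calls mirror the usual ArangoDB test layout and are illustrative only.

    /* global getOptions */
    'use strict';
    const fs = require('fs');

    if (getOptions === true) {
      // pass 1: only report how the test instance has to be started
      return {
        'javascript.harden': 'true',
        'javascript.files-whitelist': [
          fs.escapePath('^' + fs.getTempPath())
        ]
      };
    }

    // pass 2: the actual test, executed inside the instance configured above
    const jsunity = require('jsunity');

    function permissionSuite() {
      return {
        testCanListTempPath: function () {
          // reading below the whitelisted temp path must not throw
          fs.list(fs.getTempPath());
        }
      };
    }

    jsunity.run(permissionSuite);
    return jsunity.done();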
@@ -574,5 +574,7 @@ global.DEFINE_MODULE('fs', (function () {
     delete global.FS_ZIP_FILE;
   }
 
+  exports.escapePath = function (s) { return s.replace(/\\/g,'\\\\'); };
+
   return exports;
 }()));
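Note: the escapePath helper added here simply doubles every backslash. The permission options feed file-system paths into regular expressions (javascript.files-whitelist), and on Windows an unescaped sequence such as '\t' or '\U' inside a path would otherwise be read as a regex escape. A small usage sketch; the concrete temp path is only an example.

    const fs = require('fs');

    const tempPath = fs.getTempPath();            // e.g. C:\Users\test\tmp on Windows
    const entry = '^' + fs.escapePath(tempPath);  // backslashes doubled: C:\\Users\\test\\tmp

    // entry can now be used as a javascript.files-whitelist pattern
    print(entry);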
@@ -1,6 +1,8 @@
 /* global print */
 'use strict';
 
+const fs = require('fs');
+
 var runTest = require('jsunity').runTest,
   _ = require('lodash'),
   internal = require('internal');
@@ -66,7 +68,7 @@ function runJSUnityTests (tests) {
 
     internal.wait(0); // force GC
   });
-  require('fs').write(instanceinfo.rootDir + '/testresult.json', JSON.stringify(allResults));
+  fs.write(fs.join(instanceinfo.rootDir, 'testresult.json'), JSON.stringify(allResults));
 
   if (failed.length > 1) {
     print('The following ' + failed.length + ' test files produced errors: ', failed.join(', '));
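Note: building the result file name with fs.join instead of concatenating '/' keeps the path correct on Windows as well, which matters for the path handling fixes further down in this commit. A minimal sketch, assuming instanceinfo.rootDir is any writable directory.

    const fs = require('fs');

    const rootDir = fs.getTempPath();                        // stand-in for instanceinfo.rootDir
    const resultFile = fs.join(rootDir, 'testresult.json');  // platform-specific separator
    fs.write(resultFile, JSON.stringify({ status: true }));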
@@ -278,8 +278,16 @@ void V8SecurityFeature::start() {
 
   _filesWhitelistRegex =
       std::regex(_filesWhitelist, std::regex::nosubs | std::regex::ECMAScript);
 
 }
 
+void V8SecurityFeature::dumpAccessLists() const {
+  LOG_TOPIC("2cafe", DEBUG, arangodb::Logger::SECURITY) << "files whitelisted by user:" << _filesWhitelist;
+  LOG_TOPIC("2bad4", DEBUG, arangodb::Logger::SECURITY) << "internal read whitelist:" << _readWhitelist;
+  LOG_TOPIC("beef2", DEBUG, arangodb::Logger::SECURITY) << "internal write whitelist:" << _writeWhitelist;
+}
+
+
 void V8SecurityFeature::addToInternalWhitelist(std::string const& inItem, FSAccessType type) {
   // This function is not efficient and we would not need the _readWhitelist
   // to be persistent. But the persistence will help in debugging and
@@ -294,7 +302,11 @@ void V8SecurityFeature::addToInternalWhitelist(std::string const& inItem, FSAccessType type) {
     re = &_writeWhitelistRegex;
   }
 
-  auto item = canonicalpath(inItem + TRI_DIR_SEPARATOR_STR);
+  auto item = canonicalpath(inItem);
+  if ((item.length() > 0) &&
+      (item[item.length() - 1] != TRI_DIR_SEPARATOR_CHAR)) {
+    item += TRI_DIR_SEPARATOR_STR;
+  }
   auto path = "^" + arangodb::basics::StringUtils::escapeRegexParams(item);
   set->emplace(std::move(path));
   expression->clear();
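Note: addToInternalWhitelist() canonicalizes the path, guarantees exactly one trailing directory separator, escapes regex metacharacters and anchors the pattern with '^'; a file access is later allowed if the path matches any of the collected expressions. A JavaScript sketch of the same idea; escapeRegex is a hypothetical stand-in for StringUtils::escapeRegexParams.

    // hypothetical stand-in for StringUtils::escapeRegexParams
    function escapeRegex(s) { return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); }

    function addToWhitelist(list, inItem, sep) {
      let item = inItem;
      if (item.length > 0 && item[item.length - 1] !== sep) {
        item += sep;                      // entries always end in a separator
      }
      list.push('^' + escapeRegex(item)); // anchored at the beginning of the path
    }

    function isAllowed(list, path) {
      return list.some(function (e) { return new RegExp(e).test(path); });
    }

    let whitelist = [];
    addToWhitelist(whitelist, '/tmp/permissions', '/');
    print(isAllowed(whitelist, '/tmp/permissions/test1/testresult.json')); // true
    print(isAllowed(whitelist, '/etc/passwd'));                            // false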
@@ -45,6 +45,7 @@ class V8SecurityFeature final : public application_features::ApplicationFeature {
   void validateOptions(std::shared_ptr<options::ProgramOptions>) override final;
   void prepare() override final;
   void start() override final;
+  void dumpAccessLists() const;
 
   /// @brief tests if in the current security context it is allowed to
   /// start, collect and kill external processes
@@ -335,6 +335,7 @@ void ArangoGlobalContext::normalizePath(std::vector<std::string>& paths,
 void ArangoGlobalContext::normalizePath(std::string& path, char const* whichPath, bool fatal) {
   StringUtils::rTrimInPlace(path, TRI_DIR_SEPARATOR_STR);
 
+  arangodb::basics::FileUtils::normalizePath(path);
   if (!arangodb::basics::FileUtils::exists(path)) {
     std::string directory = arangodb::basics::FileUtils::buildFilename(_runRoot, path);
     if (!arangodb::basics::FileUtils::exists(directory)) {
@@ -45,10 +45,10 @@
 
 #include "Basics/Common.h"
 #include "Basics/StringUtils.h"
+#include "Basics/Utf8Helper.h"
 #include "Basics/directories.h"
 #include "Basics/files.h"
 #include "Basics/tri-strings.h"
-#include "Basics/Utf8Helper.h"
 #include "Logger/Logger.h"
 
 #ifdef ARANGODB_ENABLE_MAINTAINER_MODE
@@ -100,9 +100,10 @@ static void InvalidParameterHandler(const wchar_t* expression, // expression se
   buf[sizeof(buf) - 1] = '\0';
 #endif
 
-  LOG_TOPIC("e4644", ERR, arangodb::Logger::FIXME) << "Invalid handle parameter passed"
+  LOG_TOPIC("e4644", ERR, arangodb::Logger::FIXME)
+      << "Invalid handle parameter passed"
 #ifdef ARANGODB_ENABLE_MAINTAINER_MODE
       << buf;
 
   std::string bt;
   TRI_GetBacktrace(bt);
|
||||||
icu::UnicodeString fn(filename);
|
icu::UnicodeString fn(filename);
|
||||||
|
|
||||||
fileHandle =
|
fileHandle =
|
||||||
CreateFileW(reinterpret_cast<const wchar_t*>(fn.getTerminatedBuffer()), GENERIC_READ | GENERIC_WRITE,
|
CreateFileW(reinterpret_cast<const wchar_t*>(fn.getTerminatedBuffer()),
|
||||||
|
GENERIC_READ | GENERIC_WRITE,
|
||||||
FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, NULL,
|
FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, NULL,
|
||||||
(openFlags & O_APPEND) ? OPEN_ALWAYS : CREATE_NEW, 0, NULL);
|
(openFlags & O_APPEND) ? OPEN_ALWAYS : CREATE_NEW, 0, NULL);
|
||||||
|
|
||||||
|
@@ -245,8 +247,8 @@ int TRI_OPEN_WIN32(char const* filename, int openFlags) {
   }
 
   icu::UnicodeString fn(filename);
-  fileHandle = CreateFileW(reinterpret_cast<const wchar_t*>(fn.getTerminatedBuffer()), mode,
-                           FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE,
+  fileHandle = CreateFileW(reinterpret_cast<const wchar_t*>(fn.getTerminatedBuffer()),
+                           mode, FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE,
                            NULL, OPEN_EXISTING, 0, NULL);
 
   if (fileHandle == INVALID_HANDLE_VALUE) {
@@ -278,26 +280,23 @@ int TRI_STAT(char const* path, TRI_stat_t* buffer) {
 
 char* TRI_GETCWD(char* buffer, int maxlen) {
   char* rc = nullptr;
-  wchar_t* rcw;
-  int wBufLen = maxlen;
-  wchar_t* wbuf = (wchar_t*)malloc(wBufLen * sizeof(wchar_t));
-
-  if (wbuf == nullptr) {
-    return nullptr;
-  }
-  rcw = ::_wgetcwd(wbuf, wBufLen);
-
-  if (rcw != nullptr) {
-    std::string rcs;
-
-    icu::UnicodeString d(wbuf, static_cast<int32_t>(wcslen(wbuf)));
-    d.toUTF8String<std::string>(rcs);
-    if (rcs.length() + 1 < maxlen) {
-      memcpy(buffer, rcs.c_str(), rcs.length() + 1);
-      rc = buffer;
-    }
-  }
-  free(wbuf);
+  try {
+    auto wbuf = std::make_unique<wchar_t[]>(maxlen);
+    auto* rcw = ::_wgetcwd(wbuf.get(), maxlen);
+    if (rcw != nullptr) {
+      std::string rcs = fromWString(rcw);
+      if (rcs.length() + 1 < maxlen) {
+        memcpy(buffer, rcs.c_str(), rcs.length() + 1);
+
+        // tolower on hard-drive letter
+        if ((rcs.length() >= 2) && (buffer[1] == ':') &&
+            (::isupper(static_cast<unsigned char>(buffer[0])))) {
+          buffer[0] = ::tolower(static_cast<unsigned char>(buffer[0]));
+        }
+        rc = buffer;
+      }
+    }
+  } catch (...) { }
   return rc;
 }
 
@@ -324,18 +323,18 @@ arangodb::Result translateWindowsError(DWORD error) {
 }
 
 std::string windowsErrorToUTF8(DWORD errorNum) {
   LPWSTR buffer = nullptr;
   TRI_DEFER(::LocalFree(buffer);)
   size_t size =
       FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM |
                          FORMAT_MESSAGE_IGNORE_INSERTS,
                      nullptr, errorNum, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
                      (LPWSTR)&buffer, 0, nullptr);
   if (size) {
     std::wstring out(buffer, size);
     return fromWString(out);
   }
   return "error translation failed";
 }
 
 int TRI_MapSystemError(DWORD error) {
@@ -533,11 +532,12 @@ void TRI_LogWindowsEventlog(char const* func, char const* file, int line,
   buf[sizeof(buf) - 1] = '\0';
 
   icu::UnicodeString ubufs[]{icu::UnicodeString(buf, len), icu::UnicodeString(file),
                              icu::UnicodeString(func), icu::UnicodeString(linebuf)};
-  LPCWSTR buffers[] = {reinterpret_cast<const wchar_t*>(ubufs[0].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[1].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[2].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[3].getTerminatedBuffer()), nullptr};
+  LPCWSTR buffers[] = {
+      reinterpret_cast<const wchar_t*>(ubufs[0].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[1].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[2].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[3].getTerminatedBuffer()), nullptr};
   // Try to get messages through to windows syslog...
   if (!ReportEventW(hEventLog, EVENTLOG_ERROR_TYPE, UI_CATEGORY,
                     MSG_INVALID_COMMAND, NULL, 4, 0, buffers, NULL)) {
@@ -558,11 +558,12 @@ void TRI_LogWindowsEventlog(char const* func, char const* file, int line,
   buf[sizeof(buf) - 1] = '\0';
 
   icu::UnicodeString ubufs[]{icu::UnicodeString(buf, len), icu::UnicodeString(file),
                              icu::UnicodeString(func), icu::UnicodeString(linebuf)};
-  LPCWSTR buffers[] = {reinterpret_cast<const wchar_t*>(ubufs[0].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[1].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[2].getTerminatedBuffer()),
-                       reinterpret_cast<const wchar_t*>(ubufs[3].getTerminatedBuffer()), nullptr};
+  LPCWSTR buffers[] = {
+      reinterpret_cast<const wchar_t*>(ubufs[0].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[1].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[2].getTerminatedBuffer()),
+      reinterpret_cast<const wchar_t*>(ubufs[3].getTerminatedBuffer()), nullptr};
   // Try to get messages through to windows syslog...
   if (!ReportEventW(hEventLog, EVENTLOG_ERROR_TYPE, UI_CATEGORY,
                     MSG_INVALID_COMMAND, NULL, 4, 0, buffers, NULL)) {
@@ -132,6 +132,7 @@ LogTopic Logger::REPLICATION("replication", LogLevel::INFO);
 LogTopic Logger::REQUESTS("requests", LogLevel::FATAL);  // suppress
 LogTopic Logger::RESTORE("restore", LogLevel::INFO);
 LogTopic Logger::ROCKSDB("rocksdb", LogLevel::WARN);
+LogTopic Logger::SECURITY("security", LogLevel::INFO);
 LogTopic Logger::SSL("ssl", LogLevel::WARN);
 LogTopic Logger::STARTUP("startup", LogLevel::INFO);
 LogTopic Logger::STATISTICS("statistics", LogLevel::INFO);
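Note: the new 'security' log topic is what the dumpAccessLists() output above is logged under, at DEBUG level. To actually see the dumped whitelists, the topic has to be raised to debug, for example via --log.level security=debug at startup, or at runtime through the log-level API. A small arangosh sketch, assuming the standard /_admin/log/level endpoint:

    // raise the 'security' topic to DEBUG so the whitelist dump becomes visible
    arango.PUT('/_admin/log/level', { security: 'DEBUG' });

    // verify the current level
    print(arango.GET('/_admin/log/level').security); // "DEBUG"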
@@ -154,6 +154,7 @@ class Logger {
   static LogTopic REQUESTS;
   static LogTopic RESTORE;
   static LogTopic ROCKSDB;
+  static LogTopic SECURITY;
   static LogTopic SSL;
   static LogTopic STARTUP;
   static LogTopic STATISTICS;
@@ -44,23 +44,15 @@ const internal = require('internal');
 
 let rootDir = fs.join(fs.getTempPath(), '..');
 let subInstanceTemp; //not set for subinstance
-let testResults = fs.join(fs.getTempPath(), 'testresult.json'); // where we want to put our results ;-)
 let testFilesDir = fs.join(rootDir, 'test_file_tree');
 
 if (getOptions === true) {
   rootDir = fs.join(fs.getTempPath(), 'permissions');
   subInstanceTemp = fs.join(rootDir, 'subinstance_temp_directory');
-  testResults = fs.join(subInstanceTemp, 'testresult.json'); // where we want to put our results ;-)
   testFilesDir = fs.join(rootDir, 'test_file_tree');
   fs.makeDirectoryRecursive(subInstanceTemp);
   fs.makeDirectoryRecursive(testFilesDir);
 
-  //create al symlink from subinstance test result to test result expecte by calling arangosh
-  let callerResult = fs.join(rootDir, 'testresult.json');
-  try {
-    fs.remove(callerResult);
-  } catch(ex) {}
-  fs.linkFile(testResults, callerResult);
 }
 
 
@@ -134,9 +126,12 @@ if (getOptions === true) {
   fs.write(subLevelAllowedCopyFile, 'this file is allowed.\n');
   // N/A fs.write(subLevelForbiddenFile, 'forbidden fruits are tasty!\n');
 
+  try {
     fs.linkFile(topLevelForbiddenFile, intoTopLevelForbidden);
     fs.linkFile(topLevelAllowedFile, intoTopLevelAllowed);
+  } catch (ex) {
+    internal.print("unable to create symlinks" + ex);
+  }
 
   fs.write(topLevelAllowedReadCSVFile, CSV);
   fs.write(topLevelForbiddenReadCSVFile, CSV);
@@ -146,14 +141,13 @@ if (getOptions === true) {
   fs.write(topLevelForbiddenReadJSONFile, JSONText);
   fs.write(subLevelAllowedReadJSONFile, JSONText);
 
   return {
     'temp.path': subInstanceTemp, // Adjust the temp-path to match our current temp path
     'javascript.files-whitelist': [
-      '^' + testResults,
-      '^' + topLevelAllowed,
-      '^' + subLevelAllowed,
-      '^' + topLevelAllowedRecursive
+      fs.escapePath('^' + process.env['RESULT']),
+      fs.escapePath('^' + topLevelAllowed),
+      fs.escapePath('^' + subLevelAllowed),
+      fs.escapePath('^' + topLevelAllowedRecursive)
     ]
   };
 }
 
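Note: the whitelist entry for the result file now comes from the RESULT environment variable. The permissions testsuite (see the new permissions.js above) creates a per-test instance directory, exports the path of its testresult.json as RESULT, and the test then whitelists exactly that file. A short sketch of the flow, with paths shown as examples only:

    const fs = require('fs');

    // set by the driver before the test instance is started, e.g.
    // <tmp>/permissions/<testName>/testresult.json
    const resultPath = process.env['RESULT'];

    // ends up in javascript.files-whitelist (backslashes doubled for the regex)
    const whitelistEntry = fs.escapePath('^' + resultPath);

    // inside the test, writing the result file is therefore permitted
    fs.write(resultPath, JSON.stringify({ status: true }));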
@@ -16,7 +16,7 @@ if (getOptions === true) {
     'server.jwt-secret': 'abc123',
     'javascript.harden' : 'true',
     'javascript.files-whitelist': [
-      '^' + testPath, // we need to call isDirectory (internal.pathForTesting) in
+      '^' + fs.escapePath(testPath), // we need to call isDirectory (internal.pathForTesting) in
                       // the server which is for bidden in not whitelisted paths
     ],
     // tests/js/common/test-data/apps/server-security/index.js