Feature/jenkins pipeline (#2669)

* added resilience
Frank Celler authored on 2017-06-25 23:21:54 +02:00, committed by GitHub
parent 91d08645f7
commit f20549959e
26 changed files with 440 additions and 205 deletions


@@ -2,13 +2,28 @@
properties([
parameters([
booleanParam(
defaultValue: false,
description: 'build and run tests on Linux',
name: 'Linux'
),
booleanParam(
defaultValue: false,
description: 'build and run tests on Mac',
name: 'Mac'
),
booleanParam(
defaultValue: false,
description: 'build and run tests in Windows',
name: 'Windows'
),
booleanParam(
defaultValue: false,
description: 'clean build directories',
name: 'cleanBuild'
),
booleanParam(
defaultValue: true,
defaultValue: false,
description: 'build and run tests for community',
name: 'buildCommunity'
),
@@ -17,21 +32,6 @@ properties([
description: 'build and run tests for enterprise',
name: 'buildEnterprise'
),
booleanParam(
defaultValue: true,
description: 'build and run tests on Linux',
name: 'buildLinux'
),
booleanParam(
defaultValue: false,
description: 'build and run tests on Mac',
name: 'buildMac'
),
booleanParam(
defaultValue: false,
description: 'build and run tests in Windows',
name: 'buildWindows'
),
booleanParam(
defaultValue: false,
description: 'run jslint',
@@ -63,13 +63,13 @@ buildCommunity = params.buildCommunity
buildEnterprise = params.buildEnterprise
// build linux
buildLinux = params.buildLinux
buildLinux = params.Linux
// build mac
buildMac = params.buildMac
buildMac = params.Mac
// build windows
buildWindows = params.buildWindows
buildWindows = params.Windows
// run jslint
runJslint = params.runJslint
@@ -93,10 +93,10 @@ enterpriseRepo = 'https://github.com/arangodb/enterprise'
// Jenkins credentials for enterprise repository
credentials = '8d893d23-6714-4f35-a239-c847c798e080'
// binaries to copy for testing
binariesCommunity = 'build/**,etc/**,Installation/Pipeline/**,js/**,scripts/**,UnitTests/**,utils/**'
binariesEnterprise = binariesCommunity + ',enterprise/js/**'
// jenkins cache
cacheDir = '/vol/cache/' + env.JOB_NAME.replaceAll('%', '_')
// execute a powershell
def PowerShell(psCmd) {
bat "powershell.exe -NonInteractive -ExecutionPolicy Bypass -Command \"\$ErrorActionPreference='Stop';[Console]::OutputEncoding=[System.Text.Encoding]::UTF8;$psCmd;EXIT \$global:LastExitCode\""
}
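For reference, a minimal call of this helper, mirroring the Windows build step further down with `edition` expanded to `community`:

// the bat wrapper exits with $global:LastExitCode, so a failing script fails the step
PowerShell('. .\\Installation\\Pipeline\\build_community_windows.ps1')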
@@ -209,12 +209,12 @@ def checkCommitMessages() {
}
}
echo 'Linux: ' + (buildLinux ? 'true' : 'false')
echo 'Mac: ' + (buildMac ? 'true' : 'false')
echo 'Windows: ' + (buildWindows ? 'true' : 'false')
echo 'Clean Build: ' + (cleanBuild ? 'true' : 'false')
echo 'Build Community: ' + (buildCommunity ? 'true' : 'false')
echo 'Build Enterprise: ' + (buildEnterprise ? 'true' : 'false')
echo 'Build Linux: ' + (buildLinux ? 'true' : 'false')
echo 'Build Mac: ' + (buildMac ? 'true' : 'false')
echo 'Build Windows: ' + (buildWindows ? 'true' : 'false')
echo 'Run Jslint: ' + (runJslint ? 'true' : 'false')
echo 'Run Resilience: ' + (runResilience ? 'true' : 'false')
echo 'Run Tests: ' + (runTests ? 'true' : 'false')
@@ -226,14 +226,12 @@ def checkCommitMessages() {
def stashSourceCode() {
sh 'rm -f source.*'
sh 'find -L . -type l -delete'
sh 'zip -r -1 -x "*tmp" -x ".git" -y -q source.zip *'
if (buildLinux || buildMac) {
sh 'tar -c -f source.tar --exclude "source.*" --exclude "*tmp" --exclude ".git" *'
stash includes: 'source.*', name: 'sourceTar'
}
if (buildWindows) {
stash includes: '**', excludes: '*tmp,.git,source.*', name: 'source'
lock('cache') {
sh 'mkdir -p ' + cacheDir
sh 'mv -f source.zip ' + cacheDir + '/source.zip'
}
}
@@ -245,32 +243,95 @@ def unstashSourceCode(os) {
bat 'del /F /Q *'
}
def name = env.JOB_NAME
if (os == 'linux' || os == 'mac') {
unstash 'sourceTar'
sh 'tar -x -p -f source.tar'
sh 'mkdir -p artefacts'
lock('cache') {
sh 'scp "jenkins@c1:' + cacheDir + '/source.zip" source.zip'
}
sh 'unzip -o -q source.zip'
}
else if (os == 'windows') {
unstash 'source'
lock('cache') {
bat 'scp -F c:/Users/jenkins/ssh_config "jenkins@c1:' + cacheDir + '/source.zip" source.zip'
}
if (!fileExists('artefacts')) {
bat 'mkdir artefacts'
bat 'c:\\cmake\\bin\\cmake -E tar xf source.zip'
}
}
def stashBuild(edition, os) {
def name = 'build-' + edition + '-' + os + '.zip'
if (os == 'linux' || os == 'mac') {
sh 'rm -f ' + name
sh 'zip -r -1 -y -q ' + name + ' build-' + edition
lock('cache') {
sh 'scp ' + name + ' "jenkins@c1:' + cacheDir + '"'
}
}
else if (os == 'windows') {
bat 'del /F /q ' + name
PowerShell('Compress-Archive -Path build-' + edition + ' -DestinationPath ' + name)
lock('cache') {
bat 'scp -F c:/Users/jenkins/ssh_config ' + name + ' "jenkins@c1:' + cacheDir + '"'
}
}
}
def stashBinaries(edition, os) {
if (edition == 'community') {
stash includes: binariesCommunity, name: 'build-' + edition + '-' + os
def unstashBuild(edition, os) {
def name = 'build-' + edition + '-' + os + '.zip'
if (os == 'linux' || os == 'mac') {
lock('cache') {
sh 'scp "jenkins@c1:' + cacheDir + '/' + name + '" ' + name
}
sh 'unzip -o -q ' + name
}
else if (edition == 'enterprise') {
stash includes: binariesEnterprise, name: 'build-' + edition + '-' + os
else if (os == 'windows') {
lock('cache') {
bat 'scp -F c:/Users/jenkins/ssh_config "jenkins@c1:' + cacheDir + '/' + name + '" ' + name
}
bat 'c:\\cmake\\bin\\cmake -E tar xf ' + name
}
}
def stashBinaries(edition, os) {
def name = 'binaries-' + edition + '-' + os + '.zip'
if (os == 'linux' || os == 'mac') {
def dirs = 'build etc Installation/Pipeline js scripts UnitTests utils resilience'
if (edition == 'community') {
sh 'zip -r -1 -y -q ' + name + ' ' + dirs
}
else if (edition == 'enterprise') {
sh 'zip -r -1 -y -q ' + name + ' ' + dirs + ' enterprise/js'
}
lock('cache') {
sh 'scp ' + name + ' "jenkins@c1:' + cacheDir + '"'
}
}
}
def unstashBinaries(edition, os) {
def name = 'binaries-' + edition + '-' + os + '.zip'
sh 'rm -rf *'
unstash 'build-' + edition + '-' + os
if (os == 'linux' || os == 'mac') {
lock('cache') {
sh 'scp "jenkins@c1:' + cacheDir + '/' + name + '" ' + name
}
sh 'unzip -o -q ' + name
}
}
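The stash/unstash helpers above all repeat the same lock('cache') + scp round-trip against the cache host c1. Purely as an illustration (this helper is not part of the commit), the download half of that pattern could be factored out like this:

// hypothetical helper, not in this commit: fetch an archive from the
// shared cache host while holding the cache lock (linux/mac variant)
def fetchFromCache(name) {
    lock('cache') {
        sh 'scp "jenkins@c1:' + cacheDir + '/' + name + '" ' + name
    }
}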
// -----------------------------------------------------------------------------
@@ -280,30 +341,44 @@ def unstashBinaries(edition, os) {
def buildEdition(edition, os) {
try {
if (os == 'linux' || os == 'mac') {
/*
def tarfile = 'build-' + edition + '-' + os + '.tar.gz'
cache(maxCacheSize: 50000, caches: [
[$class: 'ArbitraryFileCache',
includes: tarfile,
path: 'artefacts']]) {
if (!cleanBuild && fileExists('artefacts/' + tarfile)) {
sh 'tar -x -z -p -f artefacts/' + tarfile
}
sh 'rm -f artefacts/' + tarfile
sh './Installation/Pipeline/build_' + edition + '_' + os + '.sh 64'
sh 'GZIP=--fast tar -c -z -f artefacts/' + tarfile + ' build-' + edition
if (! cleanBuild) {
try {
unstashBuild(edition, os)
}
catch (exc) {
echo exc.toString()
}
}
*/
sh './Installation/Pipeline/build_' + edition + '_' + os + '.sh 64'
stashBuild(edition, os)
}
else if (os == 'windows') {
def builddir = 'build-' + edition + '-' + os
if (cleanBuild) {
bat 'del /F /Q build'
}
else {
try {
step($class: 'hudson.plugins.copyartifact.CopyArtifact',
projectName: "/" + "${env.JOB_NAME}",
filter: builddir + '/**')
bat 'move ' + builddir + ' build'
}
catch (exc) {
echo exc.toString()
}
}
PowerShell('. .\\Installation\\Pipeline\\build_' + edition + '_windows.ps1')
bat 'move build ' + builddir
archiveArtifacts allowEmptyArchive: true, artifacts: builddir + '/**', defaultExcludes: false
}
}
catch (exc) {
@@ -315,49 +390,69 @@ def buildEdition(edition, os) {
}
}
def buildStep(os, edition, full) {
def buildStepCheck(edition, os, full) {
if (full && ! buildFull) {
echo "Not building combination " + os + " " + edition + " "
return
return false
}
if (os == 'linux' && ! buildLinux) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'mac' && ! buildMac) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'windows' && ! buildWindows) {
echo "Not building " + os + " version"
return
return false
}
if (edition == 'enterprise' && ! buildEnterprise) {
echo "Not building " + edition + " version"
return
return false
}
if (edition == 'community' && ! buildCommunity) {
echo "Not building " + edition + " version"
return
return false
}
node(os) {
unstashSourceCode(os)
buildEdition(edition, os)
return true
}
if (runTests || runResilience) {
def buildStep(edition, os) {
return {
node(os) {
unstashSourceCode(os)
buildEdition(edition, os)
stashBinaries(edition, os)
}
}
}
def buildStepParallel() {
def branches = [:]
def full = false
for (edition in ['community', 'enterprise']) {
for (os in ['linux', 'mac', 'windows']) {
if (buildStepCheck(edition, os, full)) {
def name = 'build-' + edition + '-' + os
branches[name] = buildStep(edition, os)
}
}
}
parallel branches
}
// -----------------------------------------------------------------------------
// --SECTION-- SCRIPTS TESTS
// --SECTION-- SCRIPTS JSLINT
// -----------------------------------------------------------------------------
def jslint() {
@@ -371,27 +466,36 @@ def jslint() {
}
def jslintStep() {
def edition = 'community'
def os = 'linux'
if (runJslint) {
if (buildLinux) {
return {
node(os) {
echo "Running jslint test"
unstashBinaries('community', os)
try {
unstashBinaries(edition, os)
}
catch (exc) {
echo exc.toString()
currentBuild.result = 'UNSTABLE'
return
}
jslint()
}
}
else {
echo "Not building " + os + " version"
}
}
else {
echo "Not running tests"
}
}
def testEdition(edition, os, type, engine) {
// -----------------------------------------------------------------------------
// --SECTION-- SCRIPTS TESTS
// -----------------------------------------------------------------------------
def testEdition(edition, os, mode, engine) {
try {
sh './Installation/Pipeline/test_' + type + '_' + edition + '_' + engine + '_' + os + '.sh 8'
sh './Installation/Pipeline/test_' + mode + '_' + edition + '_' + engine + '_' + os + '.sh 10'
}
catch (exc) {
echo exc.toString()
@@ -402,92 +506,166 @@ def testEdition(edition, os, type, engine) {
}
}
def testStep(edition, os, mode, engine, full) {
def testCheck(edition, os, mode, engine, full) {
if (! runTests) {
echo "Not running tests"
return
return false
}
if (full && ! buildFull) {
echo "Not building combination " + os + " " + edition + " "
return
return false
}
if (os == 'linux' && ! buildLinux) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'mac' && ! buildMac) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'windows' && ! buildWindows) {
echo "Not building " + os + " version"
return
return false
}
if (edition == 'enterprise' && ! buildEnterprise) {
echo "Not building " + edition + " version"
return
return false
}
if (edition == 'community' && ! buildCommunity) {
echo "Not building " + edition + " version"
return
return false
}
node(os) {
echo "Running " + mode + " " + edition + " " + engine + " " + os + " test"
return true
}
unstashBinaries(edition, os)
testEdition(edition, os, mode, engine)
def testName(edition, os, mode, engine, full) {
def name = "test-" + mode + '-' + edition + '-' + engine + '-' + os;
if (! testCheck(edition, os, mode, engine, full)) {
name = "DISABLED-" + name
}
return name
}
def testStep(edition, os, mode, engine) {
return {
node(os) {
echo "Running " + mode + " " + edition + " " + engine + " " + os + " test"
unstashBinaries(edition, os)
testEdition(edition, os, mode, engine)
}
}
}
def testEditionResilience(edition, os, engine) {
echo "missing"
sh "ls -l"
def testStepParallel() {
def branches = [:]
def full = false
for (edition in ['community', 'enterprise']) {
for (os in ['linux', 'mac', 'windows']) {
for (mode in ['cluster', 'singleserver']) {
for (engine in ['mmfiles', 'rocksdb']) {
if (testCheck(edition, os, mode, engine, full)) {
def name = testName(edition, os, mode, engine, full)
branches[name] = testStep(edition, os, mode, engine)
}
}
}
}
}
if (runJslint) {
branches['jslint'] = jslintStep()
}
parallel branches
}
def testResilienceStep(os, edition, engine, full) {
// -----------------------------------------------------------------------------
// --SECTION-- SCRIPTS RESILIENCE
// -----------------------------------------------------------------------------
def testResilience(os, engine, foxx) {
sh './Installation/Pipeline/test_resilience_' + foxx + '_' + engine + '_' + os + '.sh'
}
def testResilienceCheck(os, engine, foxx, full) {
if (! runResilience) {
echo "Not running resilience tests"
return
return false
}
if (os == 'linux' && ! buildLinux) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'mac' && ! buildMac) {
echo "Not building " + os + " version"
return
return false
}
if (os == 'windows' && ! buildWindows) {
echo "Not building " + os + " version"
return
return false
}
if (edition == 'enterprise' && ! buildEnterprise) {
echo "Not building " + edition + " version"
return
if (! buildCommunity) {
echo "Not building community version"
return false
}
if (edition == 'community' && ! buildCommunity) {
echo "Not building " + edition + " version"
return
return true
}
def testResilienceName(os, engine, foxx, full) {
def name = 'test-resilience' + '-' + foxx + '_' + engine + '-' + os;
if (! testResilienceCheck(os, engine, foxx, full)) {
name = "DISABLED-" + name
}
node(os) {
unstashBinaries(edition, os)
testEditionResilience(edition, os, engine)
return name
}
def testResilienceStep(os, engine, foxx) {
return {
node(os) {
unstashBinaries('community', os)
testResilience(os, engine, foxx)
}
}
}
def testResilienceParallel() {
def branches = [:]
def full = false
for (foxx in ['foxx', 'nofoxx']) {
for (os in ['linux', 'mac', 'windows']) {
for (engine in ['mmfiles', 'rocksdb']) {
if (testResilienceCheck(os, engine, foxx, full)) {
def name = testResilienceName(os, engine, foxx, full)
branches[name] = testResilienceStep(os, engine, foxx)
}
}
}
}
parallel branches
}
// -----------------------------------------------------------------------------
// --SECTION-- PIPELINE
// -----------------------------------------------------------------------------
@@ -502,38 +680,14 @@ stage('checkout') {
}
}
// cmake is very picky about the absolute path. Therefore never put a stage
// into an `if`
stage('build') {
parallel(
'build-community-linux': {
buildStep('linux', 'community', false)
},
'build-community-mac': { buildStep('mac', 'community', false) },
'build-community-windows': { buildStep('windows', 'community', false) },
'build-enterprise-linux': { buildStep('linux', 'enterprise', false) },
'build-enterprise-mac': { buildStep('mac', 'enterprise', true) },
'build-enterprise-windows': { buildStep('windows', 'enterprise', true) },
)
buildStepParallel()
}
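The comment above stage('build') explains why the stage itself is never wrapped in an `if`: cmake is sensitive to the absolute build path, so every stage runs unconditionally and the guards inside buildStepParallel decide which parallel branches actually get created. A minimal sketch of that pattern, with a single illustrative combination:

// sketch only: the stage always runs; per-combination guards decide
// which parallel branches exist (cf. buildStepCheck/buildStepParallel)
stage('example-build') {
    def branches = [:]
    if (buildLinux && buildCommunity) {
        branches['build-community-linux'] = { node('linux') { /* build here */ } }
    }
    parallel branches
}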
stage('test') {
parallel(
'test-cluster-community-mmfiles-linux': { testStep('community', 'linux', 'cluster', 'mmfiles', false) },
'test-cluster-community-rocksdb-linux': { testStep('community', 'linux', 'cluster', 'rocksdb', true) },
'test-cluster-enterprise-mmfiles-linux': { testStep('enterprise', 'linux', 'cluster', 'mmfiles', true) },
'test-cluster-enterprise-rocksdb-linux': { testStep('enterprise', 'linux', 'cluster', 'rocksdb', false) },
'test-singleserver-community-mmfiles-linux': { testStep('community', 'linux', 'singleserver', 'mmfiles', true) },
'test-singleserver-community-rocksdb-linux': { testStep('community', 'linux', 'singleserver', 'rocksdb', false) },
'test-singleserver-enterprise-mmfiles-linux': { testStep('enterprise', 'linux', 'singleserver', 'mmfiles', false) },
'test-singleserver-enterprise-rocksdb-linux': { testStep('enterprise', 'linux', 'singleserver', 'rocksdb', true) },
'test-resilience-community-rocksdb': { testResilienceStep('linux', 'community', 'rocksdb', false) },
'test-resilience-community-mmfiles': { testResilienceStep('linux', 'community', 'mmfiles', false) },
'jslint': { jslintStep() }
)
stage('tests') {
testStepParallel()
}
stage('resilience') {
testResilienceParallel();
}


@@ -1,5 +1,3 @@
#!/bin/bash
load=32
concurrency=$1
edition=$2
@@ -31,12 +29,29 @@ echo "CONCURRENCY: $concurrency"
echo "HOST: `hostname`"
echo "PWD: `pwd`"
mkdir -p build-$edition
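# cmake insists on a stable absolute source path: compute a hash once, cache it
# in build-$edition/location, symlink the workspace to /tmp/<hash> and build
# from that fixed path on every run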
if [ ! -f build-$edition/location ]; then
if [ "$os" == mac ]; then
(ls -l && echo "$edition $os") | md5 | awk '{print $1}' > build-$edition/location
else
(ls -l && echo "$edition $os") | md5sum | awk '{print $1}' > build-$edition/location
fi
fi
GENPATH="/tmp/`cat build-$edition/location`"
rm -f $GENPATH
ln -s `pwd` $GENPATH
cd $GENPATH
echo "GENPATH: `pwd`"
rm -rf log-output/$type.log
mkdir -p log-output
touch log-output/$type.log
(
mkdir -p build-$edition
cd build-$edition
echo "`date +%T` configuring..."
@@ -52,7 +67,9 @@ touch log-output/$type.log
if [ "$?" != 0 ]; then
if fgrep 'Re-run cmake with a different source directory' ../log-output/$type.log; then
mv location ..
rm -rf *
mv ../location .
CXXFLAGS=-fno-omit-frame-pointer \
cmake \


@@ -2,6 +2,7 @@ concurrency="$1"
mode="$2"
edition="$3"
engine="$4"
os="$5"
type="test"
@@ -32,7 +33,14 @@ else
exit 1
fi
type="${type}_linux"
if [ "$os" == linux ]; then
type="${type}_${os}"
elif [ "$os" == mac ]; then
type="${type}_${os}"
else
echo "$0: unknown os '$os', expecting 'linux' or 'mac'"
exit 1
fi
. ./Installation/Pipeline/include/test_log_info.inc
. ./Installation/Pipeline/include/test_setup_tmp.inc
@@ -41,8 +49,8 @@ type="${type}_linux"
OPTS="--storageEngine $engine --skipNondeterministic true --skipTimeCritical true --configDir etc/jenkins --skipLogAnalysis true"
if [ "$mode" == singleserver ]; then
if [ "$edition" == enterprise ]; then
ENTERPRISE_TESTS="scripts/unittest ldap --caCertFilePath $(pwd)/ldap/ca_server.pem --ldapHost 127.0.0.1 --ldapPort $PORTLDAP --minPort `expr $PORT03 + 60` --maxPort `expr $PORT03 + 69` $OPTS 2>&1"
if [ "$edition" == enterprise -a "$os" == linux ]; then
ENTERPRISE_TESTS="scripts/unittest ldap --caCertFilePath $(pwd)/ldap/ca_server.pem --ldapHost 127.0.0.1 --ldapPort $PORTLDAP --minPort `expr $PORT01 + 1000` --maxPort `expr $PORT01 + 1069` $OPTS 2>&1"
fi
echo "$type
@@ -57,30 +65,30 @@ if [ "$mode" == singleserver ]; then
scripts/unittest dfdb --minPort `expr $PORT01 + 70` --maxPort `expr $PORT01 + 79` $OPTS 2>&1
scripts/unittest dump --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 89` $OPTS 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT01 + 90` --maxPort `expr $PORT01 + 99` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 100` --maxPort `expr $PORT02 + 109` $OPTS 2>&1
scripts/unittest http_replication --minPort `expr $PORT02 + 110` --maxPort `expr $PORT02 + 119` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 129` $OPTS 2>&1
scripts/unittest replication_ongoing --minPort `expr $PORT02 + 130` --maxPort `expr $PORT02 + 139` $OPTS 2>&1
scripts/unittest replication_static --minPort `expr $PORT02 + 140` --maxPort `expr $PORT02 + 149` $OPTS 2>&1
scripts/unittest replication_sync --minPort `expr $PORT02 + 150` --maxPort `expr $PORT02 + 159` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 169` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT02 + 170` --maxPort `expr $PORT02 + 179` $OPTS 2>&1
scripts/unittest shell_replication --minPort `expr $PORT02 + 180` --maxPort `expr $PORT02 + 189` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT02 + 190` --maxPort `expr $PORT02 + 199` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/0 --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 9` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/1 --minPort `expr $PORT03 + 10` --maxPort `expr $PORT03 + 19` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/2 --minPort `expr $PORT03 + 20` --maxPort `expr $PORT03 + 29` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/3 --minPort `expr $PORT03 + 30` --maxPort `expr $PORT03 + 39` $OPTS 2>&1
scripts/unittest ssl_server --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 49` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT03 + 50` --maxPort `expr $PORT03 + 59` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT01 + 100` --maxPort `expr $PORT01 + 109` $OPTS 2>&1
scripts/unittest http_replication --minPort `expr $PORT01 + 110` --maxPort `expr $PORT01 + 119` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT01 + 120` --maxPort `expr $PORT01 + 129` $OPTS 2>&1
scripts/unittest replication_ongoing --minPort `expr $PORT01 + 130` --maxPort `expr $PORT01 + 139` $OPTS 2>&1
scripts/unittest replication_static --minPort `expr $PORT01 + 140` --maxPort `expr $PORT01 + 149` $OPTS 2>&1
scripts/unittest replication_sync --minPort `expr $PORT01 + 150` --maxPort `expr $PORT01 + 159` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT01 + 160` --maxPort `expr $PORT01 + 169` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT01 + 170` --maxPort `expr $PORT01 + 179` $OPTS 2>&1
scripts/unittest shell_replication --minPort `expr $PORT01 + 180` --maxPort `expr $PORT01 + 189` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT01 + 190` --maxPort `expr $PORT01 + 199` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/0 --minPort `expr $PORT01 + 200` --maxPort `expr $PORT01 + 209` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/1 --minPort `expr $PORT01 + 210` --maxPort `expr $PORT01 + 219` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/2 --minPort `expr $PORT01 + 220` --maxPort `expr $PORT01 + 229` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/3 --minPort `expr $PORT01 + 230` --maxPort `expr $PORT01 + 239` $OPTS 2>&1
scripts/unittest ssl_server --minPort `expr $PORT01 + 240` --maxPort `expr $PORT01 + 249` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT01 + 250` --maxPort `expr $PORT01 + 259` $OPTS 2>&1
$ENTERPRISE_TESTS
" | parallel --header 1 --results log-output --files --no-notice --load 10 --jobs $concurrency > log-output/${type}.log
elif [ "$mode" == cluster ]; then
OPTS="$OPTS --cluster true"
if [ "$edition" == enterprise ]; then
ENTERPRISE_TESTS="scripts/unittest ldap --caCertFilePath $(pwd)/ldap/ca_server.pem --ldapHost 127.0.0.1 --ldapPort $PORTLDAP --minPort `expr $PORT04 + 120` --maxPort `expr $PORT04 + 159` $OPTS 2>&1"
if [ "$edition" == enterprise -a "$os" == linux ]; then
ENTERPRISE_TESTS="scripts/unittest ldap --caCertFilePath $(pwd)/ldap/ca_server.pem --ldapHost 127.0.0.1 --ldapPort $PORTLDAP --minPort `expr $PORT01 + 1000` --maxPort `expr $PORT01 + 1039` $OPTS 2>&1"
fi
echo "$type
@@ -89,19 +97,19 @@ elif [ "$mode" == cluster ]; then
scripts/unittest authentication --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 119` $OPTS 2>&1
scripts/unittest authentication_parameters --minPort `expr $PORT01 + 120` --maxPort `expr $PORT01 + 159` $OPTS 2>&1
scripts/unittest config --minPort `expr $PORT01 + 160` --maxPort `expr $PORT01 + 199` $OPTS 2>&1
scripts/unittest dump --minPort `expr $PORT02 + 0` --maxPort `expr $PORT02 + 39` $OPTS 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT02 + 40` --maxPort `expr $PORT02 + 79` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 80` --maxPort `expr $PORT02 + 119` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 159` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 199` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 39` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 79` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/0 --minPort `expr $PORT03 + 80` --maxPort `expr $PORT03 + 119` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/1 --minPort `expr $PORT03 + 120` --maxPort `expr $PORT03 + 159` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/2 --minPort `expr $PORT03 + 160` --maxPort `expr $PORT03 + 199` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/3 --minPort `expr $PORT04 + 0` --maxPort `expr $PORT04 + 39` $OPTS 2>&1
scripts/unittest ssl_server --minPort `expr $PORT04 + 40` --maxPort `expr $PORT04 + 79` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT04 + 80` --maxPort `expr $PORT04 + 119` $OPTS 2>&1
scripts/unittest dump --minPort `expr $PORT01 + 200` --maxPort `expr $PORT01 + 239` $OPTS 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT01 + 240` --maxPort `expr $PORT01 + 279` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT01 + 280` --maxPort `expr $PORT01 + 319` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT01 + 320` --maxPort `expr $PORT01 + 359` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT01 + 360` --maxPort `expr $PORT01 + 399` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT01 + 400` --maxPort `expr $PORT01 + 439` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT01 + 440` --maxPort `expr $PORT01 + 479` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/0 --minPort `expr $PORT01 + 480` --maxPort `expr $PORT01 + 519` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/1 --minPort `expr $PORT01 + 520` --maxPort `expr $PORT01 + 559` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/2 --minPort `expr $PORT01 + 560` --maxPort `expr $PORT01 + 599` $OPTS 2>&1
scripts/unittest shell_server_aql --testBuckets 4/3 --minPort `expr $PORT01 + 600` --maxPort `expr $PORT01 + 639` $OPTS 2>&1
scripts/unittest ssl_server --minPort `expr $PORT01 + 640` --maxPort `expr $PORT01 + 679` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT01 + 680` --maxPort `expr $PORT01 + 719` $OPTS 2>&1
$ENTERPRISE_TESTS
" | parallel --header 1 --results log-output --files --no-notice --load 10 --jobs $concurrency > log-output/${type}.log
fi


@@ -0,0 +1,33 @@
foxx="$1"
engine="$2"
os="$3"
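# reserve a base port from the shared allocator and make sure the EXIT trap
# returns it even when the tests fail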
PORT01=`./Installation/Pipeline/port.sh`
PORTTRAP="./Installation/Pipeline/port.sh --clean $PORT01 ;"
trap "$PORTTRAP" EXIT
(
cd resilience
rm -f core.* build etc js
ln -s ../build .
ln -s ../etc .
ln -s ../js .
npm install
./build/bin/arangod --version
if [ "$foxx" == yes ]; then
TESTS=$(find test/* -name "*foxx*")
else
TESTS=$(find test/* -not -name "*foxx*")
fi
MIN_PORT=`expr $PORT01 + 0` \
MAX_PORT=`expr $PORT01 + 1999` \
PORT_OFFSET=10 \
RESILIENCE_ARANGO_BASEPATH=. \
ARANGO_STORAGE_ENGINE=rocksdb \
npm run test-jenkins -- $TESTS
)


@@ -1,28 +1,12 @@
PORTTRAP=""
LDAPTRAP=""
PORTLDAP=""
PORT01=`./Installation/Pipeline/port.sh`
PORTLDAP=`expr $PORT01 + 199`
if [ "$mode" == singleserver ]; then
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`
PORTTRAP="./Installation/Pipeline/port.sh --clean $PORT01 ;"
PORTLDAP=`expr $PORT03 + 199`
PORTTRAP="./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03 ;"
elif [ "$mode" == cluster ]; then
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`
PORT04=`./Installation/Pipeline/port.sh`
PORTLDAP=`expr $PORT04 + 199`
PORTTRAP="./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03 $PORT04 ;"
fi
if [ "$edition" == enterprise ]; then
if [ "$edition" == enterprise -a "$os" == linux ]; then
docker rm -f ldap-$PORTLDAP || echo
mkdir -p ldap
docker pull arangodb/openldap-test-container:jessie


@@ -17,8 +17,8 @@ if test "$1" == "--clean"; then
exit
fi
port=5000
INCR=200
port=10000
INCR=2000
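# each caller now reserves a 2000-port block starting at 10000, which covers
# the single PORT01 base plus the offsets used by the test scripts (up to
# +1069 for the LDAP tests and +1999 for the resilience runs)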
find $PORTDIR -type f -cmin +$TIMEOUT -exec rm "{}" ";"


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" cluster community mmfiles
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community mmfiles mac


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" cluster community rocksdb
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community rocksdb linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community rocksdb mac


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" cluster enterprise mmfiles
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise mmfiles mac


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" cluster enterprise rocksdb
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise rocksdb linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise rocksdb mac


@@ -0,0 +1,3 @@
#!/bin/bash
. Installation/Pipeline/include/test_resilience_FOXX_ENGINE_OS.inc yes mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. Installation/Pipeline/include/test_resilience_FOXX_ENGINE_OS.inc yes rocksdb linux


@@ -0,0 +1,3 @@
#!/bin/bash
. Installation/Pipeline/include/test_resilience_FOXX_ENGINE_OS.inc no mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. Installation/Pipeline/include/test_resilience_FOXX_ENGINE_OS.inc no rocksdb linux


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" singleserver community mmfiles
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community mmfiles mac


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" singleserver community rocksdb
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community rocksdb linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community rocksdb mac


@@ -1,3 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_linux.inc "$1" singleserver enterprise mmfiles
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise mmfiles linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise mmfiles mac


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise rocksdb linux


@@ -0,0 +1,3 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise rocksdb mac