mirror of https://gitee.com/bigwinds/arangodb
Jenkins pipeline for feature branches
This commit is contained in:
parent
2f04981fe9
commit
e10bdb9bbb
|
@ -0,0 +1,194 @@
|
|||
// -*- mode: groovy-mode

// Glob of build artifacts and test collateral stashed between the build
// node and the test nodes.
def binaries = 'build/**,etc/**,Installation/Pipeline/**,js/**,scripts/**,tests/**,UnitTests/**,utils/**'

// Enterprise add-on repository, checked out next to the community sources.
def enterpriseRepo = 'https://github.com/arangodb/enterprise'

// Jenkins credentials id used for the enterprise checkout.
def credentialsId = '8d893d23-6714-4f35-a239-c847c798e080'

// Runs a PowerShell command from a Windows batch step. $ErrorActionPreference
// makes cmdlet errors fatal, and the trailing EXIT propagates the command's
// exit code so a failure fails the Jenkins step.
def PowerShell(psCmd) {
    bat "powershell.exe -NonInteractive -ExecutionPolicy Bypass -Command \"\$ErrorActionPreference='Stop';[Console]::OutputEncoding=[System.Text.Encoding]::UTF8;$psCmd;EXIT \$global:LastExitCode\""
}

// Workspace hygiene flags; set in the checkout stage when a commit message
// contains "ci: clean" (wipe the build directory) or "ci: clean-all"
// (wipe the whole workspace).
def cleanBuild = false
def cleanAll = false
|
||||
// Checks out the community sources, scans commit messages for the
// "ci: clean" / "ci: clean-all" markers, checks out the matching enterprise
// branch (falling back to devel), and stashes everything for the build nodes.
stage('checkout') {
    node('master') {
        milestone(1)

        retry(3) {
            try {
                checkout scm

                script {
                    def changeLogSets = currentBuild.changeSets

                    for (int i = 0; i < changeLogSets.size(); i++) {
                        def entries = changeLogSets[i].items

                        for (int j = 0; j < entries.length; j++) {
                            def msg = entries[j].msg

                            // BUG FIX: the original patterns ended in "[ \\]]", i.e. a
                            // character class of space-or-backslash followed by a literal
                            // ']' -- a message containing "ci: clean" never matched.
                            // A word boundary expresses the intent. ("ci: clean-all" also
                            // sets cleanBuild, which is harmless: cleanAll supersedes it.)
                            if (msg ==~ /(?i).*ci: *clean\b.*/) {
                                echo "using clean build because message contained 'ci: clean'"
                                cleanBuild = true
                            }

                            if (msg ==~ /(?i).*ci: *clean-all\b.*/) {
                                echo "using clean all because message contained 'ci: clean-all'"
                                cleanAll = true
                            }
                        }
                    }
                }
            }
            catch (err) {
                echo "GITHUB checkout failed, retrying in 5min"
                echo err.toString()
                sleep 300
                // BUG FIX: rethrow so retry(3) actually retries; the original
                // swallowed the error, turning the retry block into a no-op.
                throw err
            }
        }

        try {
            echo "Trying enterprise branch ${env.BRANCH_NAME}"

            checkout(
                changelog: false,
                poll: false,
                scm: [
                    $class: 'GitSCM',
                    branches: [[name: "*/${env.BRANCH_NAME}"]],
                    doGenerateSubmoduleConfigurations: false,
                    extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'enterprise']],
                    submoduleCfg: [],
                    userRemoteConfigs: [[credentialsId: credentialsId, url: enterpriseRepo]]])
        }
        catch (err) {
            // No enterprise branch matching this feature branch: fall back to devel.
            echo "Failed ${env.BRANCH_NAME}, trying enterprise branch devel"

            checkout(
                changelog: false,
                poll: false,
                scm: [
                    $class: 'GitSCM',
                    branches: [[name: "*/devel"]],
                    doGenerateSubmoduleConfigurations: false,
                    extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'enterprise']],
                    submoduleCfg: [],
                    userRemoteConfigs: [[credentialsId: credentialsId, url: enterpriseRepo]]])
        }

        stash includes: '**', name: 'source'

        milestone(2)
    }
}
|
||||
|
||||
// Builds linux (enterprise), mac (community) and windows (enterprise) in
// parallel; the linux tests and jslint fan out from the linux build's stash.
stage('build & test') {
    parallel(
        'build-linux': {
            node('linux') {
                if (cleanAll) {
                    sh 'rm -rf *'
                } else if (cleanBuild) {
                    sh 'rm -rf build-jenkins'
                }

                unstash 'source'

                sh './Installation/Pipeline/build_enterprise_linux.sh 16'
                stash includes: binaries, name: 'build-enterprise-linux'
            }

            // Tests run only after the linux build succeeded and was stashed.
            parallel(
                'test-singleserver-community': {
                    node('linux') {
                        if (cleanAll) {
                            sh 'rm -rf *'
                        }

                        // Replace any stale build tree with the freshly stashed one.
                        sh 'rm -rf build'
                        unstash 'build-enterprise-linux'
                        echo "Running singleserver community mmfiles linux test"
                        sh './Installation/Pipeline/test_singleserver_community_mmfiles_linux.sh 8'
                    }
                },

                'test-cluster-enterprise': {
                    node('linux') {
                        if (cleanAll) {
                            sh 'rm -rf *'
                        }

                        sh 'rm -rf build'
                        unstash 'build-enterprise-linux'
                        echo "Running cluster enterprise rocksdb linux test"
                        sh './Installation/Pipeline/test_cluster_enterprise_rocksdb_linux.sh 8'
                    }
                },

                'jslint': {
                    node('linux') {
                        if (cleanAll) {
                            sh 'rm -rf *'
                        }

                        sh 'rm -rf build'
                        unstash 'build-enterprise-linux'
                        echo "Running jslint test"

                        script {
                            try {
                                sh './Installation/Pipeline/test_jslint.sh'
                            }
                            catch (exc) {
                                // Lint problems mark the build unstable instead of failed.
                                currentBuild.result = 'UNSTABLE'
                            }
                        }
                    }
                }
            )
        },

        'build-mac': {
            node('mac') {
                if (cleanAll) {
                    sh 'rm -rf *'
                } else if (cleanBuild) {
                    sh 'rm -rf build-jenkins'
                }

                unstash 'source'

                sh './Installation/Pipeline/build_community_mac.sh 16'
            }
        },

        'build-windows': {
            node('windows') {
                if (cleanAll) {
                    // BUG FIX: plain "del /F /Q *" only removed top-level files;
                    // /S recurses into subdirectories as well.
                    bat 'del /F /S /Q *'
                } else if (cleanBuild) {
                    // BUG FIX: "del" cannot remove a directory tree; use rmdir.
                    bat 'if exist build rmdir /S /Q build'
                }

                unstash 'source'

                PowerShell(". .\\Installation\\Pipeline\\build_enterprise_windows.ps1")
            }
        }
    )
}
|
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash
# Builds the community edition in build-jenkins/ (kept between runs for
# incremental builds) and copies the binaries to build/ for stashing.
# Usage: build_community_linux.sh <concurrency>

# BUG FIX: without set -e a failing cmake/make was ignored and the script
# copied stale binaries and exited 0, masking build breakage.
set -e

concurrency=$1

(
    mkdir -p build-jenkins
    cd build-jenkins

    # -fno-omit-frame-pointer keeps usable stack traces in core dumps.
    CXXFLAGS=-fno-omit-frame-pointer cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo -DUSE_MAINTAINER_MODE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On ..

    make -j$concurrency
)

# copy binaries to preserve them
rm -rf build
mkdir build

cp -a build-jenkins/bin build
cp -a build-jenkins/etc build
cp -a build-jenkins/tests build
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash
# Builds the community edition in build-jenkins/ (kept between runs for
# incremental builds) and copies the binaries to build/ for stashing.
# Usage: <script> <concurrency>

# BUG FIX: without set -e a failing cmake/make was ignored and the script
# copied stale binaries and exited 0, masking build breakage.
set -e

concurrency=$1

(
    mkdir -p build-jenkins
    cd build-jenkins

    # -fno-omit-frame-pointer keeps usable stack traces in core dumps.
    CXXFLAGS=-fno-omit-frame-pointer cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo -DUSE_MAINTAINER_MODE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On ..

    make -j$concurrency
)

# copy binaries to preserve them
rm -rf build
mkdir build

cp -a build-jenkins/bin build
cp -a build-jenkins/etc build
cp -a build-jenkins/tests build
|
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash
# Builds the enterprise edition (USE_ENTERPRISE=On) in build-jenkins/ and
# copies the binaries to build/ for stashing.
# Usage: <script> <concurrency>

# BUG FIX: without set -e a failing cmake/make was ignored and the script
# copied stale binaries and exited 0, masking build breakage.
set -e

concurrency=$1

(
    mkdir -p build-jenkins
    cd build-jenkins

    # -fno-omit-frame-pointer keeps usable stack traces in core dumps.
    CXXFLAGS=-fno-omit-frame-pointer cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo -DUSE_MAINTAINER_MODE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On -DUSE_ENTERPRISE=On ..

    make -j$concurrency
)

# copy binaries to preserve them
rm -rf build
mkdir build

cp -a build-jenkins/bin build
cp -a build-jenkins/etc build
cp -a build-jenkins/tests build
|
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash
# Builds the enterprise edition (USE_ENTERPRISE=On) in build-jenkins/ and
# copies the binaries to build/ for stashing.
# Usage: <script> <concurrency>

# BUG FIX: without set -e a failing cmake/make was ignored and the script
# copied stale binaries and exited 0, masking build breakage.
set -e

concurrency=$1

(
    mkdir -p build-jenkins
    cd build-jenkins

    # -fno-omit-frame-pointer keeps usable stack traces in core dumps.
    CXXFLAGS=-fno-omit-frame-pointer cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo -DUSE_MAINTAINER_MODE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On -DUSE_ENTERPRISE=On ..

    make -j$concurrency
)

# copy binaries to preserve them
rm -rf build
mkdir build

cp -a build-jenkins/bin build
cp -a build-jenkins/etc build
cp -a build-jenkins/tests build
|
|
@ -0,0 +1,12 @@
|
|||
# Builds the enterprise edition on Windows inside the m0ppers/build-container
# docker image, with the Jenkins workspace mounted as C:\arangodb.
$volume = "$env:WORKSPACE"
$volume += ":C:\arangodb"
# NOTE: single-quoted here-string -- nothing is expanded on the host; the
# $env: references below are resolved by PowerShell inside the container.
$build = @'
New-Item -ItemType Directory -Force -Path c:\arangodb\build
cd c:\arangodb\build
cmake .. -G "Visual Studio 14 2015 Win64" -DCMAKE_BUILD_TYPE=RelWithDebInfo -DOPENSSL_INCLUDE_DIR="$env:OPENSSL_INCLUDE_DIR" -DLIB_EAY_RELEASE="$env:LIB_EAY_RELEASE" -DSSL_EAY_RELEASE="$env:SSL_EAY_RELEASE" -DLIB_EAY_RELEASE_DLL="$env:LIB_EAY_RELEASE_DLL" -DSSL_EAY_RELEASE_DLL="$env:SSL_EAY_RELEASE_DLL" -DUSE_ENTERPRISE=On -DUSE_MAINTAINER_MODE=On -DCATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On
cmake --build . --config RelWithDebInfo
exit $LastExitCode
'@
# The script lands in the workspace, so it is visible in the container at
# C:\arangodb\buildscript.ps1.
$build > buildscript.ps1
docker run --rm -v $volume m0ppers/build-container powershell C:\arangodb\buildscript.ps1
# Propagate the container's exit code to Jenkins.
exit $LastExitCode
|
|
@ -0,0 +1,30 @@
|
|||
#!/bin/bash
# Reserves a block of ports for a test run by creating a lock file per base
# port under /var/tmp/ports; prints the reserved base port on stdout.
# With "--clean <port>...", releases the given reservations instead.
# Reservations are reclaimed automatically after TIMEOUT minutes, so a
# crashed job cannot leak ports forever.

TIMEOUT=60 # in minutes

PORTDIR=/var/tmp/ports
mkdir -p $PORTDIR

if test "$1" == "--clean"; then
    shift

    while test $# -gt 0; do
        echo "freeing port $1"
        rm -f $PORTDIR/$1
        shift
    done

    exit
fi

# Base ports start at 5000 and are spaced INCR apart, so each reservation
# owns the range [port, port + INCR).
port=5000
INCR=200

# Reclaim stale reservations from dead jobs.
find $PORTDIR -type f -cmin +$TIMEOUT -exec rm "{}" ";"

# BUG FIX: the original wrote "! ((set -o noclobber ...) 2> /dev/null)",
# which bash can lex as the start of an arithmetic command "((...))".
# One subshell suffices: with noclobber set, the redirection fails if the
# lock file already exists, making creation an atomic test-and-set.
while ! (set -o noclobber; date > $PORTDIR/$port) 2> /dev/null; do
    sleep 1
    port=`expr $port + $INCR`
done

echo $port
|
|
@ -0,0 +1,46 @@
|
|||
#!/bin/bash
# Runs the cluster test suites against the enterprise build with the rocksdb
# storage engine, fanning the suites out through GNU parallel.
# Usage: <script> <concurrency>

concurrency=$1

echo "ARANGOD VERSION: `build/bin/arangod --version`"
echo "CORE PATTERN: `cat /proc/sys/kernel/core_pattern`"
echo "CORE LIMIT: `ulimit -c`"

# Start from a clean slate: old cores, logs, output and temp directories.
rm -rf core.* *.log out
rm -rf tmp && mkdir tmp
export TMPDIR=$(pwd)/tmp
export TEMPDIR=$(pwd)/tmp

# Reserve four non-overlapping port blocks (port.sh spaces them 200 apart);
# each suite below is given a 40-port slice of one block.
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`
PORT04=`./Installation/Pipeline/port.sh`

# Release the reservations whatever way the script exits.
trap "./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03 $PORT04" EXIT

# note that: shebang does not work if path contains a '@'

OPTS="--storageEngine rocksdb --skipNondeterministic true --skipTimeCritical true --configDir etc/jenkins --skipLogAnalysis true"

# NOTE: the backticks below are expanded by this shell before the list is
# piped to parallel, so each line carries concrete port numbers.
echo "
scripts/unittest agency --minPort `expr $PORT01 + 0` --maxPort `expr $PORT01 + 39` $OPTS 2>&1
scripts/unittest arangobench --minPort `expr $PORT01 + 40` --maxPort `expr $PORT01 + 79` $OPTS --cluster true 2>&1
scripts/unittest arangosh --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 119` $OPTS --cluster true --skipShebang true 2>&1
scripts/unittest authentication --minPort `expr $PORT01 + 120` --maxPort `expr $PORT01 + 159` $OPTS --cluster true 2>&1
scripts/unittest authentication_parameters --minPort `expr $PORT01 + 160` --maxPort `expr $PORT01 + 199` $OPTS --cluster true 2>&1
scripts/unittest config --minPort `expr $PORT02 + 0` --maxPort `expr $PORT02 + 39` $OPTS --cluster true 2>&1
scripts/unittest dfdb --minPort `expr $PORT02 + 40` --maxPort `expr $PORT02 + 79` $OPTS --cluster true 2>&1
scripts/unittest dump --minPort `expr $PORT02 + 80` --maxPort `expr $PORT02 + 119` $OPTS --cluster true 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 159` $OPTS --cluster true 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 199` $OPTS --cluster true 2>&1
scripts/unittest http_server --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 39` $OPTS --cluster true 2>&1
scripts/unittest server_http --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 79` $OPTS --cluster true 2>&1
scripts/unittest shell_client --minPort `expr $PORT03 + 80` --maxPort `expr $PORT03 + 119` $OPTS --cluster true 2>&1
scripts/unittest shell_server --minPort `expr $PORT03 + 120` --maxPort `expr $PORT03 + 159` $OPTS --cluster true 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 160` --maxPort `expr $PORT03 + 199` $OPTS --cluster true --testBuckets 4/0 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 0` --maxPort `expr $PORT04 + 39` $OPTS --cluster true --testBuckets 4/1 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 40` --maxPort `expr $PORT04 + 79` $OPTS --cluster true --testBuckets 4/2 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 80` --maxPort `expr $PORT04 + 119` $OPTS --cluster true --testBuckets 4/3 2>&1
scripts/unittest ssl_server --minPort `expr $PORT04 + 120` --maxPort `expr $PORT04 + 159` $OPTS --cluster true 2>&1
scripts/unittest upgrade --minPort `expr $PORT04 + 160` --maxPort `expr $PORT04 + 199` $OPTS --cluster true 2>&1
" | parallel --no-notice --load 10 --jobs $concurrency
|
|
@ -0,0 +1,55 @@
|
|||
#!/bin/bash
# Runs the cluster test suites against the enterprise build (rocksdb engine),
# including the LDAP suite, which needs an openldap container listening on a
# reserved host port. Usage: <script> <concurrency>

concurrency=$1

echo "ARANGOD VERSION: `build/bin/arangod --version`"
echo "CORE PATTERN: `cat /proc/sys/kernel/core_pattern`"
echo "CORE LIMIT: `ulimit -c`"

# Start from a clean slate: old cores, logs, output and temp directories.
rm -rf core.* *.log out
rm -rf tmp && mkdir tmp
export TMPDIR=$(pwd)/tmp
export TEMPDIR=$(pwd)/tmp

# Reserve five non-overlapping port blocks (port.sh spaces them 200 apart).
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`
PORT04=`./Installation/Pipeline/port.sh`
PORT05=`./Installation/Pipeline/port.sh`

# The ldap server gets the last port of the fifth block.
PORTLDAP=`expr $PORT05 + 199`

# BUG FIX: the container was started as "ldap-$PORTLDAP" while the stale-
# container removal used "ldap-$JOB_BASE_NAME", and nothing stopped the
# container when the job finished. Use one name everywhere and tear the
# container down in the EXIT trap along with the port reservations.
cleanup() {
    ./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03 $PORT04 $PORT05
    docker rm -f ldap-$JOB_BASE_NAME > /dev/null 2>&1 || true
}
trap cleanup EXIT

# Remove a stale container from a previous aborted run, then start a fresh one.
docker rm -f ldap-$JOB_BASE_NAME || echo
mkdir -p ldap
docker pull arangodb/openldap-test-container:jessie
docker run -d -e LDAP_CERT_CN=127.0.0.1 --rm -p $PORTLDAP:389 -v $(pwd)/ldap:/cert --name ldap-$JOB_BASE_NAME arangodb/openldap-test-container:jessie

# note that: shebang does not work if path contains a '@'

OPTS="--storageEngine rocksdb --skipNondeterministic true --skipTimeCritical true --configDir etc/jenkins --skipLogAnalysis true"

# The backticks are expanded by this shell before the list is piped to
# parallel, so each line carries concrete port numbers.
echo "
scripts/unittest agency --minPort `expr $PORT01 + 0` --maxPort `expr $PORT01 + 39` $OPTS 2>&1
scripts/unittest arangobench --minPort `expr $PORT01 + 40` --maxPort `expr $PORT01 + 79` $OPTS --cluster true 2>&1
scripts/unittest arangosh --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 119` $OPTS --cluster true --skipShebang true 2>&1
scripts/unittest authentication --minPort `expr $PORT01 + 120` --maxPort `expr $PORT01 + 159` $OPTS --cluster true 2>&1
scripts/unittest authentication_parameters --minPort `expr $PORT01 + 160` --maxPort `expr $PORT01 + 199` $OPTS --cluster true 2>&1
scripts/unittest config --minPort `expr $PORT02 + 0` --maxPort `expr $PORT02 + 39` $OPTS --cluster true 2>&1
scripts/unittest dfdb --minPort `expr $PORT02 + 40` --maxPort `expr $PORT02 + 79` $OPTS --cluster true 2>&1
scripts/unittest dump --minPort `expr $PORT02 + 80` --maxPort `expr $PORT02 + 119` $OPTS --cluster true 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 159` $OPTS --cluster true 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 199` $OPTS --cluster true 2>&1
scripts/unittest http_server --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 39` $OPTS --cluster true 2>&1
scripts/unittest ldap --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 79` $OPTS --cluster true --caCertFilePath $(pwd)/ldap/ca_server.pem --ldapHost 127.0.0.1 --ldapPort $PORTLDAP 2>&1
scripts/unittest server_http --minPort `expr $PORT03 + 80` --maxPort `expr $PORT03 + 119` $OPTS --cluster true 2>&1
scripts/unittest shell_client --minPort `expr $PORT03 + 120` --maxPort `expr $PORT03 + 159` $OPTS --cluster true 2>&1
scripts/unittest shell_server --minPort `expr $PORT03 + 160` --maxPort `expr $PORT03 + 199` $OPTS --cluster true 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 0` --maxPort `expr $PORT04 + 39` $OPTS --cluster true --testBuckets 4/0 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 40` --maxPort `expr $PORT04 + 79` $OPTS --cluster true --testBuckets 4/1 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 80` --maxPort `expr $PORT04 + 119` $OPTS --cluster true --testBuckets 4/2 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT04 + 120` --maxPort `expr $PORT04 + 159` $OPTS --cluster true --testBuckets 4/3 2>&1
scripts/unittest ssl_server --minPort `expr $PORT04 + 160` --maxPort `expr $PORT04 + 199` $OPTS --cluster true 2>&1
scripts/unittest upgrade --minPort `expr $PORT05 + 0` --maxPort `expr $PORT05 + 39` $OPTS --cluster true 2>&1
" | parallel --no-notice --load 10 --jobs $concurrency
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# Runs the project's jslint checks; the script's exit status is that of
# jslint itself (it is the last command), which the pipeline maps to
# UNSTABLE rather than FAILURE.

./utils/jslint.sh
|
|
@ -0,0 +1,52 @@
|
|||
#!/bin/bash
# Runs the single-server test suites with the mmfiles storage engine,
# fanning the suites out through GNU parallel.
# Usage: <script> <concurrency>

concurrency=$1

echo "ARANGOD VERSION: `build/bin/arangod --version`"
echo "CORE PATTERN: `cat /proc/sys/kernel/core_pattern`"
echo "CORE LIMIT: `ulimit -c`"

# Start from a clean slate: old cores, logs, output and temp directories.
rm -rf core.* *.log out
rm -rf tmp && mkdir tmp
export TMPDIR=$(pwd)/tmp
export TEMPDIR=$(pwd)/tmp

# Reserve three non-overlapping port blocks (port.sh spaces them 200 apart);
# single-server suites only need 10-port slices.
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`

# Release the reservations whatever way the script exits.
trap "./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03" EXIT

# note that: shebang does not work if path contains a '@'

OPTS="--storageEngine mmfiles --skipNondeterministic true --skipTimeCritical true --configDir etc/jenkins --skipLogAnalysis true"

# The backticks are expanded by this shell before the list is piped to
# parallel, so each line carries concrete port numbers.
echo "
scripts/unittest boost --skipCache false 2>&1
scripts/unittest agency --minPort `expr $PORT01 + 0` --maxPort `expr $PORT01 + 9` $OPTS 2>&1
scripts/unittest arangobench --minPort `expr $PORT01 + 10` --maxPort `expr $PORT01 + 19` $OPTS 2>&1
scripts/unittest arangosh --minPort `expr $PORT01 + 20` --maxPort `expr $PORT01 + 29` $OPTS --skipShebang true 2>&1
scripts/unittest authentication --minPort `expr $PORT01 + 30` --maxPort `expr $PORT01 + 39` $OPTS 2>&1
scripts/unittest authentication_parameters --minPort `expr $PORT01 + 40` --maxPort `expr $PORT01 + 49` $OPTS 2>&1
scripts/unittest cluster_sync --minPort `expr $PORT01 + 50` --maxPort `expr $PORT01 + 59` $OPTS 2>&1
scripts/unittest config --minPort `expr $PORT01 + 60` --maxPort `expr $PORT01 + 69` $OPTS 2>&1
scripts/unittest dfdb --minPort `expr $PORT01 + 70` --maxPort `expr $PORT01 + 79` $OPTS 2>&1
scripts/unittest dump --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 89` $OPTS 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT01 + 90` --maxPort `expr $PORT01 + 99` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 100` --maxPort `expr $PORT02 + 109` $OPTS 2>&1
scripts/unittest http_replication --minPort `expr $PORT02 + 110` --maxPort `expr $PORT02 + 119` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 129` $OPTS 2>&1
scripts/unittest replication_ongoing --minPort `expr $PORT02 + 130` --maxPort `expr $PORT02 + 139` $OPTS 2>&1
scripts/unittest replication_static --minPort `expr $PORT02 + 140` --maxPort `expr $PORT02 + 149` $OPTS 2>&1
scripts/unittest replication_sync --minPort `expr $PORT02 + 150` --maxPort `expr $PORT02 + 159` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 169` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT02 + 170` --maxPort `expr $PORT02 + 179` $OPTS 2>&1
scripts/unittest shell_replication --minPort `expr $PORT02 + 180` --maxPort `expr $PORT02 + 189` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT02 + 190` --maxPort `expr $PORT02 + 199` $OPTS 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 9` $OPTS --testBuckets 4/0 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 10` --maxPort `expr $PORT03 + 19` $OPTS --testBuckets 4/1 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 20` --maxPort `expr $PORT03 + 29` $OPTS --testBuckets 4/2 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 30` --maxPort `expr $PORT03 + 39` $OPTS --testBuckets 4/3 2>&1
scripts/unittest ssl_server --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 49` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT03 + 50` --maxPort `expr $PORT03 + 59` $OPTS 2>&1
" | parallel --no-notice --load 10 --jobs $concurrency
|
|
@ -0,0 +1,52 @@
|
|||
#!/bin/bash
# Runs the single-server test suites with the rocksdb storage engine,
# fanning the suites out through GNU parallel.
# Usage: <script> <concurrency>

concurrency=$1

echo "ARANGOD VERSION: `build/bin/arangod --version`"
echo "CORE PATTERN: `cat /proc/sys/kernel/core_pattern`"
echo "CORE LIMIT: `ulimit -c`"

# Start from a clean slate: old cores, logs, output and temp directories.
rm -rf core.* *.log out
rm -rf tmp && mkdir tmp
export TMPDIR=$(pwd)/tmp
export TEMPDIR=$(pwd)/tmp

# Reserve three non-overlapping port blocks (port.sh spaces them 200 apart);
# single-server suites only need 10-port slices.
PORT01=`./Installation/Pipeline/port.sh`
PORT02=`./Installation/Pipeline/port.sh`
PORT03=`./Installation/Pipeline/port.sh`

# Release the reservations whatever way the script exits.
trap "./Installation/Pipeline/port.sh --clean $PORT01 $PORT02 $PORT03" EXIT

# note that: shebang does not work if path contains a '@'

OPTS="--storageEngine rocksdb --skipNondeterministic true --skipTimeCritical true --configDir etc/jenkins --skipLogAnalysis true"

# The backticks are expanded by this shell before the list is piped to
# parallel, so each line carries concrete port numbers.
echo "
scripts/unittest boost --skipCache false 2>&1
scripts/unittest agency --minPort `expr $PORT01 + 0` --maxPort `expr $PORT01 + 9` $OPTS 2>&1
scripts/unittest arangobench --minPort `expr $PORT01 + 10` --maxPort `expr $PORT01 + 19` $OPTS 2>&1
scripts/unittest arangosh --minPort `expr $PORT01 + 20` --maxPort `expr $PORT01 + 29` $OPTS --skipShebang true 2>&1
scripts/unittest authentication --minPort `expr $PORT01 + 30` --maxPort `expr $PORT01 + 39` $OPTS 2>&1
scripts/unittest authentication_parameters --minPort `expr $PORT01 + 40` --maxPort `expr $PORT01 + 49` $OPTS 2>&1
scripts/unittest cluster_sync --minPort `expr $PORT01 + 50` --maxPort `expr $PORT01 + 59` $OPTS 2>&1
scripts/unittest config --minPort `expr $PORT01 + 60` --maxPort `expr $PORT01 + 69` $OPTS 2>&1
scripts/unittest dfdb --minPort `expr $PORT01 + 70` --maxPort `expr $PORT01 + 79` $OPTS 2>&1
scripts/unittest dump --minPort `expr $PORT01 + 80` --maxPort `expr $PORT01 + 89` $OPTS 2>&1
scripts/unittest dump_authentication --minPort `expr $PORT01 + 90` --maxPort `expr $PORT01 + 99` $OPTS 2>&1
scripts/unittest endpoints --minPort `expr $PORT02 + 100` --maxPort `expr $PORT02 + 109` $OPTS 2>&1
scripts/unittest http_replication --minPort `expr $PORT02 + 110` --maxPort `expr $PORT02 + 119` $OPTS 2>&1
scripts/unittest http_server --minPort `expr $PORT02 + 120` --maxPort `expr $PORT02 + 129` $OPTS 2>&1
scripts/unittest replication_ongoing --minPort `expr $PORT02 + 130` --maxPort `expr $PORT02 + 139` $OPTS 2>&1
scripts/unittest replication_static --minPort `expr $PORT02 + 140` --maxPort `expr $PORT02 + 149` $OPTS 2>&1
scripts/unittest replication_sync --minPort `expr $PORT02 + 150` --maxPort `expr $PORT02 + 159` $OPTS 2>&1
scripts/unittest server_http --minPort `expr $PORT02 + 160` --maxPort `expr $PORT02 + 169` $OPTS 2>&1
scripts/unittest shell_client --minPort `expr $PORT02 + 170` --maxPort `expr $PORT02 + 179` $OPTS 2>&1
scripts/unittest shell_replication --minPort `expr $PORT02 + 180` --maxPort `expr $PORT02 + 189` $OPTS 2>&1
scripts/unittest shell_server --minPort `expr $PORT02 + 190` --maxPort `expr $PORT02 + 199` $OPTS 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 0` --maxPort `expr $PORT03 + 9` $OPTS --testBuckets 4/0 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 10` --maxPort `expr $PORT03 + 19` $OPTS --testBuckets 4/1 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 20` --maxPort `expr $PORT03 + 29` $OPTS --testBuckets 4/2 2>&1
scripts/unittest shell_server_aql --minPort `expr $PORT03 + 30` --maxPort `expr $PORT03 + 39` $OPTS --testBuckets 4/3 2>&1
scripts/unittest ssl_server --minPort `expr $PORT03 + 40` --maxPort `expr $PORT03 + 49` $OPTS 2>&1
scripts/unittest upgrade --minPort `expr $PORT03 + 50` --maxPort `expr $PORT03 + 59` $OPTS 2>&1
" | parallel --no-notice --load 10 --jobs $concurrency
|
Loading…
Reference in New Issue