
Merge with latest changes in improve-jenkins

stashing should work better now
Andreas Streichardt 2017-08-31 17:53:48 +02:00
parent e2fdab431c
commit 7d2ab6055f
39 changed files with 192 additions and 423 deletions

View File

@@ -59,8 +59,6 @@ set(ENTERPRISE_INCLUDE_DIR "enterprise")
option(USE_ENTERPRISE "enable enterprise build" OFF)
if (USE_ENTERPRISE)
add_definitions("-DUSE_ENTERPRISE=1")
include_directories(${ENTERPRISE_INCLUDE_DIR})
add_subdirectory(enterprise)
endif ()

View File

@@ -326,11 +326,35 @@ Restrictions: ${restrictions.keySet().join(", ")}
// -----------------------------------------------------------------------------
def stashBinaries(os, edition) {
stash name: "binaries-${os}-${edition}", includes: "build/bin/**, build/tests/**, build/etc/**, etc/**, Installation/Pipeline/**, js/**, scripts/**, UnitTests/**, utils/**, resilience/**, enterprise/js/**", excludes: "build/bin/*.exe, build/bin/*.pdb, build/bin/*.ilk, build/tests/*.exe, build/tests/*.pdb, build/tests/*.ilk, js/node/node_modules/eslint*"
def paths = ["build/etc", "etc", "Installation/Pipeline", "js", "scripts", "UnitTests"]
if (os == "windows") {
paths << "build/bin/RelWithDebInfo"
paths << "build/tests/RelWithDebInfo"
// So frustrating... Compress-Archive is built in, but it simply won't include the relative paths in the archive :(
// powershell "Compress-Archive -Force -Path (Get-ChildItem -Recurse -Path " + paths.join(',') + ") -DestinationPath stash.zip -Confirm -CompressionLevel Fastest"
// install 7z portable (https://chocolatey.org/packages/7zip.portable)
powershell "7z a stash.zip -r -bd -mx=1 " + paths.join(" ")
// This is a mess: when run as a service, scp runs as the system user and not as jenkins.
// I couldn't figure out how to get it working properly for hours... so the last resort was to install PuTTY.
powershell "echo 'y' | pscp -i C:\\Users\\Jenkins\\.ssh\\putty-jenkins.ppk stash.zip jenkins@c1:/vol/cache/binaries-${env.BUILD_TAG}-${os}-${edition}.zip"
} else {
paths << "build/bin/"
paths << "build/tests/"
sh "GZIP=-1 tar cpzf stash.tar.gz " + paths.join(" ")
sh "scp stash.tar.gz c1:/vol/cache/binaries-${env.BUILD_TAG}-${os}-${edition}.tar.gz"
}
}
def unstashBinaries(os, edition) {
unstash name: "binaries-${os}-${edition}"
if (os == "windows") {
powershell "echo 'y' | pscp -i C:\\Users\\Jenkins\\.ssh\\putty-jenkins.ppk jenkins@c1:/vol/cache/binaries-${env.BUILD_TAG}-${os}-${edition}.zip stash.zip"
powershell "Expand-Archive -Path stash.zip -Force -DestinationPath ."
} else {
sh "scp c1:/vol/cache/binaries-${env.BUILD_TAG}-${os}-${edition}.tar.gz stash.tar.gz"
sh "tar xpzf stash.tar.gz"
}
}
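
The two helpers above replace Jenkins' built-in stash/unstash, which round-trips archives through the master, with explicit archives pushed to a shared cache host over scp/pscp, keyed by BUILD_TAG. A minimal usage sketch (not from the commit; assumes c1:/vol/cache is reachable from every agent and that stale archives are reaped elsewhere):

// Build once, then fan out to test nodes via the cache host.
node('linux') {
    buildEdition('linux', 'community')
    stashBinaries('linux', 'community')    // tar the path list, scp to c1
}
node('linux') {
    unstashBinaries('linux', 'community')  // scp back from c1, untar into the workspace
    executeTests('linux', 'community', 'cluster', 'mmfiles', 10000)
}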
// -----------------------------------------------------------------------------
@@ -432,7 +456,7 @@ def getTests(os, edition, mode, engine) {
}
}
def testEdition(os, edition, mode, engine, port) {
def executeTests(os, edition, mode, engine, port) {
def arch = "LOG_test_${os}_${edition}_${mode}_${engine}"
if (os == 'linux' || os == 'mac') {
@@ -464,17 +488,19 @@ def testEdition(os, edition, mode, engine, port) {
}
testMap["test-${os}-${edition}-${mode}-${engine}-${name}"] = {
// copy in groovy
testArgs += " --minPort " + port
testArgs += " --maxPort " + (port + portInterval - 1)
def command = "build/bin/arangosh --log.level warning --javascript.execute UnitTests/unittest.js ${test} -- "
command += testArgs
lock("test-${env.NODE_NAME}-${env.JOB_NAME}-${env.BUILD_ID}-${edition}-${engine}-${lockIndex}") {
timeout(15) {
if (os == "windows") {
powershell command
} else {
sh command
timeout(30) {
def tmpDir = pwd() + "/tmp"
withEnv(["TMPDIR=${tmpDir}", "TEMPDIR=${tmpDir}"]) {
if (os == "windows") {
powershell command
} else {
sh command
}
}
}
}
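
Each parallel test group now receives a disjoint [minPort, maxPort] window derived from the node's start port, and the lock name includes node, job, build, edition, engine and lockIndex, so only groups that would genuinely collide serialize. A sketch of how non-overlapping windows can be carved out (portInterval and lockIndex are defined outside this excerpt; the values below are illustrative only):

// Illustrative: derive a disjoint port window per test group.
int startPort = 10000     // what getStartPort(os) might return
int portInterval = 40     // assumed window width per group
def windowFor = { int index ->
    int min = startPort + index * portInterval
    [min: min, max: min + portInterval - 1]
}
assert windowFor(0) == [min: 10000, max: 10039]
assert windowFor(1) == [min: 10040, max: 10079]   // no overlap with group 0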
@@ -540,23 +566,21 @@ def testStep(os, edition, mode, engine) {
def buildName = "${os}-${edition}"
def name = "${os}-${edition}-${mode}-${engine}"
stage("test-${name}") {
// Seriously... 60 minutes is the absolute maximum.
// Even in the worst situations ArangoDB MUST be able to finish within 60 minutes;
// even if the features are green, taking that long is completely broken performance-wise.
// DO NOT INCREASE!!
def port = 0
fileOperations([folderDeleteOperation('tmp'), folderDeleteOperation('build/bin'), folderDeleteOperation('js'), folderDeleteOperation('out'), folderCreateOperation('tmp'), fileDeleteOperation(excludes: '', includes: 'core.*,*.dmp')])
unstashBinaries(os, edition)
def port = 0
port = getStartPort(os) as Integer
echo "Using start port: ${port}"
if (os == "windows") {
powershell "copy build\\bin\\RelWithDebInfo\\* build\\bin"
powershell "Installation/Pipeline/include/test_setup_tmp.ps1"
} else {
sh "chmod +x Installation/Pipeline/include/test_setup_tmp.inc && sh Installation/Pipeline/include/test_setup_tmp.inc"
}
// Seriously... 60 minutes is the absolute maximum.
// Even in the worst situations ArangoDB MUST be able to finish within 60 minutes;
// even if the features are green, taking that long is completely broken performance-wise.
// DO NOT INCREASE!!
timeout(60) {
try {
testEdition(os, edition, mode, engine, port)
executeTests(os, edition, mode, engine, port)
}
finally {
def arch = "LOG_test_${os}_${edition}_${mode}_${engine}"
@@ -590,19 +614,15 @@ def testStep(os, edition, mode, engine) {
}
}
def testStepParallel(osList, editionList, modeList) {
def testStepParallel(os, edition, modeList) {
def branches = [:]
for (os in osList) {
for (edition in editionList) {
for (mode in modeList) {
for (engine in ['mmfiles', 'rocksdb']) {
if (testCheck(os, edition, mode, engine)) {
def name = "test-${os}-${edition}-${mode}-${engine}";
for (mode in modeList) {
for (engine in ['mmfiles', 'rocksdb']) {
if (testCheck(os, edition, mode, engine)) {
def name = "test-${os}-${edition}-${mode}-${engine}";
branches[name] = testStep(os, edition, mode, engine)
}
}
branches[name] = testStep(os, edition, mode, engine)
}
}
}
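
testStepParallel now takes a single os/edition pair instead of lists, since runEdition is already invoked once per combination; the map of named closures it builds is what Jenkins' parallel step consumes. A minimal sketch of the pattern with dummy bodies:

// Minimal sketch of the branch-map pattern used above.
def branches = [:]
for (mode in ['cluster', 'singleserver']) {
    for (engine in ['mmfiles', 'rocksdb']) {
        def name = "test-linux-community-${mode}-${engine}"
        branches[name] = { echo "would run ${name}" }
    }
}
parallel branches   // runs the four closures concurrently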
@@ -740,42 +760,24 @@ def testStepParallel(osList, editionList, modeList) {
def buildEdition(os, edition) {
def arch = "LOG_build_${os}_${edition}"
if (os == 'linux' || os == 'mac') {
sh "rm -rf ${arch}"
sh "mkdir -p ${arch}"
fileOperations([folderDeleteOperation(arch), folderCreateOperation(arch)])
if (os == 'linux') {
sh "./Installation/Pipeline/linux/build_${os}_${edition}.sh 64"
}
else if (os == 'mac') {
sh "./Installation/Pipeline/mac/build_${os}_${edition}.sh 16"
}
else if (os == 'windows') {
bat "del /F /Q ${arch}"
powershell "New-Item -ItemType Directory -Force -Path ${arch}"
}
try {
if (os == 'linux') {
sh "./Installation/Pipeline/linux/build_${os}_${edition}.sh 64"
}
else if (os == 'mac') {
sh "./Installation/Pipeline/mac/build_${os}_${edition}.sh 16"
}
else if (os == 'windows') {
// I concede... we need a lock for Windows; I could not get it to run concurrently.
// v8 would not build multiple times at the same time on the same machine:
// "PDB API call failed, error code '24'", etc.
// In theory it should be possible to parallelize it by setting an environment variable (see the build script), but for v8 it won't work :(
// Feel free to recheck if there is time at some point... this really should not be possible, but
// make sure there are 2 concurrent builds on the SAME node building v8 at the same time to properly test it.
// I just don't want any more "yeah, that might randomly fail, just restart" sentences.
def hostname = powershell(returnStdout: true, script: "hostname")
lock('build-${hostname}') {
powershell ". .\\Installation\\Pipeline\\windows\\build_${os}_${edition}.ps1"
}
}
}
finally {
if (os == 'linux' || os == 'mac') {
sh "for i in log-output; do test -e \"\$i\" && mv \"\$i\" ${arch} || true; done"
}
else if (os == 'windows') {
powershell "Move-Item -ErrorAction Ignore -Path log-output/* -Destination ${arch}"
// I concede... we need a lock for Windows; I could not get it to run concurrently.
// v8 would not build multiple times at the same time on the same machine:
// "PDB API call failed, error code '24'", etc.
// In theory it should be possible to parallelize it by setting an environment variable (see the build script), but for v8 it won't work :(
// Feel free to recheck if there is time at some point... this really should not be possible, but
// make sure there are 2 concurrent builds on the SAME node building v8 at the same time to properly test it.
// I just don't want any more "yeah, that might randomly fail, just restart" sentences.
def hostname = powershell(returnStdout: true, script: "hostname")
lock('build-${hostname}') {
powershell ". .\\Installation\\Pipeline\\windows\\build_${os}_${edition}.ps1"
}
}
}
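
One caveat worth flagging in the block above: Groovy only interpolates ${...} inside double-quoted strings, so lock('build-${hostname}') locks on the literal text build-${hostname} rather than a per-machine name (builds still serialize, just globally instead of per host), and powershell(returnStdout: true) returns output with a trailing newline. A corrected sketch:

// Interpolate and trim the hostname so the lock is really per machine.
def hostname = powershell(returnStdout: true, script: "hostname").trim()
lock("build-${hostname}") {
    powershell ". .\\Installation\\Pipeline\\windows\\build_${os}_${edition}.ps1"
}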
@@ -825,19 +827,21 @@ def runEdition(os, edition) {
timeout(90) {
buildEdition(os, edition)
// we only need one jslint test per edition
if (os == "linux") {
stage("jslint-${edition}") {
echo "Running jslint for ${edition}"
jslint()
}
}
stashBinaries(os, edition)
}
// we only need one jslint test per edition
if (os == "linux") {
stage("jslint-${edition}") {
echo "Running jslint for ${edition}"
jslint()
}
}
}
}
testStepParallel([os], [edition], ['cluster', 'singleserver'])
testStepParallel(os, edition, ['cluster', 'singleserver'])
}
}

View File

@@ -6,6 +6,7 @@ ENTERPRISE=""
type="build"
if [ "$edition" == community ]; then
ENTERPRISE="-DUSE_ENTERPRISE=Off"
type="${type}_community"
elif [ "$edition" == enterprise ]; then
type="${type}_enterprise"
@@ -30,30 +31,15 @@ echo "CONCURRENCY: $concurrency"
echo "HOST: `hostname`"
echo "PWD: `pwd`"
mkdir -p build-$edition
if [ ! -f build-$edition/location ]; then
if [ "$os" == mac ]; then
(ls -l && echo "$edition $os") | md5 | awk '{print $1}' > build-$edition/location
else
(ls -l && echo "$edition $os") | md5sum | awk '{print $1}' > build-$edition/location
fi
fi
GENPATH="/tmp/`cat build-$edition/location`"
rm -f $GENPATH
ln -s `pwd` $GENPATH
cd $GENPATH
echo "GENPATH: `pwd`"
mkdir -p build
rm -rf log-output
mkdir -p log-output
touch log-output/build.log
(
cd build-$edition
set -eo pipefail
cd build
echo "`date +%T` configuring..."
CXXFLAGS=-fno-omit-frame-pointer \
@@ -64,41 +50,10 @@ touch log-output/build.log
-DUSE_FAILURE_TESTS=On \
-DDEBUG_SYNC_REPLICATION=On \
$ENTERPRISE \
.. >> ../log-output/build.log 2>&1
if [ "$?" != 0 ]; then
if fgrep 'Re-run cmake with a different source directory' ../log-output/build.log; then
mv location ..
rm -rf *
mv ../location .
CXXFLAGS=-fno-omit-frame-pointer \
cmake \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DUSE_MAINTAINER_MODE=On \
-DUSE_CATCH_TESTS=On \
-DUSE_FAILURE_TESTS=On \
-DDEBUG_SYNC_REPLICATION=On \
$ENTERPRISE \
.. >> ../log-output/build.log 2>&1 || exit 1
else
exit 1
fi
fi
.. 2>&1 | tee ../log-output/build.log
echo "`date +%T` building..."
make -j $concurrency -l $load >> ../log-output/build.log 2>&1 || exit 1
make -j $concurrency -l $load 2>&1 | tee -a ../log-output/build.log
) || exit 1
# copy binaries to preserve them
echo "`date +%T` copying..."
rm -rf build
mkdir -p build/tests
cp -a build-$edition/bin build
cp -a build-$edition/etc build
cp -a build-$edition/tests/arangodbtests build/tests
cp -a build-$edition/tests/icudtl.dat build/tests
echo "`date +%T` done..."
echo "`date +%T` done..."

View File

@@ -149,6 +149,6 @@ function createTests {
}
function RunTests {
Param ([int]$port, [string]$engine, [string]$edition, [string]$mode)
$jobs = createTests -port $port -engine mmfiles -edition community -mode singleserver
$jobs = createTests -port $port -engine $engine -edition $edition -mode $mode
executeParallel -jobs $jobs -parallelity 4
}

View File

@@ -1,9 +0,0 @@
rm -rf core.* *.log out
rm -rf tmp && mkdir tmp
export TMPDIR=$(pwd)/tmp
export TEMPDIR=$(pwd)/tmp
rm -rf "log-output"
mkdir -p "log-output"
echo "TMPDIR: $TMPDIR"

View File

@@ -1,9 +0,0 @@
New-Item -ItemType Directory -Force -Path log-output
New-Item -ItemType Directory -Force -Path tmp
$tmp=(Get-Location).path
$env:TEMP="$tmp\tmp"
$env:TMP="$tmp\tmp"
$env:TMPDIR="$tmp\tmp"
$env:TEMPDIR="$tmp\tmp"

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community mmfiles linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community rocksdb linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise mmfiles linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise rocksdb linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community mmfiles linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community rocksdb linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise mmfiles linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise rocksdb linux

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community mmfiles mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster community rocksdb mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise mmfiles mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" cluster enterprise rocksdb mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community mmfiles mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver community rocksdb mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise mmfiles mac

View File

@@ -1,3 +0,0 @@
#!/bin/bash
. ./Installation/Pipeline/include/test_MODE_EDITION_ENGINE_OS.inc "$1" singleserver enterprise rocksdb mac

View File

@@ -1,15 +1,15 @@
New-Item -ItemType Directory -Force -Path C:\ports
New-Item -ItemType Directory -Force -Path C:\ports | Out-Null
$timeLimit = (Get-Date).AddMinutes(-480)
Get-ChildItem C:\ports | ? { $_.LastWriteTime -lt $timeLimit } | Remove-Item
$timeLimit = (Get-Date).AddMinutes(-100)
Get-ChildItem C:\ports | ? { $_.LastWriteTime -lt $timeLimit } | Remove-Item -ErrorAction Ignore | Out-Null
$port = 15000
$port = 10000
$portIncrement = 2000
$port = $port - $portIncrement
do {
$port = $port + $portIncrement
$portFile = "C:\ports\$port"
}
until (New-Item -ItemType File -Path $portFile -ErrorAction SilentlyContinue)
until (New-Item -ItemType File -Path $portFile -ErrorAction Ignore)
Write-Output $port
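
port.ps1 hands out non-overlapping base ports by treating marker files in C:\ports as leases: leases older than 100 minutes are reaped, then the script claims the first free multiple of 2000 above 10000 via atomic file creation. A rough Groovy equivalent of the same idea (directory and timings taken from the script above; /tmp/ports is an assumption for the Unix side):

import java.util.concurrent.TimeUnit

// One marker file per claimed base port; creation is the atomic "lease".
File portDir = new File('/tmp/ports')
portDir.mkdirs()
long cutoff = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(100)
portDir.listFiles().each { f ->
    if (f.lastModified() < cutoff) { f.delete() }   // reap stale leases
}
int port = 10000
while (!new File(portDir, String.valueOf(port)).createNewFile()) {
    port += 2000   // window taken, try the next one
}
println "claimed start port ${port}"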

View File

@@ -2,8 +2,6 @@
type="test_jslint"
. ./Installation/Pipeline/include/test_setup_tmp.inc
echo "`date +%T` jslinting..."
./utils/jslint.sh
result=$?

View File

@@ -1,5 +1,7 @@
$ErrorActionPreference="Stop"
$buildOptions = "-DUSE_MAINTAINER_MODE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On -DCMAKE_BUILD_TYPE=RelWithDebInfo -DSKIP_PACKAGING=On"
$vcpath=$(Get-ItemProperty HKLM:\SOFTWARE\Wow6432Node\Microsoft\VisualStudio\SxS\VC7)."14.0"
#$env:_MSPDBSRV_ENDPOINT_="community-${env:BUILD_TAG}"
$buildOptions = "-DUSE_MAINTAINER_MODE=On -DUSE_ENTERPRISE=Off -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On -DCMAKE_BUILD_TYPE=RelWithDebInfo -DSKIP_PACKAGING=On"
Remove-Item -Force -Recurse log-output -ErrorAction SilentlyContinue
New-Item -Force -ItemType Directory log-output -ErrorAction SilentlyContinue
if (Get-Command docker -errorAction SilentlyContinue) {
@@ -18,10 +20,10 @@ exit $LastExitCode
docker run --rm -v $volume m0ppers/build-container powershell C:\arangodb\buildscript.ps1 | Set-Content -PassThru log-output\build.log
} else {
$env:GYP_MSVS_OVERRIDE_PATH='C:\Program Files (x86)\Microsoft Visual Studio\Shared\14.0\VC\bin'
$env:GYP_MSVS_OVERRIDE_PATH="${vcpath}\bin"
New-Item -ItemType Directory -Force -Path build
cd build
Invoke-Expression "cmake .. -G `"Visual Studio 15 2017 Win64`" ${buildOptions} | Set-Content -PassThru ..\log-output\build.log"
cmake --build . --config RelWithDebInfo | Add-Content -PassThru ..\log-output\build.log
cd ..
}
}

View File

@@ -1,4 +1,6 @@
$ErrorActionPreference="Stop"
$vcpath=$(Get-ItemProperty HKLM:\SOFTWARE\Wow6432Node\Microsoft\VisualStudio\SxS\VC7)."14.0"
#$env:_MSPDBSRV_ENDPOINT_="enterprise-${env:BUILD_TAG}"
$buildOptions = "-DUSE_MAINTAINER_MODE=On -DUSE_ENTERPRISE=On -DUSE_CATCH_TESTS=On -DUSE_FAILURE_TESTS=On -DDEBUG_SYNC_REPLICATION=On -DCMAKE_BUILD_TYPE=RelWithDebInfo -DSKIP_PACKAGING=On"
Remove-Item -Force -Recurse log-output -ErrorAction SilentlyContinue
New-Item -Force -ItemType Directory log-output -ErrorAction SilentlyContinue
@@ -18,7 +20,7 @@ exit $LastExitCode
docker run --rm -v $volume m0ppers/build-container powershell C:\arangodb\buildscript.ps1 | Set-Content -PassThru log-output\build.log
} else {
$env:GYP_MSVS_OVERRIDE_PATH='C:\Program Files (x86)\Microsoft Visual Studio\Shared\14.0\VC\bin'
$env:GYP_MSVS_OVERRIDE_PATH="${vcpath}\bin"
New-Item -ItemType Directory -Force -Path build
cd build
Invoke-Expression "cmake .. -G `"Visual Studio 15 2017 Win64`" ${buildOptions} | Set-Content -PassThru ..\log-output\build.log"

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine mmfiles -edition community -mode cluster
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine rocksdb -edition community -mode cluster
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine mmfiles -edition enterprise -mode cluster
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine rocksdb -edition enterprise -mode cluster
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine mmfiles -edition community -mode singleserver
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine rocksdb -edition community -mode singleserver
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine mmfiles -edition enterprise -mode singleserver
del $portFile
exit $result

View File

@@ -1,14 +0,0 @@
$ErrorActionPreference="Stop"
. Installation\Pipeline\include\test_setup_tmp.ps1
. Installation\Pipeline\port.ps1
. Installation\Pipeline\include\test_MODE_EDITION_ENGINE_windows.ps1
Copy-Item -force .\build\bin\RelWithDebInfo\* .\build\bin\
Copy-Item -force .\build\tests\RelWithDebInfo\* .\build\tests\
$result = RunTests -port $port -engine rocksdb -edition enterprise -mode singleserver
del $portFile
exit $result

View File

@@ -1,58 +1,57 @@
/*jshint globalstrict:false, unused:false */
/*global print, start_pretty_print, ARGUMENTS */
/* jshint globalstrict:false, unused:false */
/* global print, start_pretty_print, ARGUMENTS */
'use strict';
const yaml = require("js-yaml");
const _ = require("lodash");
const _ = require('lodash');
const UnitTest = require("@arangodb/testing");
const UnitTest = require('@arangodb/testing');
const internalMembers = UnitTest.internalMembers;
const fs = require("fs");
const internal = require("internal"); // js/common/bootstrap/modules/internal.js
const fs = require('fs');
const internal = require('internal'); // js/common/bootstrap/modules/internal.js
const inspect = internal.inspect;
let testOutputDirectory;
function makePathGeneric(path) {
function makePathGeneric (path) {
return path.split(fs.pathSeparator);
}
function xmlEscape(s) {
return s.replace(/[<>&"]/g, function(c) {
return "&" + {
"<": "lt",
">": "gt",
"&": "amp",
"\"": "quot"
}[c] + ";";
function xmlEscape (s) {
return s.replace(/[<>&"]/g, function (c) {
return '&' + {
'<': 'lt',
'>': 'gt',
'&': 'amp',
'"': 'quot'
}[c] + ';';
});
}
function buildXml() {
let xml = ["<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"];
function buildXml () {
let xml = ['<?xml version="1.0" encoding="UTF-8"?>\n'];
xml.text = function(s) {
xml.text = function (s) {
Array.prototype.push.call(this, s);
return this;
};
xml.elem = function(tagName, attrs, close) {
this.text("<").text(tagName);
xml.elem = function (tagName, attrs, close) {
this.text('<').text(tagName);
attrs = attrs || {};
for (let a in attrs) {
if (attrs.hasOwnProperty(a)) {
this.text(" ").text(a).text("=\"")
.text(xmlEscape(String(attrs[a]))).text("\"");
this.text(' ').text(a).text('="')
.text(xmlEscape(String(attrs[a]))).text('"');
}
}
if (close) {
this.text("/");
this.text('/');
}
this.text(">\n");
this.text('>\n');
return this;
};
@@ -60,18 +59,24 @@ function buildXml() {
return xml;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief converts results to XML representation
////////////////////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
// @brief converts results to XML representation
// //////////////////////////////////////////////////////////////////////////////
function resultsToXml(results, baseName, cluster) {
function resultsToXml (results, baseName, cluster, isRocksDb) {
let clprefix = '';
if (cluster) {
clprefix = 'CL_';
}
const isSignificant = function(a, b) {
if (isRocksDb) {
clprefix += 'RX_';
} else {
clprefix += 'MM_';
}
const isSignificant = function (a, b) {
return (internalMembers.indexOf(b) === -1) && a.hasOwnProperty(b);
};
@@ -81,7 +86,7 @@ function resultsToXml(results, baseName, cluster) {
for (let runName in run) {
if (isSignificant(run, runName)) {
const xmlName = clprefix + resultName + "_" + runName;
const xmlName = clprefix + resultName + '_' + runName;
const current = run[runName];
if (current.skipped) {
@@ -96,7 +101,7 @@ function resultsToXml(results, baseName, cluster) {
}
let failuresFound = current.failed;
xml.elem("testsuite", {
xml.elem('testsuite', {
errors: 0,
failures: failuresFound,
tests: total,
@@ -113,43 +118,43 @@ function resultsToXml(results, baseName, cluster) {
seen = true;
xml.elem("testcase", {
xml.elem('testcase', {
name: clprefix + oneTestName,
time: 0 + oneTest.duration
}, success);
if (!success) {
xml.elem("failure");
xml.elem('failure');
xml.text('<![CDATA[' + oneTest.message + ']]>\n');
xml.elem("/failure");
xml.elem("/testcase");
xml.elem('/failure');
xml.elem('/testcase');
}
}
}
if (!seen) {
xml.elem("testcase", {
xml.elem('testcase', {
name: 'all_tests_in_' + xmlName,
time: 0 + current.duration
}, true);
}
xml.elem("/testsuite");
xml.elem('/testsuite');
const fn = makePathGeneric(baseName + xmlName + ".xml").join('_');
const fn = makePathGeneric(baseName + xmlName + '.xml').join('_');
fs.write(testOutputDirectory + fn, xml.join(""));
fs.write(testOutputDirectory + fn, xml.join(''));
}
}
}
}
}
////////////////////////////////////////////////////////////////////////////////
/// @brief runs the test using testing.js
////////////////////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
// @brief runs the test using testing.js
// //////////////////////////////////////////////////////////////////////////////
function main(argv) {
function main (argv) {
start_pretty_print();
// parse arguments
@@ -178,8 +183,8 @@ function main(argv) {
options = internal.parseArgv(argv, 0); // parse option with parseArgv function
}
} catch (x) {
print("failed to parse the json options: " + x.message + "\n" + String(x.stack));
print("argv: ", argv);
print('failed to parse the json options: ' + x.message + '\n' + String(x.stack));
print('argv: ', argv);
return -1;
}
}
@@ -191,7 +196,7 @@ function main(argv) {
}
options.testOutputDirectory = testOutputDirectory;
// force json reply
options.jsonReply = true;
@@ -204,7 +209,7 @@ function main(argv) {
// run tests
r = UnitTest.unitTest(testSuits, options, testOutputDirectory) || {};
} catch (x) {
print("caught exception during test execution!");
print('caught exception during test execution!');
if (x.message !== undefined) {
print(x.message);
@@ -214,7 +219,6 @@ function main(argv) {
print(x.stack);
} else {
print(x);
}
print(JSON.stringify(r));
@@ -225,8 +229,8 @@ function main(argv) {
crashed: true
});
// whether or not there was an error
fs.write(testOutputDirectory + "/UNITTEST_RESULT_EXECUTIVE_SUMMARY.json", String(r.status));
// whether or not there was an error
fs.write(testOutputDirectory + '/UNITTEST_RESULT_EXECUTIVE_SUMMARY.json', String(r.status));
if (options.writeXmlReport) {
let j;
@@ -237,14 +241,26 @@ function main(argv) {
j = inspect(r);
}
fs.write(testOutputDirectory + "/UNITTEST_RESULT.json", j);
fs.write(testOutputDirectory + "/UNITTEST_RESULT_CRASHED.json", String(r.crashed));
fs.write(testOutputDirectory + '/UNITTEST_RESULT.json', j);
fs.write(testOutputDirectory + '/UNITTEST_RESULT_CRASHED.json', String(r.crashed));
try {
resultsToXml(r,
"UNITTEST_RESULT_", (options.hasOwnProperty('cluster') && options.cluster));
let isCluster = false;
let isRocksDb = false;
let prefix = '';
if (options.hasOwnProperty('prefix')) {
prefix = options.prefix;
}
if (options.hasOwnProperty('cluster') && options.cluster) {
isCluster = true;
}
if (options.hasOwnProperty('storageEngine')) {
isRocksDb = (options.storageEngine === 'rocksdb');
}
resultsToXml(r, 'UNITTEST_RESULT_' + prefix, isCluster, isRocksDb);
} catch (x) {
print("exception while serializing status xml!");
print('exception while serializing status xml!');
print(x.message);
print(x.stack);
print(inspect(r));
@@ -258,7 +274,9 @@ function main(argv) {
}
let result = main(ARGUMENTS);
if (!result) {
// force an error in the console
throw 'peng!';
}

View File

@@ -428,6 +428,8 @@ target_link_libraries(arangoserver
)
if (USE_ENTERPRISE)
target_compile_definitions(arangoserver "-DUSE_ENTERPRISE=1")
target_include_directories(arangoserver ${ENTERPRISE_INCLUDE_DIR})
if (MSVC)
target_link_libraries(arangoserver
Wldap32.lib

View File

@@ -167,58 +167,6 @@ function analyzeCoreDumpWindows (instanceInfo) {
// / information about the incident.
// //////////////////////////////////////////////////////////////////////////////
function analyzeCrash (binary, arangod, options, checkStr) {
var cpf = '/proc/sys/kernel/core_pattern';
if (fs.isFile(cpf)) {
var matchApport = /.*apport.*/;
var matchVarTmp = /\/var\/tmp/;
var matchSystemdCoredump = /.*systemd-coredump*/;
var corePattern = fs.readBuffer(cpf);
var cp = corePattern.asciiSlice(0, corePattern.length);
if (matchApport.exec(cp) != null) {
print(RED + 'apport handles corefiles on your system. Uninstall it if you want us to get corefiles for analysis.' + RESET);
return;
}
if (matchSystemdCoredump.exec(cp) !== null) {
options.coreDirectory = '/var/lib/systemd/coredump/*core*' + arangod.pid + '*';
} else if (matchVarTmp.exec(cp) !== null) {
options.coreDirectory = cp.replace('%e', '*').replace('%t', '*').replace('%p', arangod.pid);
} else {
print(RED + 'Don\'t know how to locate core files on your system. "' + cpf + '" contains: "' + cp + '"' + RESET);
return;
}
}
let pathParts = binary.split(fs.pathSeparator);
let bareBinary = binary;
if (pathParts.length > 0) {
bareBinary = pathParts[pathParts.length - 1];
}
const storeArangodPath = arangod.rootDir + '/' + bareBinary + '_' + arangod.pid;
print(RED +
'during: ' + checkStr + ': Core dump written; copying ' + binary + ' to ' +
storeArangodPath + ' for later analysis.\n' +
'Server shut down with :\n' +
yaml.safeDump(arangod) +
'marking build as crashy.' + RESET);
let hint = '';
if (platform.substr(0, 3) === 'win') {
// Windows: wait for procdump to do its job...
statusExternal(arangod.monitor, true);
hint = analyzeCoreDumpWindows(arangod);
} else if (platform === 'darwin') {
fs.copyFile(binary, storeArangodPath);
hint = analyzeCoreDumpMac(arangod, options, storeArangodPath, arangod.pid);
} else {
fs.copyFile(binary, storeArangodPath);
hint = analyzeCoreDump(arangod, options, storeArangodPath, arangod.pid);
}
arangod.exitStatus.gdbHint = 'Run debugger with "' + hint + '"';
print(RESET);
}

View File

@@ -71,6 +71,8 @@ let optionsDocumentation = [
' - `buildType`: Windows build type (Debug, Release), leave empty on linux',
' - `configDir`: the directory containing the config files, defaults to',
' etc/testing',
' - `writeXml`: Write junit xml report files',
' - `prefix`: prefix for the tests in the xml reports',
'',
' - `rr`: if set to true arangod instances are run with rr',
'',
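
Together with the MM_/RX_ prefixes added in resultsToXml, the new prefix option lets the pipeline make JUnit suite names unique per test-matrix cell, so parallel runs don't overwrite each other's XML. How the Jenkinsfile above might pass it (hypothetical flag placement, mirroring the --minPort/--maxPort arguments):

// Hypothetical: tag XML suite names with the matrix coordinates.
def os = 'linux'; def edition = 'community'; def mode = 'cluster'
def testArgs = " --prefix ${os}_${edition}_${mode}_"
sh "build/bin/arangosh --log.level warning " +
   "--javascript.execute UnitTests/unittest.js shell_server -- ${testArgs}"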

View File

@@ -0,0 +1,18 @@
New-Item -Type Directory node_modules-bundled
foreach ($dir in $(Get-ChildItem node\node_modules -Directory)) {
if ($dir.Name -eq "mocha") {
Copy-Item -Recurse node\node_modules\$dir node_modules-bundled\
continue
}
$start=(Get-Content node\node_modules\$dir\package.json | ConvertFrom-Json).main
if ($start -eq $null) {
$start = "index.js"
}
if (!$start.EndsWith(".js")) {
$start += ".js"
}
New-Item -Type Directory node_modules-bundled\$dir
Set-Content -Path node_modules-bundled\$dir\package.json "{}"
webpack.cmd node\node_modules\$dir\$start node_modules-bundled\$dir\index.js --target node --output-library-target commonjs2
}