
Merge pull request #39790 from thaJeztah/jenkinsfile_linting_and_skipping

integration: fix some linting issues, and don't fail build on artifact uploads
Commit 001b78bffe by Sebastiaan van Stijn, 2019-08-27 15:34:04 +02:00 (committed by GitHub)
2 changed files with 80 additions and 52 deletions

Jenkinsfile vendored

@@ -109,12 +109,15 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo 'Creating docker-py-bundles.tar.gz'
tar -czf docker-py-bundles.tar.gz bundles/test-docker-py/*.xml bundles/test-docker-py/*.log
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=docker-py
echo "Creating ${bundleName}-bundles.tar.gz"
tar -czf ${bundleName}-bundles.tar.gz bundles/test-docker-py/*.xml bundles/test-docker-py/*.log
'''
archiveArtifacts artifacts: 'docker-py-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
}
}
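
Run outside Jenkins, the updated bundling step above reduces to a few lines of shell. A minimal sketch, assuming a test run has already populated bundles/test-docker-py/ with XML reports and logs (bundleName is the only per-stage difference):

#!/bin/sh
# Sketch of the parameterized bundling step from the stage above; assumes
# bundles/test-docker-py/ already contains the *.xml and *.log files.
bundleName=docker-py
echo "Creating ${bundleName}-bundles.tar.gz"
tar -czf "${bundleName}-bundles.tar.gz" bundles/test-docker-py/*.xml bundles/test-docker-py/*.log

Deriving the archive name from bundleName lets a single archiveArtifacts pattern ('*-bundles.tar.gz', with allowEmptyArchive: true) cover every stage, while catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') downgrades a failed bundling or upload to a stage-level failure instead of failing the whole build.
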
@@ -200,12 +203,15 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo 'Creating unit-bundles.tar.gz'
tar -czvf unit-bundles.tar.gz bundles/junit-report.xml bundles/go-test-report.json bundles/profile.out
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=unit
echo "Creating ${bundleName}-bundles.tar.gz"
tar -czvf ${bundleName}-bundles.tar.gz bundles/junit-report.xml bundles/go-test-report.json bundles/profile.out
'''
archiveArtifacts artifacts: 'unit-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
@@ -317,13 +323,16 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo "Creating janky-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf janky-bundles.tar.gz
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=janky
echo "Creating ${bundleName}-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -o -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf ${bundleName}-bundles.tar.gz
'''
archiveArtifacts artifacts: 'janky-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
@@ -411,13 +420,16 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo "Creating s390x-integration-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf s390x-integration-bundles.tar.gz
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=s390x-integration
echo "Creating ${bundleName}-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -o -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf ${bundleName}-bundles.tar.gz
'''
archiveArtifacts artifacts: 's390x-integration-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
@@ -486,12 +498,16 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo "Creating s390x-integration-cli-bundles.tar.gz"
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf s390x-integration-cli-bundles.tar.gz
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=s390x-integration-cli
echo "Creating ${bundleName}-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -o -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf ${bundleName}-bundles.tar.gz
'''
archiveArtifacts artifacts: 's390x-integration-cli-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
@@ -577,13 +593,16 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo "Creating powerpc-integration-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf powerpc-integration-bundles.tar.gz
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=powerpc-integration
echo "Creating ${bundleName}-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -o -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf ${bundleName}-bundles.tar.gz
'''
archiveArtifacts artifacts: 'powerpc-integration-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
@@ -650,12 +669,16 @@ pipeline {
docker run --rm -v "$WORKSPACE:/workspace" busybox chown -R "$(id -u):$(id -g)" /workspace
'''
sh '''
echo "Creating powerpc-integration-cli-bundles.tar.gz"
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf powerpc-integration-cli-bundles.tar.gz
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE', message: 'Failed to create bundles.tar.gz') {
sh '''
bundleName=powerpc-integration-cli
echo "Creating ${bundleName}-bundles.tar.gz"
# exclude overlay2 directories
find bundles -path '*/root/*overlay2' -prune -o -type f \\( -o -name '*.log' -o -name '*.prof' \\) -print | xargs tar -czf ${bundleName}-bundles.tar.gz
'''
archiveArtifacts artifacts: 'powerpc-integration-cli-bundles.tar.gz'
archiveArtifacts artifacts: '*-bundles.tar.gz', allowEmptyArchive: true
}
}
cleanup {
sh 'make clean'
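
All of the integration stages above collect artifacts with the same find | xargs tar pipeline, pruning the per-daemon overlay2 storage directories and keeping only log and profile files. Two notes on the updated lines: the doubled backslashes (\\() are Groovy string escaping, so the shell sees \( ... \); and the new commands gained a stray -o immediately after the opening parenthesis, which GNU find rejects as an invalid expression, an error that catchError now hides from the overall build result. A standalone sketch of the intended command (bundleName is illustrative):

#!/bin/sh
# Collect *.log and *.prof files under bundles/, skipping overlay2 storage
# directories, and pack them into <bundleName>-bundles.tar.gz.
bundleName=janky
find bundles -path '*/root/*overlay2' -prune \
    -o -type f \( -name '*.log' -o -name '*.prof' \) -print \
    | xargs tar -czf "${bundleName}-bundles.tar.gz"
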

Integration test helper script

@@ -17,15 +17,16 @@ if [[ "${TESTFLAGS}" = *-test.run* ]]; then
fi
if [ -z ${MAKEDIR} ]; then
export MAKEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ -z "${MAKEDIR}" ]; then
MAKEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export MAKEDIR
fi
source "$MAKEDIR/.go-autogen"
source "${MAKEDIR}/.go-autogen"
# Set defaults
: ${TEST_REPEAT:=1}
: ${TESTFLAGS:=}
: ${TESTDEBUG:=}
: "${TEST_REPEAT:=1}"
: "${TESTFLAGS:=}"
: "${TESTDEBUG:=}"
setup_integration_test_filter() {
if [ -z "${TEST_FILTER}" ]; then
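
The quoting changes above are mostly mechanical, but the default-setting lines deserve a note: ':' is the shell no-op builtin, used here only so that the ${VAR:=default} expansion runs and assigns a default when the variable is unset or empty, and quoting the expansion is what keeps shellcheck from warning about word splitting and globbing. A small sketch using the script's own variables:

#!/usr/bin/env bash
# ':' evaluates its arguments and discards them, so the only effect is the
# assignment performed by the := expansion.
: "${TEST_REPEAT:=1}"   # set to 1 only if unset or empty
: "${TESTFLAGS:=}"      # defines the variable (as empty) if the caller never set it
echo "TEST_REPEAT=${TEST_REPEAT} TESTFLAGS='${TESTFLAGS}'"
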
@@ -33,9 +34,9 @@ setup_integration_test_filter() {
fi
if [ -z "${TEST_SKIP_INTEGRATION}" ]; then
: ${TEST_INTEGRATION_DIR:=$(grep -rl "func\ .*${TEST_FILTER}.*\(t\ \*testing\.T\)" ./integration | grep '_test\.go' | xargs -I file dirname file | uniq)}
: "${TEST_INTEGRATION_DIR:=$(grep -rl "func\ .*${TEST_FILTER}.*\(t\ \*testing\.T\)" ./integration | grep '_test\.go' | xargs -I file dirname file | uniq)}"
if [ -z "${TEST_INTEGRATION_DIR}" ]; then
echo Skipping integration tests since the supplied filter \"${TEST_FILTER}\" omits all integration tests
echo "Skipping integration tests since the supplied filter \"${TEST_FILTER}\" omits all integration tests"
TEST_SKIP_INTEGRATION=1
else
TESTFLAGS_INTEGRATION+="-test.run ${TEST_FILTER}"
@@ -46,7 +47,7 @@ setup_integration_test_filter() {
# ease up on the filtering here since CLI suites are namespaced by an object
if grep -r "${TEST_FILTER}.*\(c\ \*check\.C\)" ./integration-cli | grep -q '_test\.go$'; then
TEST_SKIP_INTEGRATION_CLI=1
echo Skipping integration-cli tests since the supplied filter \"${TEST_FILTER}\" omits all integration-cli tests
echo "Skipping integration-cli tests since the supplied filter \"${TEST_FILTER}\" omits all integration-cli tests"
else
TESTFLAGS_INTEGRATION_CLI+="-check.f ${TEST_FILTER}"
fi
@@ -54,7 +55,7 @@ setup_integration_test_filter() {
}
setup_integration_test_filter
integration_api_dirs=${TEST_INTEGRATION_DIR:-$(go list -test -f '{{- if ne .ForTest "" -}}{{- .Dir -}}{{- end -}}' ./integration/...)}
integration_api_dirs="${TEST_INTEGRATION_DIR:-$(go list -test -f '{{- if ne .ForTest "" -}}{{- .Dir -}}{{- end -}}' ./integration/...)}"
run_test_integration() {
set_platform_timeout
@@ -72,6 +73,7 @@ run_test_integration_suites() {
if ! (
cd "$dir"
echo "Running $PWD flags=${flags}"
# shellcheck disable=SC2086
test_env ./test.main ${flags}
); then exit 1; fi
done
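
The shellcheck disable=SC2086 comments added here are deliberate: flags holds several whitespace-separated arguments, so the word splitting that SC2086 warns about is exactly what the test binary invocation needs, and quoting the expansion would pass everything as one argument. A directive placed on its own line like this applies to the command that follows it. A minimal sketch of the difference, with printf standing in for test_env ./test.main:

#!/usr/bin/env bash
flags='-test.v -test.timeout=360m'

# Unquoted: split into two arguments, which is what the test binary expects.
# shellcheck disable=SC2086
printf '[%s]\n' ${flags}      # prints [-test.v] and [-test.timeout=360m]

# Quoted: a single argument containing a space, which the binary would reject.
printf '[%s]\n' "${flags}"    # prints [-test.v -test.timeout=360m]
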
@@ -82,12 +84,13 @@ run_test_integration_legacy_suites() {
flags="-check.v -check.timeout=${TIMEOUT} -test.timeout=360m $TESTFLAGS ${TESTFLAGS_INTEGRATION_CLI}"
cd integration-cli
echo "Running $PWD flags=${flags}"
# shellcheck disable=SC2086
test_env ./test.main $flags
)
}
build_test_suite_binaries() {
if [ ${DOCKER_INTEGRATION_TESTS_VERIFIED-} ]; then
if [ -n "${DOCKER_INTEGRATION_TESTS_VERIFIED}" ]; then
echo "Skipping building test binaries; as DOCKER_INTEGRATION_TESTS_VERIFIED is set"
return
fi
@@ -112,6 +115,7 @@ build_test_suite_binary() {
cleanup_test_suite_binaries() {
[ -n "$TESTDEBUG" ] && return
echo "Removing test suite binaries"
# shellcheck disable=SC2038
find integration* -name test.main | xargs -r rm
}
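
SC2038 warns about piping find into xargs because filenames containing whitespace or newlines would be mangled; disabling it is reasonable here, since the matches are test.main binaries under the integration* trees and their paths contain no such characters. For reference, an equivalent form that avoids the warning altogether (an alternative, not what the script uses) lets find invoke rm itself:

#!/bin/sh
# Remove compiled test.main binaries; -exec ... {} + batches arguments much
# like xargs, and rm -f tolerates an empty match set.
find integration* -name test.main -exec rm -f {} +
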
@@ -160,6 +164,7 @@ error_on_leaked_containerd_shims() {
awk '$2 == "containerd-shim" && $4 ~ /.*\/bundles\/.*\/test-integration/ { print $1 }')
if [ -n "$leftovers" ]; then
ps aux
# shellcheck disable=SC2086
kill -9 ${leftovers} 2> /dev/null
echo "!!!! WARNING you have left over shim(s), Cleanup your test !!!!"
exit 1
@@ -169,11 +174,11 @@ error_on_leaked_containerd_shims() {
set_platform_timeout() {
# Test timeout.
if [ "${DOCKER_ENGINE_GOARCH}" = "arm64" ] || [ "${DOCKER_ENGINE_GOARCH}" = "arm" ]; then
: ${TIMEOUT:=10m}
: "${TIMEOUT:=10m}"
elif [ "${DOCKER_ENGINE_GOARCH}" = "windows" ]; then
: ${TIMEOUT:=8m}
: "${TIMEOUT:=8m}"
else
: ${TIMEOUT:=5m}
: "${TIMEOUT:=5m}"
fi
if [ "${TEST_REPEAT}" -gt 1 ]; then