diff --git a/pipelines/build/common/openjdk_build_pipeline.groovy b/pipelines/build/common/openjdk_build_pipeline.groovy
index 0dfc54ad6..3cffb7bc0 100644
--- a/pipelines/build/common/openjdk_build_pipeline.groovy
+++ b/pipelines/build/common/openjdk_build_pipeline.groovy
@@ -1625,9 +1625,7 @@ class Build {
 
     }
 
     def buildScriptsAssemble(
-        buildConfigEnvVars,
-        cleanWorkspaceAfter,
-        cleanWorkspaceBuildOutputAfter
+        buildConfigEnvVars
     ) {
         def build_path
@@ -1642,9 +1640,11 @@ def buildScriptsAssemble(
             batOrSh "rm -rf ${base_path}/jdk/modules/jdk.jpackage/jdk/jpackage/internal/resources/*"
         }
         context.stage('assemble') {
-            if ( buildConfig.TARGET_OS == 'windows' && buildConfig.DOCKER_IMAGE ) {
+            if ( buildConfig.TARGET_OS == 'windows' && buildConfig.DOCKER_IMAGE ) {
                 // SXAEC: Still TBC on this to determine if something fails without it
                 // Ref https://github.com/adoptium/infrastructure/issues/3723
+                // Fails to unstash even in non-docker case without the chmod e.g. windbld#840
+                context.bat('c:\\cygwin64\\bin\\find /cygdrive/c/workspace -name public_suffix_list.dat -ls')
                 context.bat('chmod -R a+rwX /cygdrive/c/workspace/openjdk-build/workspace/build/src/build & echo Done & exit 0')
             }
             // Restore signed JMODs
@@ -1669,9 +1669,9 @@ def buildScriptsAssemble(
             try {
                 context.timeout(time: buildTimeouts.BUILD_JDK_TIMEOUT, unit: 'HOURS') {
                     context.println "openjdk_build_pipeline: calling MABF to assemble on win/mac JDK11+"
-                    // SXAEC: Running ls -l here generates the shortname links required
-                    // by the build and create paths referenced in the config.status file
-                    if ( !context.isUnix() ) {
+                    if ( !context.isUnix() && buildConfig.DOCKER_IMAGE ) {
+                        // SXAEC: Running ls -l here generates the shortname links required
+                        // by the build and creates paths referenced in the config.status file
                         context.bat(script: 'ls -l /cygdrive/c "/cygdrive/c/Program Files (x86)" "/cygdrive/c/Program Files (x86)/Microsoft Visual Studio/2022" "/cygdrive/c/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/VC/Redist/MSVC" "/cygdrive/c/Program Files (x86)/Windows Kits/10/bin" "/cygdrive/c/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/VC/Tools/MSVC" "/cygdrive/c/Program Files (x86)/Windows Kits/10/include" "/cygdrive/c/Program Files (x86)/Windows Kits/10/lib"')
                     }
                     batOrSh("bash ${ADOPT_DEFAULTS_JSON['scriptDirectories']['buildfarm']} --assemble-exploded-image")
@@ -1712,8 +1712,6 @@ def buildScriptsAssemble(
                 }
                 throw new Exception("[ERROR] Build archive timeout (${buildTimeouts.BUILD_ARCHIVE_TIMEOUT} HOURS) has been reached. Exiting...")
             }
 
-            postBuildWSclean(cleanWorkspaceAfter, cleanWorkspaceBuildOutputAfter)
-
         } // context.stage('assemble')
     } // End of buildScriptsAssemble() 1643-1765
@@ -1725,19 +1723,16 @@ def buildScriptsAssemble(
 
     def buildScripts(
         cleanWorkspace,
-        cleanWorkspaceAfter,
-        cleanWorkspaceBuildOutputAfter,
         useAdoptShellScripts,
         enableSigner,
         buildConfigEnvVars
     ) {
+        // Create the repo handler with the user's defaults to ensure a temurin-build checkout is not null
+        // Pass actual ADOPT_DEFAULTS_JSON, and optional buildConfig CI and BUILD branch/tag overrides,
+        // so that RepoHandler checks out the desired repo correctly
+        def repoHandler = new RepoHandler(USER_REMOTE_CONFIGS, ADOPT_DEFAULTS_JSON, buildConfig.CI_REF, buildConfig.BUILD_REF)
+        repoHandler.setUserDefaultsJson(context, DEFAULTS_JSON['defaultsUrl'])
         return context.stage('build') {
-            // Create the repo handler with the user's defaults to ensure a temurin-build checkout is not null
-            // Pass actual ADOPT_DEFAULTS_JSON, and optional buildConfig CI and BUILD branch/tag overrides,
-            // so that RepoHandler checks out the desired repo correctly
-            def repoHandler = new RepoHandler(USER_REMOTE_CONFIGS, ADOPT_DEFAULTS_JSON, buildConfig.CI_REF, buildConfig.BUILD_REF)
-            repoHandler.setUserDefaultsJson(context, DEFAULTS_JSON['defaultsUrl'])
-
             context.println 'USER_REMOTE_CONFIGS: '
             context.println JsonOutput.toJson(USER_REMOTE_CONFIGS)
             context.println 'DEFAULTS_JSON: '
@@ -1857,9 +1852,9 @@ def buildScriptsAssemble(
                         // Call make-adopt-build-farm.sh to do initial windows/mac build
                         // windbld#254
                         context.println "openjdk_build_pipeline: Calling MABF on win/mac to build exploded image"
-//                        batOrSh("bash ./${ADOPT_DEFAULTS_JSON['scriptDirectories']['buildfarm']}")
+                        batOrSh("bash ./${ADOPT_DEFAULTS_JSON['scriptDirectories']['buildfarm']}")
                         // Use cached version from an attempt at the first phase only
-                        context.bat(script: "bash -c 'curl https://ci.adoptium.net/userContent/windows/openjdk-cached-workspace-phase1+8.tar.gz | tar -C /cygdrive/c/workspace/openjdk-build -xzf -'")
+//                        context.bat(script: "bash -c 'curl https://ci.adoptium.net/userContent/windows/openjdk-cached-workspace-phase1+8.tar.gz | tar -C /cygdrive/c/workspace/openjdk-build -xzf -'")
                     }
                     def base_path = build_path
                     if (openjdk_build_dir_arg == "") {
@@ -1972,10 +1967,6 @@ def buildScriptsAssemble(
                 throw new Exception("[ERROR] Build archive timeout (${buildTimeouts.BUILD_ARCHIVE_TIMEOUT} HOURS) has been reached. Exiting...")
             }
 
-            if ( !enableSigner ) { // Don't clean if we need the workspace for the later assemble phase
-                postBuildWSclean(cleanWorkspaceAfter, cleanWorkspaceBuildOutputAfter)
-            }
-
             // Set Github Commit Status
             if (env.JOB_NAME.contains('pr-tester')) {
                 updateGithubCommitStatus('SUCCESS', 'Build PASSED')
@@ -2065,46 +2056,6 @@ def buildScriptsAssemble(
     }
 
-    def postBuildWSclean(
-        cleanWorkspaceAfter,
-        cleanWorkspaceBuildOutputAfter
-    ) {
-        // post-build workspace clean:
-        if (cleanWorkspaceAfter || cleanWorkspaceBuildOutputAfter) {
-            try {
-                context.timeout(time: buildTimeouts.NODE_CLEAN_TIMEOUT, unit: 'HOURS') {
-                    // Note: Underlying org.apache DirectoryScanner used by cleanWs has a bug scanning where it misses files containing ".." so use rm -rf instead
-                    // Issue: https://issues.jenkins.io/browse/JENKINS-64779
-                    if (context.WORKSPACE != null && !context.WORKSPACE.isEmpty()) {
-                        if (cleanWorkspaceAfter) {
-                            context.println 'Cleaning workspace non-hidden files: ' + context.WORKSPACE + '/*'
-                            context.sh(script: 'rm -rf ' + context.WORKSPACE + '/*')
-
-                            // Clean remaining hidden files using cleanWs
-                            try {
-                                context.println 'Cleaning workspace hidden files using cleanWs: ' + context.WORKSPACE
-                                context.cleanWs notFailBuild: true, disableDeferredWipeout: true, deleteDirs: true
-                            } catch (e) {
-                                context.println "Failed to clean ${e}"
-                            }
-                        } else if (cleanWorkspaceBuildOutputAfter) {
-                            context.println 'openjdk_build_pipelione: Cleaning workspace build output files'
-                            batOrSh('rm -rf ' + context.WORKSPACE + '/workspace/build/src/build' + ' ' + context.WORKSPACE + '/workspace/target ' + context.WORKSPACE + '/workspace/build/devkit ' + context.WORKSPACE + '/workspace/build/straceOutput')
-                        }
-                    } else {
-                        context.println 'Warning: Unable to clean workspace as context.WORKSPACE is null/empty'
-                    }
-                }
-            } catch (FlowInterruptedException e) {
-                // Set Github Commit Status
-                if (env.JOB_NAME.contains('pr-tester')) {
-                    updateGithubCommitStatus('FAILED', 'Build FAILED')
-                }
-                throw new Exception("[ERROR] AIX clean workspace timeout (${buildTimeouts.AIX_CLEAN_TIMEOUT} HOURS) has been reached. Exiting...")
-            }
-        }
-    }
-
     /*
     Main function. This is what is executed remotely via the helper file kick_off_build.groovy, which is in turn executed by the downstream jobs.
     Running in downstream build job jdk-*-*-* called by kick_off_build.groovy
     */
@@ -2158,7 +2109,6 @@ def postBuildWSclean(
                 updateGithubCommitStatus('PENDING', 'Pending')
             }
         }
-
         if (buildConfig.DOCKER_IMAGE) {
             context.println "openjdk_build_pipeline: preparing to use docker image"
             // Docker build environment
@@ -2180,7 +2130,6 @@ def postBuildWSclean(
             context.node(label) {
                 addNodeToBuildDescription()
                 // Cannot clean workspace from inside docker container
-                context.println 'SXAEC: batable and batted 2042 (rm cyclonedx-lib and security)'
                 if ( buildConfig.TARGET_OS == 'windows' && buildConfig.DOCKER_IMAGE ) {
                     context.bat('rm -rf c:/workspace/openjdk-build/cyclonedx-lib c:/workspace/openjdk-build/security')
                 }
@@ -2259,8 +2208,6 @@ def postBuildWSclean(
                     context.docker.build("build-image", "--build-arg image=${buildConfig.DOCKER_IMAGE} -f ${buildConfig.DOCKER_FILE} .").inside(buildConfig.DOCKER_ARGS) {
                         buildScripts(
                             cleanWorkspace,
-                            cleanWorkspaceAfter,
-                            cleanWorkspaceBuildOutputAfter,
                             useAdoptShellScripts,
                             enableSigner,
                             envVars
@@ -2285,8 +2232,6 @@ def postBuildWSclean(
                     context.docker.image(buildConfig.DOCKER_IMAGE).inside(buildConfig.DOCKER_ARGS+" "+dockerRunArg) {
                         buildScripts(
                             cleanWorkspace,
-                            cleanWorkspaceAfter,
-                            cleanWorkspaceBuildOutputAfter,
                             useAdoptShellScripts,
                             enableSigner,
                             envVars
@@ -2298,8 +2243,6 @@ def postBuildWSclean(
                    context.docker.image(buildConfig.DOCKER_IMAGE).inside(buildConfig.DOCKER_ARGS+" "+dockerRunArg) {
                         buildScripts(
                             cleanWorkspace,
-                            cleanWorkspaceAfter,
-                            cleanWorkspaceBuildOutputAfter,
                             useAdoptShellScripts,
                             enableSigner,
                             envVars
@@ -2315,9 +2258,7 @@ def postBuildWSclean(
                     context.println "openjdk_build_pipeline: running assemble phase (invocation 1)"
                     context.docker.image(buildConfig.DOCKER_IMAGE).inside(buildConfig.DOCKER_ARGS+" "+dockerRunArg) {
                         buildScriptsAssemble(
-                            envVars,
-                            cleanWorkspaceAfter,
-                            cleanWorkspaceBuildOutputAfter
+                            envVars
                         )
                     }
                 }
@@ -2346,8 +2287,6 @@ def postBuildWSclean(
                 context.ws(workspace) {
                     buildScripts(
                         cleanWorkspace,
-                        cleanWorkspaceAfter,
-                        cleanWorkspaceBuildOutputAfter,
                         useAdoptShellScripts,
                         enableSigner,
                         envVars
@@ -2356,17 +2295,13 @@ def postBuildWSclean(
                     buildScriptsEclipseSigner()
                     context.println "openjdk_build_pipeline: running assemble phase (invocation 2)"
                     buildScriptsAssemble(
-                        envVars,
-                        cleanWorkspaceAfter,
-                        cleanWorkspaceBuildOutputAfter
+                        envVars
                     )
                 }
             }
         } else { // Non-windows, non-docker
             buildScripts(
                 cleanWorkspace,
-                cleanWorkspaceAfter,
-                cleanWorkspaceBuildOutputAfter,
                 useAdoptShellScripts,
                 enableSigner,
                 envVars
@@ -2375,9 +2310,7 @@ def postBuildWSclean(
                 buildScriptsEclipseSigner()
                 context.println "openjdk_build_pipeline: running assemble phase (invocation 3)"
                 buildScriptsAssemble(
-                    envVars,
-                    cleanWorkspaceAfter,
-                    cleanWorkspaceBuildOutputAfter
+                    envVars
                 )
             }
         }
@@ -2386,6 +2319,45 @@ def postBuildWSclean(
             }
         }
 
+        // post-build workspace clean:
+        context.node(label) {
+            if (cleanWorkspaceAfter || cleanWorkspaceBuildOutputAfter) {
+                try {
+                    context.timeout(time: buildTimeouts.NODE_CLEAN_TIMEOUT, unit: 'HOURS') {
+                        // Note: Underlying org.apache DirectoryScanner used by cleanWs has a bug scanning where it misses files containing ".." so use rm -rf instead
+                        // Issue: https://issues.jenkins.io/browse/JENKINS-64779
+                        if (context.WORKSPACE != null && !context.WORKSPACE.isEmpty()) {
+                            if (cleanWorkspaceAfter) {
+                                context.println 'Cleaning workspace non-hidden files: ' + context.WORKSPACE + '/*'
+                                context.sh(script: 'rm -rf ' + context.WORKSPACE + '/*')
+
+                                // Clean remaining hidden files using cleanWs
+                                try {
+                                    context.println 'Cleaning workspace hidden files using cleanWs: ' + context.WORKSPACE
+                                    context.cleanWs notFailBuild: true, disableDeferredWipeout: true, deleteDirs: true
+                                } catch (e) {
+                                    context.println "Failed to clean ${e}"
+                                }
+                            } else if (cleanWorkspaceBuildOutputAfter) {
+                                if ( enableSigner ) {
+                                    context.println 'ERROR? ENABLE_SIGNER and CLEAN_WORKSPACE_AFTER_BUILD both set'
+                                }
+                                context.println 'Cleaning workspace build output files under ' + context.WORKSPACE
+                                batOrSh('rm -rf ' + context.WORKSPACE + '/workspace/build/src/build ' + context.WORKSPACE + '/workspace/target ' + context.WORKSPACE + '/workspace/build/devkit ' + context.WORKSPACE + '/workspace/build/straceOutput')
+                            }
+                        } else {
+                            context.println 'Warning: Unable to clean workspace as context.WORKSPACE is null/empty'
+                        }
+                    }
+                } catch (FlowInterruptedException e) {
+                    // Set Github Commit Status
+                    if (env.JOB_NAME.contains('pr-tester')) {
+                        updateGithubCommitStatus('FAILED', 'Build FAILED')
+                    }
+                    throw new Exception("[ERROR] Node clean workspace timeout (${buildTimeouts.NODE_CLEAN_TIMEOUT} HOURS) has been reached. Exiting...")
+                }
+            }
+        }
 
         // Sign and archive jobs if needed
         if (enableSigner) {
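
---

Editorial notes with illustrative Groovy sketches. These are commentary on the patch above, not part of it.

The headline change: the cleanWorkspaceAfter / cleanWorkspaceBuildOutputAfter flags no longer thread through buildScripts() and buildScriptsAssemble(); clean-up now happens once, after all phases have run. A minimal sketch of the narrowed three-phase call chain, using the variable names from the diff (the surrounding control flow is simplified):

    buildScripts(cleanWorkspace, useAdoptShellScripts, enableSigner, envVars)
    if (enableSigner) {
        buildScriptsEclipseSigner()       // sign phase between build and assemble
        buildScriptsAssemble(envVars)     // cleanWorkspace* arguments are gone
    }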
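The relocated clean-up sits in its own context.node(label) block because, as the retained comment notes, the workspace cannot be cleaned from inside a Docker container, and rm -rf is preferred over cleanWs() alone because the underlying DirectoryScanner misses paths containing ".." (JENKINS-64779). A scripted-pipeline sketch of that shape, in which image, dockerArgs, label, cleanWorkspaceAfter and runAllPhases() are hypothetical stand-ins for the values the class wires up:

    node(label) {
        docker.image(image).inside(dockerArgs) {
            runAllPhases()  // build -> sign -> assemble, as in the diff
        }
    }
    // Take the agent again, outside any container, for the clean-up.
    node(label) {
        if (cleanWorkspaceAfter && env.WORKSPACE) {
            sh "rm -rf ${env.WORKSPACE}/*"  // bulk delete; avoids JENKINS-64779
            cleanWs notFailBuild: true, disableDeferredWipeout: true, deleteDirs: true  // sweep remaining hidden files
        }
    }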
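On the Windows side, the chmod before restoring the signed JMODs is kept because the unstash otherwise fails even in the non-docker case (windbld#840). A sketch of that guard in isolation; the stash name signed_jmods is a hypothetical stand-in for whatever the pipeline actually stashes:

    if (!isUnix()) {
        // Relax permissions so the following unstash can overwrite the build tree;
        // "& echo Done & exit 0" swallows a non-zero chmod exit status.
        bat 'chmod -R a+rwX /cygdrive/c/workspace/openjdk-build/workspace/build/src/build & echo Done & exit 0'
    }
    unstash 'signed_jmods'  // "Restore signed JMODs" in the diff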