// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {

    agent {
        label 'Hadoop'
    }

    options {
        buildDiscarder(logRotator(numToKeepStr: '5'))
        timeout (time: 24, unit: 'HOURS')
        timestamps()
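        // The patch under test is checked out into the 'src' subdirectory of the
        // workspace, next to the Yetus checkout and the per-platform source copies.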
        checkoutToSubdirectory('src')
    }

    environment {
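        // Name of the workspace subdirectory that Apache Yetus is checked out into.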
        YETUS='yetus'
        // Branch name, tag name, or commit SHA to check out. Yetus release tags are 'rel/X.Y.Z'
        YETUS_VERSION='f9ba0170a5787a5f4662d3769804fef0226a182f'
    }

    parameters {
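        // Passed through to the precommit run so that the patch attached to the given
        // JIRA issue can be fetched, tested, and have results reported back to it.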
        string(name: 'JIRA_ISSUE_KEY',
               defaultValue: '',
               description: 'The JIRA issue that has a patch needing pre-commit testing. Example: HADOOP-1234')
    }

    stages {
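        // Check out Apache Yetus, the framework that drives the precommit checks below,
        // at the pinned YETUS_VERSION rather than tracking a moving branch.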
        stage ('install yetus') {
            steps {
                dir("${WORKSPACE}/${YETUS}") {
                    checkout([
                        $class: 'GitSCM',
                        branches: [[name: "${env.YETUS_VERSION}"]],
                        userRemoteConfigs: [[ url: 'https://github.com/apache/yetus.git']]]
                    )
                }
            }
        }

        // Setup codebase so that each platform's build happens in its own exclusive copy of the
        // codebase.
        // Primarily because YETUS messes up the git branch information and affects the subsequent
        // optional stages after the first one.
        stage ('setup sources') {
            steps {
                dir("${WORKSPACE}/centos-7") {
                    sh '''#!/usr/bin/env bash
                        cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-7
                    '''
                }

                dir("${WORKSPACE}/centos-8") {
                    sh '''#!/usr/bin/env bash
                        cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-8
                    '''
                }

                dir("${WORKSPACE}/debian-10") {
                    sh '''#!/usr/bin/env bash
                        cp -Rp ${WORKSPACE}/src ${WORKSPACE}/debian-10
                    '''
                }

                dir("${WORKSPACE}/ubuntu-focal") {
                    sh '''#!/usr/bin/env bash
                        cp -Rp ${WORKSPACE}/src ${WORKSPACE}/ubuntu-focal
                    '''
                }
            }
        }

        // This is an optional stage which runs only when there's a change in
        // C++/C++ build/platform.
        // This stage serves as a means of cross platform validation, which is
        // really needed to ensure that any C++ related/platform change doesn't
        // break the Hadoop build on Centos 7.
        stage ('precommit-run Centos 7') {
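            // These variables are consumed by dev-support/jenkins.sh: SOURCEDIR points at this
            // platform's copy of the source tree, PATCHDIR is where Yetus writes its output,
            // DOCKERFILE selects the platform-specific build image, and IS_OPTIONAL = 1 marks
            // the run as optional (non-voting).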
            environment {
                SOURCEDIR = "${WORKSPACE}/centos-7/src"
                PATCHDIR = "${WORKSPACE}/centos-7/out"
                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
                IS_OPTIONAL = 1
            }
            steps {
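                // Credentials are exposed as environment variables so that the precommit run
                // can authenticate against GitHub and ASF JIRA when publishing its results.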
                withCredentials(
                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                                      passwordVariable: 'GITHUB_TOKEN',
                                      usernameVariable: 'GITHUB_USER'),
                     usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                                      passwordVariable: 'JIRA_PASSWORD',
                                      usernameVariable: 'JIRA_USER')]) {
                    sh '''#!/usr/bin/env bash
                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
                    '''
                }
            }

            post {
                // Since this is an optional platform, we want to copy the artifacts
                // and archive it only if the build fails, to help with debugging.
                failure {
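                    // archiveArtifacts only takes paths relative to the workspace root, so the
                    // platform's out/ directory is copied up before being archived.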
                    sh '''#!/usr/bin/env bash
                        cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
                    '''
                    archiveArtifacts "out/**"
                }

                cleanup() {
                    script {
                        sh '''#!/usr/bin/env bash
                            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                            "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
                        '''
                    }
                }
            }
        }

        // This is an optional stage which runs only when there's a change in
        // C++/C++ build/platform.
        // This stage serves as a means of cross platform validation, which is
        // really needed to ensure that any C++ related/platform change doesn't
        // break the Hadoop build on Centos 8.
        stage ('precommit-run Centos 8') {
            environment {
                SOURCEDIR = "${WORKSPACE}/centos-8/src"
                PATCHDIR = "${WORKSPACE}/centos-8/out"
                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
                IS_OPTIONAL = 1
            }
            steps {
                withCredentials(
                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                                      passwordVariable: 'GITHUB_TOKEN',
                                      usernameVariable: 'GITHUB_USER'),
                     usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                                      passwordVariable: 'JIRA_PASSWORD',
                                      usernameVariable: 'JIRA_USER')]) {
                    sh '''#!/usr/bin/env bash
                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
                    '''
                }
            }

            post {
                // Since this is an optional platform, we want to copy the artifacts
                // and archive it only if the build fails, to help with debugging.
                failure {
                    sh '''#!/usr/bin/env bash
                        cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
                    '''
                    archiveArtifacts "out/**"
                }

                cleanup() {
                    script {
                        sh '''#!/usr/bin/env bash
                            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                            "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
                        '''
                    }
                }
            }
        }

        // This is an optional stage which runs only when there's a change in
        // C++/C++ build/platform.
        // This stage serves as a means of cross platform validation, which is
        // really needed to ensure that any C++ related/platform change doesn't
        // break the Hadoop build on Debian 10.
        stage ('precommit-run Debian 10') {
            environment {
                SOURCEDIR = "${WORKSPACE}/debian-10/src"
                PATCHDIR = "${WORKSPACE}/debian-10/out"
                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
                IS_OPTIONAL = 1
            }
            steps {
                withCredentials(
                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                                      passwordVariable: 'GITHUB_TOKEN',
                                      usernameVariable: 'GITHUB_USER'),
                     usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                                      passwordVariable: 'JIRA_PASSWORD',
                                      usernameVariable: 'JIRA_USER')]) {
                    sh '''#!/usr/bin/env bash
                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
                    '''
                }
            }

            post {
                // Since this is an optional platform, we want to copy the artifacts
                // and archive it only if the build fails, to help with debugging.
                failure {
                    sh '''#!/usr/bin/env bash
                        cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
                    '''
                    archiveArtifacts "out/**"
                }

                cleanup() {
                    script {
                        sh '''#!/usr/bin/env bash
                            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                            "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
                        '''
                    }
                }
            }
        }

        // We want to use Ubuntu Focal as our main CI and thus, this stage
        // isn't optional (runs for all the PRs).
        stage ('precommit-run Ubuntu focal') {
            environment {
                SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
                PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
                IS_OPTIONAL = 0
            }
            steps {
                withCredentials(
                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                                      passwordVariable: 'GITHUB_TOKEN',
                                      usernameVariable: 'GITHUB_USER'),
                     usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                                      passwordVariable: 'JIRA_PASSWORD',
                                      usernameVariable: 'JIRA_USER')]) {
                    sh '''#!/usr/bin/env bash
                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
                    '''
                }
            }

            post {
                always {
                    script {
                        // Publish status if it was missed (YETUS-1059)
                        withCredentials(
                            [usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
                                              passwordVariable: 'GITHUB_TOKEN',
                                              usernameVariable: 'GITHUB_USER')]) {
                            sh '''#!/usr/bin/env bash
                                # Copy the artifacts of Ubuntu focal build to workspace
                                cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"
                                # Send Github status
                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                                "${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
                            '''
                        }

                        // YETUS output
                        archiveArtifacts "out/**"

                        // Publish the HTML report so that it can be looked at
                        // Has to be relative to WORKSPACE.
                        publishHTML (target: [
                            allowMissing: true,
                            keepAll: true,
                            alwaysLinkToLastBuild: true,
                            // Has to be relative to WORKSPACE
                            reportDir: "out",
                            reportFiles: 'report.html',
                            reportName: 'Yetus Report'
                        ])

                        // Publish JUnit results
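                        // Wrapped in try/catch so that a missing or malformed surefire report
                        // only logs an error instead of failing the whole post step.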
                        try {
                            junit "${SOURCEDIR}/**/target/surefire-reports/*.xml"
                        } catch(e) {
                            echo 'junit processing: ' + e.toString()
                        }
                    }
                }

                cleanup() {
                    script {
                        sh '''#!/usr/bin/env bash
                            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                            "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
                        '''
                    }
                }
            }
        }
    }

    post {
        // Jenkins pipeline jobs fill slaves on PRs without this :(
        cleanup() {
            script {
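                // Build and test artifacts can be left without owner write/execute permission;
                // restore it across the workspace so that deleteDir() can remove everything.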
                sh '''#!/usr/bin/env bash
                    # See HADOOP-13951
                    chmod -R u+rxw "${WORKSPACE}"
                '''
                deleteDir()
            }
        }
    }
}