// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
  17. def getGithubAndJiraCreds() {
  18. return [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
  19. passwordVariable: 'GITHUB_TOKEN',
  20. usernameVariable: 'GITHUB_USER'),
  21. usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
  22. passwordVariable: 'JIRA_PASSWORD',
  23. usernameVariable: 'JIRA_USER')]
  24. }
  25. pipeline {
  26. agent {
  27. label 'Hadoop'
  28. }
  29. options {
  30. buildDiscarder(logRotator(numToKeepStr: '5'))
  31. timeout (time: 24, unit: 'HOURS')
  32. timestamps()
  33. checkoutToSubdirectory('src')
  34. }
  35. environment {
  36. YETUS='yetus'
  37. // Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
  38. YETUS_VERSION='f9ba0170a5787a5f4662d3769804fef0226a182f'
  39. }
  40. parameters {
  41. string(name: 'JIRA_ISSUE_KEY',
  42. defaultValue: '',
  43. description: 'The JIRA issue that has a patch needing pre-commit testing. Example: HADOOP-1234')
  44. }
  45. stages {
  46. stage ('install yetus') {
  47. steps {
  48. dir("${WORKSPACE}/${YETUS}") {
  49. checkout([
  50. $class: 'GitSCM',
  51. branches: [[name: "${env.YETUS_VERSION}"]],
  52. userRemoteConfigs: [[ url: 'https://github.com/apache/yetus.git']]]
  53. )
  54. }
  55. }
  56. }
  57. // Setup codebase so that each platform's build happens in its own exclusive copy of the
  58. // codebase.
  59. // Primarily because YETUS messes up the git branch information and affects the subsequent
  60. // optional stages after the first one.
  61. stage ('setup sources') {
  62. steps {
  63. dir("${WORKSPACE}/centos-7") {
  64. sh '''#!/usr/bin/env bash
  65. cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-7
  66. '''
  67. }
  68. dir("${WORKSPACE}/centos-8") {
  69. sh '''#!/usr/bin/env bash
  70. cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-8
  71. '''
  72. }
  73. dir("${WORKSPACE}/debian-10") {
  74. sh '''#!/usr/bin/env bash
  75. cp -Rp ${WORKSPACE}/src ${WORKSPACE}/debian-10
  76. '''
  77. }
  78. dir("${WORKSPACE}/ubuntu-focal") {
  79. sh '''#!/usr/bin/env bash
  80. cp -Rp ${WORKSPACE}/src ${WORKSPACE}/ubuntu-focal
  81. '''
  82. }
  83. }
  84. }
  85. // This is an optional stage which runs only when there's a change in
  86. // C++/C++ build/platform.
  87. // This stage serves as a means of cross platform validation, which is
  88. // really needed to ensure that any C++ related/platform change doesn't
  89. // break the Hadoop build on Centos 7.
  90. stage ('precommit-run Centos 7') {
  91. environment {
  92. SOURCEDIR = "${WORKSPACE}/centos-7/src"
  93. PATCHDIR = "${WORKSPACE}/centos-7/out"
  94. DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
  95. IS_OPTIONAL = 1
  96. }
  97. steps {
  98. withCredentials(getGithubAndJiraCreds()) {
  99. sh '''#!/usr/bin/env bash
  100. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  101. "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
  102. '''
  103. }
  104. }
  105. post {
  106. // Since this is an optional platform, we want to copy the artifacts
  107. // and archive it only if the build fails, to help with debugging.
  108. failure {
  109. sh '''#!/usr/bin/env bash
  110. cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
  111. '''
  112. archiveArtifacts "out/**"
  113. }
  114. cleanup() {
  115. script {
  116. sh '''#!/usr/bin/env bash
  117. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  118. "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
  119. '''
  120. }
  121. }
  122. }
  123. }
  124. // This is an optional stage which runs only when there's a change in
  125. // C++/C++ build/platform.
  126. // This stage serves as a means of cross platform validation, which is
  127. // really needed to ensure that any C++ related/platform change doesn't
  128. // break the Hadoop build on Centos 8.
  129. stage ('precommit-run Centos 8') {
  130. environment {
  131. SOURCEDIR = "${WORKSPACE}/centos-8/src"
  132. PATCHDIR = "${WORKSPACE}/centos-8/out"
  133. DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
  134. IS_OPTIONAL = 1
  135. }
  136. steps {
  137. withCredentials(getGithubAndJiraCreds()) {
  138. sh '''#!/usr/bin/env bash
  139. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  140. "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
  141. '''
  142. }
  143. }
  144. post {
  145. // Since this is an optional platform, we want to copy the artifacts
  146. // and archive it only if the build fails, to help with debugging.
  147. failure {
  148. sh '''#!/usr/bin/env bash
  149. cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
  150. '''
  151. archiveArtifacts "out/**"
  152. }
  153. cleanup() {
  154. script {
  155. sh '''#!/usr/bin/env bash
  156. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  157. "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
  158. '''
  159. }
  160. }
  161. }
  162. }
  163. // This is an optional stage which runs only when there's a change in
  164. // C++/C++ build/platform.
  165. // This stage serves as a means of cross platform validation, which is
  166. // really needed to ensure that any C++ related/platform change doesn't
  167. // break the Hadoop build on Debian 10.
  168. stage ('precommit-run Debian 10') {
  169. environment {
  170. SOURCEDIR = "${WORKSPACE}/debian-10/src"
  171. PATCHDIR = "${WORKSPACE}/debian-10/out"
  172. DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
  173. IS_OPTIONAL = 1
  174. }
  175. steps {
  176. withCredentials(getGithubAndJiraCreds()) {
  177. sh '''#!/usr/bin/env bash
  178. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  179. "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
  180. '''
  181. }
  182. }
  183. post {
  184. // Since this is an optional platform, we want to copy the artifacts
  185. // and archive it only if the build fails, to help with debugging.
  186. failure {
  187. sh '''#!/usr/bin/env bash
  188. cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
  189. '''
  190. archiveArtifacts "out/**"
  191. }
  192. cleanup() {
  193. script {
  194. sh '''#!/usr/bin/env bash
  195. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  196. "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
  197. '''
  198. }
  199. }
  200. }
  201. }
  202. // We want to use Ubuntu Focal as our main CI and thus, this stage
  203. // isn't optional (runs for all the PRs).
  204. stage ('precommit-run Ubuntu focal') {
  205. environment {
  206. SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
  207. PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
  208. DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
  209. IS_OPTIONAL = 0
  210. }
  211. steps {
  212. withCredentials(getGithubAndJiraCreds()) {
  213. sh '''#!/usr/bin/env bash
  214. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  215. "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
  216. '''
  217. }
  218. }
  219. post {
  220. always {
  221. script {
  222. // Publish status if it was missed (YETUS-1059)
  223. withCredentials(
  224. [usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
  225. passwordVariable: 'GITHUB_TOKEN',
  226. usernameVariable: 'GITHUB_USER')]) {
  227. sh '''#!/usr/bin/env bash
  228. # Copy the artifacts of Ubuntu focal build to workspace
  229. cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"
  230. # Send Github status
  231. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  232. "${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
  233. '''
  234. }
  235. // YETUS output
  236. archiveArtifacts "out/**"
  237. // Publish the HTML report so that it can be looked at
  238. // Has to be relative to WORKSPACE.
  239. publishHTML (target: [
  240. allowMissing: true,
  241. keepAll: true,
  242. alwaysLinkToLastBuild: true,
  243. // Has to be relative to WORKSPACE
  244. reportDir: "out",
  245. reportFiles: 'report.html',
  246. reportName: 'Yetus Report'
  247. ])
  248. // Publish JUnit results
  249. try {
  250. junit "${SOURCEDIR}/**/target/surefire-reports/*.xml"
  251. } catch(e) {
  252. echo 'junit processing: ' + e.toString()
  253. }
  254. }
  255. }
  256. cleanup() {
  257. script {
  258. sh '''#!/usr/bin/env bash
  259. chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
  260. "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
  261. '''
  262. }
  263. }
  264. }
  265. }
  266. }
  267. post {
  268. // Jenkins pipeline jobs fill slaves on PRs without this :(
  269. cleanup() {
  270. script {
  271. sh '''#!/usr/bin/env bash
  272. # See HADOOP-13951
  273. chmod -R u+rxw "${WORKSPACE}"
  274. '''
  275. deleteDir()
  276. }
  277. }
  278. }
  279. }