# Dockerfile
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.
  18. FROM ubuntu:trusty
  19. WORKDIR /root
  20. ENV DEBIAN_FRONTEND noninteractive
  21. ENV DEBCONF_TERSE true
  22. ######
  23. # Install common dependencies from packages
  24. ######
  25. RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
  26. ant \
  27. build-essential \
  28. bzip2 \
  29. cmake \
  30. curl \
  31. doxygen \
  32. fuse \
  33. g++ \
  34. gcc \
  35. git \
  36. gnupg-agent \
  37. make \
  38. maven \
  39. libbz2-dev \
  40. libcurl4-openssl-dev \
  41. libfuse-dev \
  42. libjansson-dev \
  43. libprotobuf-dev \
  44. libprotoc-dev \
  45. libsnappy-dev \
  46. libssl-dev \
  47. libtool \
  48. openjdk-7-jdk \
  49. pinentry-curses \
  50. pkg-config \
  51. protobuf-compiler \
  52. protobuf-c-compiler \
  53. python \
  54. python2.7 \
  55. python2.7-dev \
  56. python-pip \
  57. rsync \
  58. snappy \
  59. zlib1g-dev
  60. # Fixing the Apache commons / Maven dependency problem under Ubuntu:
  61. # See http://wiki.apache.org/commons/VfsProblems
  62. RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .
  63. #######
  64. # Java OpenJDK
  65. #######
  66. RUN echo "dot_style = mega" > "/root/.wgetrc"
  67. RUN echo "quiet = on" >> "/root/.wgetrc"
  68. RUN apt-get -q install --no-install-recommends -y software-properties-common
  69. RUN add-apt-repository -y ppa:webupd8team/java
  70. RUN apt-get -q update
  71. # Install OpenJDK 7
  72. RUN apt-get -q install --no-install-recommends -y openjdk-7-jdk
  73. ######
  74. # Install spotbugs (successor of findbugs)
  75. ######
  76. RUN mkdir -p /opt/spotbugs && \
  77. curl -L -s -S \
  78. http://repo.maven.apache.org/maven2/com/github/spotbugs/spotbugs/3.1.2/spotbugs-3.1.2.tgz \
  79. -o /opt/spotbugs.tar.gz && \
  80. tar xzf /opt/spotbugs.tar.gz --strip-components 1 -C /opt/spotbugs
  81. # Hadoop uses FINDBUGS_HOME to run spotbugs
  82. ENV FINDBUGS_HOME /opt/spotbugs
  83. ####
  84. # Install shellcheck
  85. ####
  86. RUN apt-get -q install -y cabal-install
  87. RUN mkdir /root/.cabal
  88. RUN echo "remote-repo: hackage.fpcomplete.com:http://hackage.fpcomplete.com/" >> /root/.cabal/config
  89. #RUN echo "remote-repo: hackage.haskell.org:http://hackage.haskell.org/" > /root/.cabal/config
  90. RUN echo "remote-repo-cache: /root/.cabal/packages" >> /root/.cabal/config
  91. RUN cabal update
  92. RUN cabal install shellcheck --global
  93. ####
  94. # Install bats
  95. ####
  96. RUN add-apt-repository -y ppa:duggan/bats
  97. RUN apt-get -q update
  98. RUN apt-get -q install --no-install-recommends -y bats
  99. ####
  100. # Install pylint
  101. ####
  102. RUN pip install pylint
  103. ####
  104. # Install dateutil.parser
  105. ####
  106. RUN pip install python-dateutil
  107. ###
  108. # Avoid out of memory errors in builds
  109. ###
  110. ENV MAVEN_OPTS -Xms256m -Xmx1536m
  111. ###
  112. # Everything past this point is either not needed for testing or breaks Yetus.
  113. # So tell Yetus not to read the rest of the file:
  114. # YETUS CUT HERE
  115. ###
  116. ####
  117. # Install Forrest (for Apache Hadoop website)
  118. ###
  119. RUN mkdir -p /usr/local/apache-forrest ; \
  120. curl -s -S -O http://archive.apache.org/dist/forrest/0.8/apache-forrest-0.8.tar.gz ; \
  121. tar xzf *forrest* --strip-components 1 -C /usr/local/apache-forrest ; \
  122. echo 'forrest.home=/usr/local/apache-forrest' > build.properties
  123. # Add a welcome message and environment checks.
  124. ADD hadoop_env_checks.sh /root/hadoop_env_checks.sh
  125. RUN chmod 755 /root/hadoop_env_checks.sh
  126. RUN echo '~/hadoop_env_checks.sh' >> /root/.bashrc