# hadoop_confdir.bats
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  15. load hadoop-functions_test_helper
  16. create_fake_dirs () {
  17. HADOOP_PREFIX=${TMP}
  18. for j in conf etc/hadoop; do
  19. mkdir -p "${HADOOP_PREFIX}/${j}"
  20. echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
  21. done
  22. }
  23. @test "hadoop_find_confdir (default)" {
  24. create_fake_dirs
  25. hadoop_find_confdir
  26. [ -n "${HADOOP_CONF_DIR}" ]
  27. }
  28. @test "hadoop_find_confdir (bw compat: conf)" {
  29. create_fake_dirs
  30. hadoop_find_confdir
  31. echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
  32. [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/conf ]
  33. }
  34. @test "hadoop_find_confdir (etc/hadoop)" {
  35. create_fake_dirs
  36. rm -rf "${HADOOP_PREFIX}/conf"
  37. hadoop_find_confdir
  38. [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/etc/hadoop ]
  39. }
  40. @test "hadoop_verify_confdir (negative) " {
  41. create_fake_dirs
  42. HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  43. run hadoop_verify_confdir
  44. [ -n "${output}" ]
  45. }
  46. @test "hadoop_verify_confdir (positive) " {
  47. create_fake_dirs
  48. HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  49. touch "${HADOOP_CONF_DIR}/log4j.properties"
  50. run hadoop_verify_confdir
  51. [ -z "${output}" ]
  52. }
  53. @test "hadoop_exec_hadoopenv (positive) " {
  54. create_fake_dirs
  55. HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  56. hadoop_exec_hadoopenv
  57. [ -n "${HADOOP_ENV_PROCESSED}" ]
  58. [ "${unittest}" = conf ]
  59. }
  60. @test "hadoop_exec_hadoopenv (negative) " {
  61. create_fake_dirs
  62. HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  63. HADOOP_ENV_PROCESSED=true
  64. hadoop_exec_hadoopenv
  65. [ -z "${unittest}" ]
  66. }
  67. @test "hadoop_exec_userfuncs" {
  68. create_fake_dirs
  69. HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
  70. echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  71. hadoop_exec_userfuncs
  72. [ "${unittest}" = "userfunc" ]
  73. }
  74. @test "hadoop_exec_hadooprc" {
  75. HOME=${TMP}
  76. echo "unittest=hadooprc" > "${TMP}/.hadooprc"
  77. hadoop_exec_hadooprc
  78. [ ${unittest} = "hadooprc" ]
  79. }