#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Index of the current test case within this script.
ntest=1
# Name of the running case script; used to namespace its working directories.
fname="$0"

# Read the harness configuration from the environment and create the
# per-script working directories, both locally and on the filesystem
# under test.
prepare() {
  BASE_URI="${HADOOP_COMPAT_BASE_URI}"
  LOCAL_URI="${HADOOP_COMPAT_LOCAL_URI}"
  SNAPSHOT_URI="${HADOOP_COMPAT_SNAPSHOT_URI}"
  STORAGE_POLICY="${HADOOP_COMPAT_STORAGE_POLICY}"
  STDOUT_DIR="${HADOOP_COMPAT_STDOUT_DIR}"
  PASS_FILE="${HADOOP_COMPAT_PASS_FILE}"
  FAIL_FILE="${HADOOP_COMPAT_FAIL_FILE}"
  SKIP_FILE="${HADOOP_COMPAT_SKIP_FILE}"
  export baseDir="${BASE_URI}/${fname}"
  export localDir="${LOCAL_URI}/${fname}"
  export snapshotDir="${SNAPSHOT_URI}"
  export storagePolicy="${STORAGE_POLICY}"
  stdoutDir="${STDOUT_DIR}/${fname}/stdout"
  stderrDir="${STDOUT_DIR}/${fname}/stderr"
  mkdir -p "${stdoutDir}"
  mkdir -p "${stderrDir}"
  mkdir -p "${localDir}"
  hadoop fs -mkdir -p "${baseDir}"
}
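
# A minimal sketch of how a run might be configured before executing a
# case script. The paths and URIs below are illustrative only, not part
# of this file; the harness normally exports these variables itself:
#
#   HADOOP_COMPAT_BASE_URI="hdfs://localhost:8020/compat" \
#   HADOOP_COMPAT_LOCAL_URI="/tmp/compat-local" \
#   HADOOP_COMPAT_STDOUT_DIR="/tmp/compat-out" \
#   HADOOP_COMPAT_PASS_FILE="/tmp/compat-out/pass" \
#   HADOOP_COMPAT_FAIL_FILE="/tmp/compat-out/fail" \
#   HADOOP_COMPAT_SKIP_FILE="/tmp/compat-out/skip" \
#   sh some_case.sh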

# expect_ret <case-name> <expected-exit-code> <command> [args...]
# Run the command and pass the case when its exit code matches the
# expected value. The body runs in a subshell so the shifts do not
# disturb the caller's positional parameters.
expect_ret() { (
  cname="${1}"
  shift
  expect="${1}"
  shift
  stdout="${stdoutDir}/${ntest}"
  stderr="${stderrDir}/${ntest}"
  "$@" 1>"${stdout}" 2>"${stderr}"
  result="$?"
  if should_skip "${stderr}"; then
    skip_case "${cname}"
  else
    if [ X"${result}" = X"${expect}" ]; then
      pass_case "${cname}"
    else
      fail_case "${cname}"
    fi
  fi
)
  ntest=$((ntest + 1))
}
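
# Example usage (illustrative only; these are not cases defined in this
# file):
#
#   expect_ret "mkdirSucceeds" 0 hadoop fs -mkdir "${baseDir}/dir"
#   expect_ret "rmMissingFails" 1 hadoop fs -rm "${baseDir}/no-such-file"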

# expect_out <case-name> <regex> <command> [args...]
# Run the command and pass the case when some line of its stdout fully
# matches the anchored extended regex.
expect_out() { (
  cname="${1}"
  shift
  expect="${1}"
  shift
  stdout="${stdoutDir}/${ntest}"
  stderr="${stderrDir}/${ntest}"
  "$@" 1>"${stdout}" 2>"${stderr}"
  if should_skip "${stderr}"; then
    skip_case "${cname}"
  else
    if grep -Eq '^'"${expect}"'$' "${stdout}"; then
      pass_case "${cname}"
    else
      fail_case "${cname}"
    fi
  fi
)
  ntest=$((ntest + 1))
}
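
# Example usage (illustrative only): "hadoop fs -stat %F" prints
# "directory" for a directory, which the anchored regex must match
# exactly:
#
#   expect_out "statShowsType" "directory" hadoop fs -stat "%F" "${baseDir}/dir"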

# expect_lines <case-name> <line-count> <regex>... <command> [args...]
# Run the command and pass the case when its stdout matches the given
# anchored regexes line by line. "Found N items" summary lines (as
# emitted by ls-style commands) are ignored.
expect_lines() { (
  cname="${1}"
  shift
  lineNum="${1}"
  shift
  lines=$(expect_lines_parse "${lineNum}" "$@")
  shift "${lineNum}"
  stdout="${stdoutDir}/${ntest}"
  stderr="${stderrDir}/${ntest}"
  "$@" 1>"${stdout}" 2>"${stderr}"
  if should_skip "${stderr}"; then
    skip_case "${cname}"
  else
    lineCount="0"
    while read -r line; do
      case "${line}" in
        *"Found"*"items"*)
          continue
          ;;
      esac
      selectedLine=$(expect_lines_select "${lines}" "${lineCount}")
      if ! echo "${line}" | grep -Eq '^'"${selectedLine}"'$'; then
        lineCount="-1"
        break
      else
        lineCount=$((lineCount + 1))
      fi
    done <"${stdout}"
    if [ "${lineCount}" -eq "${lineNum}" ]; then
      pass_case "${cname}"
    else
      fail_case "${cname}"
    fi
  fi
)
  ntest=$((ntest + 1))
}
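
# Example usage (illustrative only): match two ls entries line by line;
# the "Found N items" summary is skipped automatically:
#
#   expect_lines "lsListsBoth" 2 \
#     "drwxr.*${baseDir}/dir" \
#     "-rw-r.*${baseDir}/file" \
#     hadoop fs -ls "${baseDir}"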

# Echo the ${1} expected-line regexes that follow the count argument,
# one per line.
expect_lines_parse() {
  for _ in $(seq 1 "${1}"); do
    shift
    echo "${1}"
  done
}

# Echo line ${2} (zero-based) of the multi-line string ${1}, or nothing
# when the index is out of range. The while loop runs in a pipeline
# subshell, so "return" only exits that subshell; the trailing echo then
# emits a blank line, which the caller's command substitution strips.
expect_lines_select() {
  lineSelector="0"
  echo "${1}" | while read -r splittedLine; do
    if [ "${lineSelector}" -eq "${2}" ]; then
      echo "${splittedLine}"
      return
    fi
    lineSelector=$((lineSelector + 1))
  done
  echo ""
}

# Return success when ${1} names one of the Hadoop shell entry points.
is_hadoop_shell() {
  if [ X"${1}" = X"hadoop" ] || [ X"${1}" = X"hdfs" ]; then
    return 0
  else
    return 1
  fi
}
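
# Example usage (illustrative only; shellCmd is a hypothetical variable):
#
#   if is_hadoop_shell "${shellCmd}"; then
#     expect_ret "duRuns" 0 "${shellCmd}" dfs -du "${baseDir}"
#   fi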

# Decide from a command's stderr whether the case should be skipped,
# i.e. the filesystem under test does not support the command or option.
should_skip() {
  if grep -q "Unknown command" "${1}" || grep -q "Illegal option" "${1}"; then
    return 0
  else
    return 1
  fi
}

# Report results in TAP style on stdout and append a record to the
# harness result files. Skipped cases still report "ok" on stdout but
# are logged to the skip file.
pass_case() {
  echo "ok ${ntest}"
  echo "${fname} - #${ntest} ${1}" >> "${PASS_FILE}"
}
fail_case() {
  echo "not ok ${ntest}"
  echo "${fname} - #${ntest} ${1}" >> "${FAIL_FILE}"
}
skip_case() {
  echo "ok ${ntest}"
  echo "${fname} - #${ntest} ${1}" >> "${SKIP_FILE}"
}

prepare