check_hadoop.sh

#!/bin/bash
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
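# Nagios check: exercises basic HDFS and MapReduce health by copying a file
# into HDFS, listing it, running the example WordCount job, and cleaning up.
# Usage: check_hadoop.sh -u <user> [-k <keytab>] [-s]   (-s enables secure/Kerberos mode)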
user=""
secure="false"
keytab=""
kinit_path="/usr/kerberos/bin/kinit"
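# Parse command-line options.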
while getopts ":u:k:s" opt; do
  case $opt in
    u)
      user=$OPTARG
      ;;
    k)
      keytab=$OPTARG
      ;;
    s)
      secure="true"
      ;;
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 3
      ;;
    :)
      echo "UNKNOWN: Option -$OPTARG requires an argument." >&2
      exit 3
      ;;
  esac
done
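# Log file for command output and a timestamped name used for this run's HDFS paths.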
outfile="/tmp/nagios-hadoop-check.out"
curtime=`date +"%F-%H-%M-%S"`
fname="nagios-hadoop-check-${curtime}"
if [[ "$user" == "" ]]; then
  echo "INVALID: user argument not specified"
  exit 3
fi
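# Default to the user's headless keytab if none was supplied.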
if [[ "$keytab" == "" ]]; then
  keytab="/homes/$user/$user.headless.keytab"
fi
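# Fall back to kinit on the PATH if the expected binary is not present.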
if [[ ! -f "$kinit_path" ]]; then
  kinit_path="kinit"
fi
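# In secure mode, obtain a Kerberos ticket for the check user before touching HDFS.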
if [[ "$secure" == "true" ]]; then
  sudo -u $user -i "$kinit_path -kt $keytab $user" > ${outfile} 2>&1
fi
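# Copy a small local file into the user's HDFS home directory.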
sudo -u $user -i "hadoop dfs -copyFromLocal /etc/passwd ${fname}.input" > ${outfile} 2>&1
if [[ "$?" -ne "0" ]]; then
  echo "CRITICAL: Error copying file to HDFS. See error output in ${outfile} on nagios server"
  exit 2
fi
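# List the user's HDFS home directory.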
sudo -u $user -i "hadoop dfs -ls" > ${outfile} 2>&1
if [[ "$?" -ne "0" ]]; then
  echo "CRITICAL: Error listing HDFS files. See error output in ${outfile} on nagios server"
  exit 2
fi
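# Run the example WordCount MapReduce job against the uploaded file.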
sudo -u $user -i "hadoop jar /usr/share/hadoop/hadoop-examples-*.jar wordcount ${fname}.input ${fname}.out" >> ${outfile} 2>&1
if [[ "$?" -ne "0" ]]; then
  echo "CRITICAL: Error running M/R job. See error output in ${outfile} on nagios server"
  exit 2
fi
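# Clean up the job output and input, bypassing the HDFS trash.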
sudo -u $user -i "hadoop fs -rmr -skipTrash ${fname}.out" >> ${outfile} 2>&1
if [[ "$?" -ne "0" ]]; then
  echo "CRITICAL: Error removing M/R job output. See error output in ${outfile} on nagios server"
  exit 2
fi
sudo -u $user -i "hadoop fs -rm -skipTrash ${fname}.input" >> ${outfile} 2>&1
if [[ "$?" -ne "0" ]]; then
  echo "CRITICAL: Error removing M/R job input. See error output in ${outfile} on nagios server"
  exit 2
fi
echo "OK: M/R WordCount Job ran successfully"
exit 0