#!/usr/bin/env bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
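# Print the command-line usage for slsrun.sh.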
function hadoop_usage()
{
  echo "Usage: slsrun.sh <OPTIONS>"
  echo "         --tracetype=<SYNTH | SLS | RUMEN>"
  echo "         --tracelocation=<FILE1,FILE2,...>"
  echo "         (deprecated --input-rumen=<FILE1,FILE2,...> | --input-sls=<FILE1,FILE2,...>)"
  echo "         --output-dir=<SLS_SIMULATION_OUTPUT_DIRECTORY>"
  echo "         [--nodes=<SLS_NODES_FILE>]"
  echo "         [--track-jobs=<JOBID1,JOBID2,...>]"
  echo "         [--print-simulation]"
}
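
# Parse command-line options into shell variables, rejecting unknown options
# and requiring a trace input and an output directory.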
function parse_args()
{
  for i in "$@"; do
    case $i in
      --input-rumen=*)
        inputrumen=${i#*=}
      ;;
      --input-sls=*)
        inputsls=${i#*=}
      ;;
      --tracetype=*)
        tracetype=${i#*=}
      ;;
      --tracelocation=*)
        tracelocation=${i#*=}
      ;;
      --output-dir=*)
        outputdir=${i#*=}
      ;;
      --nodes=*)
        nodes=${i#*=}
      ;;
      --track-jobs=*)
        trackjobs=${i#*=}
      ;;
      --print-simulation)
        printsimulation="true"
      ;;
      *)
        hadoop_error "ERROR: Invalid option ${i}"
        hadoop_exit_with_usage 1
      ;;
    esac
  done

  if [[ -z "${inputrumen}" && -z "${inputsls}" && -z "${tracetype}" ]] ; then
    hadoop_error "ERROR: Either --input-rumen, --input-sls, or --tracetype (with --tracelocation) must be specified."
    hadoop_exit_with_usage 1
  fi

  if [[ -n "${inputrumen}" && -n "${inputsls}" && -n "${tracetype}" ]] ; then
    hadoop_error "ERROR: Only specify one of --input-rumen, --input-sls, or --tracetype (with --tracelocation)."
    hadoop_exit_with_usage 1
  fi

  if [[ -z "${outputdir}" ]] ; then
    hadoop_error "ERROR: The output directory --output-dir must be specified."
    hadoop_exit_with_usage 1
  fi
}
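
# Add the SLS tool jars and the YARN timeline service jars to the classpath.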
function calculate_classpath
{
  hadoop_add_to_classpath_tools hadoop-sls
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}/timelineservice"'/*'
}
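
# Build the SLSRunner argument list from the parsed options and launch the simulator.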
function run_simulation() {
  local args

  if [[ "${inputsls}" != "" ]] ; then
    hadoop_add_param args -inputsls "-inputsls ${inputsls}"
  fi
  if [[ "${inputrumen}" != "" ]] ; then
    hadoop_add_param args -inputrumen "-inputrumen ${inputrumen}"
  fi
  if [[ "${tracetype}" != "" ]] ; then
    hadoop_add_param args -tracetype "-tracetype ${tracetype}"
    hadoop_add_param args -tracelocation "-tracelocation ${tracelocation}"
  fi

  hadoop_add_param args -output "-output ${outputdir}"

  if [[ -n "${nodes}" ]] ; then
    hadoop_add_param args -nodes "-nodes ${nodes}"
  fi
  if [[ -n "${trackjobs}" ]] ; then
    hadoop_add_param args -trackjobs "-trackjobs ${trackjobs}"
  fi
  if [[ "${printsimulation}" == "true" ]] ; then
    hadoop_add_param args -printsimulation "-printsimulation"
  fi

  hadoop_add_client_opts
  hadoop_finalize

  # shellcheck disable=SC2086
  hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
}
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  this="${BASH_SOURCE-$0}"
  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
fi

HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
# shellcheck disable=SC2034
HADOOP_NEW_CONFIG=true

if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
  # shellcheck disable=SC1090
  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." >&2
  exit 1
fi
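
# Main entry: require at least one argument, then parse options, set up the classpath, and run.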
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi

parse_args "${@}"
calculate_classpath
run_simulation