hadoop_err-unit.c

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
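
/*
 * Unit tests for the hadoop_err error object: hadoop_lerr_alloc() builds
 * errors from errno-style codes and hadoop_uverr_alloc() from libuv error
 * codes; these tests check the message and code stored in each error.
 */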
#include "common/hadoop_err.h"
#include "common/test.h"

#include <string.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <uv.h>

#define RUNTIME_EXCEPTION_ERROR_CODE EFAULT
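
/*
 * Build a local error with hadoop_lerr_alloc() and verify that its message
 * and error code match the expected values.
 */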
static int hadoop_lerr_alloc_test(int code, char *verMsg, char *fmt) {
    struct hadoop_err *err;

    err = hadoop_lerr_alloc(code, fmt);
    EXPECT_STR_EQ(verMsg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
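
/*
 * Build a local error with a printf-style format ("foo bar baz %d", 101)
 * and verify that its message is verMsg followed by the formatted text.
 */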
static int hadoop_lerr_alloc_test2(int code, char *verMsg) {
    struct hadoop_err *err;
    char msg[100];

    memset(msg, 0, 100);
    strcat(msg, verMsg);
    err = hadoop_lerr_alloc(code, "foo bar baz %d", 101);
    EXPECT_STR_EQ(strcat(msg, "foo bar baz 101"), hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
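
/*
 * Same as hadoop_lerr_alloc_test(), but the error is built from a libuv
 * error code with hadoop_uverr_alloc().
 */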
static int hadoop_uverr_alloc_test(int code, char *verMsg, char *fmt) {
    struct hadoop_err *err;

    err = hadoop_uverr_alloc(code, fmt);
    EXPECT_STR_EQ(verMsg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
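
/*
 * Same as hadoop_lerr_alloc_test2(), but the error is built from a libuv
 * error code with hadoop_uverr_alloc().
 */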
static int hadoop_uverr_alloc_test2(int code, char *verMsg) {
    struct hadoop_err *err;
    char msg[100];

    memset(msg, 0, 100);
    strcat(msg, verMsg);
    err = hadoop_uverr_alloc(code, "foo bar baz %d", 101);
    EXPECT_STR_EQ(strcat(msg, "foo bar baz 101"), hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
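
/*
 * Run each helper for a range of errno values, libuv error codes, and the
 * default (0) case, checking the exception class name and message text
 * produced for each code.
 */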
int main(void) {
    hadoop_lerr_alloc_test(RUNTIME_EXCEPTION_ERROR_CODE,
        "org.apache.hadoop.native.HadoopCore.RuntimeException: "
        "test RUNTIME_EXCEPTION_ERROR_CODE",
        "test RUNTIME_EXCEPTION_ERROR_CODE");
    hadoop_lerr_alloc_test(EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "test EINVAL", "test EINVAL");
    hadoop_lerr_alloc_test(ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "test ENOMEM", "test ENOMEM");
    hadoop_lerr_alloc_test(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "test default", "test default");
    hadoop_uverr_alloc_test(UV_EOF,
        "org.apache.hadoop.native.HadoopCore.EOFException: end of file: "
        "test UV_EOF", "test UV_EOF");
    hadoop_uverr_alloc_test(UV_EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "invalid argument: test UV_EINVAL", "test UV_EINVAL");
    hadoop_uverr_alloc_test(UV_ECONNREFUSED,
        "org.apache.hadoop.native.HadoopCore.ConnectionRefusedException: "
        "connection refused: test UV_ECONNREFUSED",
        "test UV_ECONNREFUSED");
    hadoop_uverr_alloc_test(UV_ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "not enough memory: test UV_ENOMEM", "test UV_ENOMEM");
    hadoop_uverr_alloc_test(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "Unknown system error: test default", "test default");
    hadoop_lerr_alloc_test2(EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: ");
    hadoop_lerr_alloc_test2(RUNTIME_EXCEPTION_ERROR_CODE,
        "org.apache.hadoop.native.HadoopCore.RuntimeException: ");
    hadoop_lerr_alloc_test2(ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: ");
    hadoop_lerr_alloc_test2(0,
        "org.apache.hadoop.native.HadoopCore.IOException: ");
    hadoop_uverr_alloc_test2(UV_EOF,
        "org.apache.hadoop.native.HadoopCore.EOFException: end of file: ");
    hadoop_uverr_alloc_test2(UV_EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "invalid argument: ");
    hadoop_uverr_alloc_test2(UV_ECONNREFUSED,
        "org.apache.hadoop.native.HadoopCore.ConnectionRefusedException: "
        "connection refused: ");
    hadoop_uverr_alloc_test2(UV_ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "not enough memory: ");
    hadoop_uverr_alloc_test2(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "Unknown system error: ");
    return EXIT_SUCCESS;
}
// vim: ts=4:sw=4:tw=79:et