|
@@ -17,24 +17,112 @@
|
|
|
*/
|
|
|
|
|
|
#include "common/hadoop_err.h"
|
|
|
+
|
|
|
#include "common/test.h"
|
|
|
|
|
|
+#include <string.h>
|
|
|
#include <errno.h>
|
|
|
#include <stdio.h>
|
|
|
#include <stdlib.h>
|
|
|
#include <uv.h>
|
|
|
|
|
|
-int main(void)
|
|
|
-{
|
|
|
- struct hadoop_err *err;
|
|
|
+#define RUNTIME_EXCEPTION_ERROR_CODE EFAULT
|
|
|
+
|
|
|
/**
 * Verify that hadoop_lerr_alloc produces the expected message and code.
 *
 * @param code      The errno-style error code to allocate the error with.
 * @param verMsg    The exact message the resulting error is expected to carry.
 * @param fmt       The message text to hand to hadoop_lerr_alloc.
 *
 * @return          0 on success; nonzero/early return on a failed EXPECT_*
 *                  check (exact failure semantics come from common/test.h —
 *                  NOTE(review): confirm against that header).
 */
static int hadoop_lerr_alloc_test(int code, char *verMsg, char *fmt) {
    struct hadoop_err *err;

    // hadoop_lerr_alloc is printf-style (see the "foo bar baz %d" caller).
    // Pass the caller-supplied text through "%s" instead of using it as the
    // format string itself, so a stray '%' in a test message cannot cause
    // undefined varargs behavior (format-string hazard, CERT FIO30-C).
    err = hadoop_lerr_alloc(code, "%s", fmt);
    EXPECT_STR_EQ(verMsg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
|
|
|
+
|
|
|
/**
 * Verify hadoop_lerr_alloc's printf-style formatting: the error message must
 * be the class-name prefix (verMsg) followed by the formatted text.
 *
 * @param code      The errno-style error code to allocate the error with.
 * @param verMsg    The expected exception-class prefix of the message.
 *
 * @return          0 on success; nonzero/early return on a failed EXPECT_*
 *                  check (see common/test.h).
 */
static int hadoop_lerr_alloc_test2(int code, char *verMsg) {
    struct hadoop_err *err;
    char msg[100];

    // Build the expected string with a single bounded snprintf instead of
    // memset + two unbounded strcat calls: snprintf cannot overflow msg and
    // always NUL-terminates, and the result is byte-identical for all
    // current inputs.
    snprintf(msg, sizeof(msg), "%sfoo bar baz 101", verMsg);
    err = hadoop_lerr_alloc(code, "foo bar baz %d", 101);
    EXPECT_STR_EQ(msg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
|
|
|
+
|
|
|
/**
 * Verify that hadoop_uverr_alloc produces the expected message and code for
 * a libuv error value.
 *
 * @param code      The libuv error code (e.g. UV_EOF) to allocate with.
 * @param verMsg    The exact message the resulting error is expected to carry.
 * @param fmt       The message text to hand to hadoop_uverr_alloc.
 *
 * @return          0 on success; nonzero/early return on a failed EXPECT_*
 *                  check (see common/test.h).
 */
static int hadoop_uverr_alloc_test(int code, char *verMsg, char *fmt) {
    struct hadoop_err *err;

    // As in hadoop_lerr_alloc_test: route caller text through "%s" rather
    // than using it as the format string (format-string hazard otherwise).
    err = hadoop_uverr_alloc(code, "%s", fmt);
    EXPECT_STR_EQ(verMsg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
|
|
|
|
|
|
- err = hadoop_lerr_alloc(EINVAL, "foo bar baz %d", 101);
|
|
|
- EXPECT_STR_EQ("org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
|
|
|
- "foo bar baz 101", hadoop_err_msg(err));
|
|
|
- EXPECT_INT_EQ(EINVAL, hadoop_err_code(err));
|
|
|
- hadoop_err_free(err);
|
|
|
/**
 * Verify hadoop_uverr_alloc's printf-style formatting: the error message
 * must be the class-name/strerror prefix (verMsg) followed by the formatted
 * text.
 *
 * @param code      The libuv error code (e.g. UV_EOF) to allocate with.
 * @param verMsg    The expected prefix of the message.
 *
 * @return          0 on success; nonzero/early return on a failed EXPECT_*
 *                  check (see common/test.h).
 */
static int hadoop_uverr_alloc_test2(int code, char *verMsg) {
    struct hadoop_err *err;
    char msg[100];

    // Bounded snprintf replaces memset + two unbounded strcat calls; it
    // cannot overflow msg, always NUL-terminates, and yields the same bytes
    // for every current input.
    snprintf(msg, sizeof(msg), "%sfoo bar baz 101", verMsg);
    err = hadoop_uverr_alloc(code, "foo bar baz %d", 101);
    EXPECT_STR_EQ(msg, hadoop_err_msg(err));
    EXPECT_INT_EQ(code, hadoop_err_code(err));
    hadoop_err_free(err);
    return 0;
}
|
|
|
|
|
|
- return EXIT_SUCCESS;
|
|
|
/**
 * Driver for the hadoop_err message tests: exercises hadoop_lerr_alloc and
 * hadoop_uverr_alloc across each error code that maps to a distinct
 * exception class name, plus the default (0 / unknown) case.
 *
 * Each helper returns 0 on success and nonzero on failure, but the original
 * code discarded every return value and always reported EXIT_SUCCESS; the
 * results are now OR-ed together so any failing check fails the process
 * exit status.
 */
int main(void) {
    int ret = 0;

    ret |= hadoop_lerr_alloc_test(RUNTIME_EXCEPTION_ERROR_CODE,
        "org.apache.hadoop.native.HadoopCore.RuntimeException: "
        "test RUNTIME_EXCEPTION_ERROR_CODE",
        "test RUNTIME_EXCEPTION_ERROR_CODE");
    ret |= hadoop_lerr_alloc_test(EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "test EINVAL", "test EINVAL");
    ret |= hadoop_lerr_alloc_test(ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "test ENOMEM", "test ENOMEM");
    ret |= hadoop_lerr_alloc_test(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "test default", "test default");
    ret |= hadoop_uverr_alloc_test(UV_EOF,
        "org.apache.hadoop.native.HadoopCore.EOFException: end of file: "
        "test UV_EOF", "test UV_EOF");
    ret |= hadoop_uverr_alloc_test(UV_EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "invalid argument: test UV_EINVAL", "test UV_EINVAL");
    ret |= hadoop_uverr_alloc_test(UV_ECONNREFUSED,
        "org.apache.hadoop.native.HadoopCore.ConnectionRefusedException: "
        "connection refused: test UV_ECONNREFUSED",
        "test UV_ECONNREFUSED");
    ret |= hadoop_uverr_alloc_test(UV_ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "not enough memory: test UV_ENOMEM", "test UV_ENOMEM");
    ret |= hadoop_uverr_alloc_test(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "Unknown system error: test default", "test default");
    ret |= hadoop_lerr_alloc_test2(EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: ");
    ret |= hadoop_lerr_alloc_test2(RUNTIME_EXCEPTION_ERROR_CODE,
        "org.apache.hadoop.native.HadoopCore.RuntimeException: ");
    ret |= hadoop_lerr_alloc_test2(ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: ");
    ret |= hadoop_lerr_alloc_test2(0,
        "org.apache.hadoop.native.HadoopCore.IOException: ");
    ret |= hadoop_uverr_alloc_test2(UV_EOF,
        "org.apache.hadoop.native.HadoopCore.EOFException: end of file: ");
    ret |= hadoop_uverr_alloc_test2(UV_EINVAL,
        "org.apache.hadoop.native.HadoopCore.InvalidRequestException: "
        "invalid argument: ");
    ret |= hadoop_uverr_alloc_test2(UV_ECONNREFUSED,
        "org.apache.hadoop.native.HadoopCore.ConnectionRefusedException: "
        "connection refused: ");
    ret |= hadoop_uverr_alloc_test2(UV_ENOMEM,
        "org.apache.hadoop.native.HadoopCore.OutOfMemoryException: "
        "not enough memory: ");
    ret |= hadoop_uverr_alloc_test2(0,
        "org.apache.hadoop.native.HadoopCore.IOException: "
        "Unknown system error: ");

    return ret ? EXIT_FAILURE : EXIT_SUCCESS;
}
|
|
|
|
|
|
// vim: ts=4:sw=4:tw=79:et
|