浏览代码

MAPREDUCE-5102. fix coverage org.apache.hadoop.mapreduce.lib.db and org.apache.hadoop.mapred.lib.db. Contributed by Aleksey Gorshkov, Andrey Klochkov, and Nathan Roberts

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530816 13f79535-47bb-0310-9956-ffa450edef68
Jason Darrell Lowe 11 年之前
父节点
当前提交
00395a0064

+ 4 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -187,6 +187,10 @@ Release 2.3.0 - UNRELEASED
 
     MAPREDUCE-5514. Fix TestRMContainerAllocator. (Zhijie Shen via acmurthy) 
 
+    MAPREDUCE-5102. fix coverage org.apache.hadoop.mapreduce.lib.db and
+    org.apache.hadoop.mapred.lib.db (Aleksey Gorshkov, Andrey Klochkov, and
+    Nathan Roberts via jlowe)
+
 Release 2.2.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 157 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/lib/db/TestDBInputFormat.java

@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.lib.db;
+
+import java.sql.DriverManager;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit;
+import org.apache.hadoop.mapred.lib.db.DBInputFormat.DBRecordReader;
+import org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapred.lib.db.DBConfiguration;
+import org.apache.hadoop.mapreduce.lib.db.DriverForTest;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+public class TestDBInputFormat {
+
+  /**
+   * Tests DBInputFormat: the format should split the query result into chunks.
+   * @throws Exception
+   */
+  @Test(timeout = 10000)
+  public void testDBInputFormat() throws Exception {
+    JobConf configuration = new JobConf();
+    setupDriver(configuration);
+    
+    DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
+    format.setConf(configuration);
+    format.setConf(configuration);
+    DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
+    Reporter reporter = mock(Reporter.class);
+    RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
+        splitter, configuration, reporter);
+
+    configuration.setInt(MRJobConfig.NUM_MAPS, 3);
+    InputSplit[] lSplits = format.getSplits(configuration, 3);
+    assertEquals(5, lSplits[0].getLength());
+    assertEquals(3, lSplits.length);
+
+    // Basic sanity checks on the record reader.
+    assertEquals(LongWritable.class, reader.createKey().getClass());
+    assertEquals(0, reader.getPos());
+    assertEquals(0, reader.getProgress(), 0.001);
+    reader.close();
+  }
+  
+  /** 
+   * Tests that setInput and DBConfiguration.configureDB populate the expected DBConfiguration.* properties.
+   */
+  @Test (timeout = 5000)
+  public void testSetInput() {
+    JobConf configuration = new JobConf();
+
+    String[] fieldNames = { "field1", "field2" };
+    DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
+        "conditions", "orderBy", fieldNames);
+    assertEquals(
+        "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
+        configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
+            .getName());
+    assertEquals("table",
+        configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));
+
+    String[] fields = configuration
+        .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
+    assertEquals("field1", fields[0]);
+    assertEquals("field2", fields[1]);
+
+    assertEquals("conditions",
+        configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
+    assertEquals("orderBy",
+        configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));
+
+    configuration = new JobConf();
+
+    DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
+        "countQuery");
+    assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
+    assertEquals("countQuery",
+        configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));
+    
+    JobConf jConfiguration = new JobConf();
+    DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
+        "password");
+    assertEquals("driverClass",
+        jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
+    assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
+    assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
+    assertEquals("password",
+        jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
+    jConfiguration = new JobConf();
+    DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
+    assertEquals("driverClass",
+        jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
+    assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
+    assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
+    assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
+  }
+
+  /**
+   * 
+   * Tests DBRecordReader: the reader should create keys/values and track its read position.
+   */
+  @SuppressWarnings("unchecked")
+  @Test (timeout = 5000)
+  public void testDBRecordReader() throws Exception {
+
+    JobConf job = mock(JobConf.class);
+    DBConfiguration dbConfig = mock(DBConfiguration.class);
+    String[] fields = { "field1", "filed2" };
+
+    @SuppressWarnings("rawtypes")
+    DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
+        new DBInputSplit(),  NullDBWritable.class, job,
+        DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
+    LongWritable key = reader.createKey();
+    assertEquals(0, key.get());
+    DBWritable value = reader.createValue();
+    assertEquals(
+        "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
+            .getClass().getName());
+    assertEquals(0, reader.getPos());
+    assertFalse(reader.next(key, value));
+
+  }
+
+  private void setupDriver(JobConf configuration) throws Exception {
+    configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
+    DriverManager.registerDriver(new DriverForTest());
+    configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
+        DriverForTest.class.getCanonicalName());
+  }
+
+}

+ 112 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/db/DriverForTest.java

@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.lib.db;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.Driver;
+import java.sql.DriverPropertyInfo;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Statement;
+import java.util.Properties;
+import java.util.logging.Logger;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
+/**
+ * JDBC Driver stub that emulates a database connection via Mockito mocks,
+ * for use by the lib.db tests.
+ */
+public class DriverForTest implements Driver {
+
+  public static Connection getConnection() {
+    Connection connection = mock(FakeConnection.class);
+    try {
+      Statement statement = mock(Statement.class);
+      ResultSet results = mock(ResultSet.class);
+      when(results.getLong(1)).thenReturn(15L);
+      when(statement.executeQuery(any(String.class))).thenReturn(results);
+      when(connection.createStatement()).thenReturn(statement);
+
+      DatabaseMetaData metadata = mock(DatabaseMetaData.class);
+      when(metadata.getDatabaseProductName()).thenReturn("Test");
+      when(connection.getMetaData()).thenReturn(metadata);
+
+      PreparedStatement reparedStatement0= mock(PreparedStatement.class);
+      when(connection.prepareStatement(anyString())).thenReturn(
+          reparedStatement0);
+
+      PreparedStatement preparedStatement = mock(PreparedStatement.class);
+      ResultSet resultSet = mock(ResultSet.class);
+      when(resultSet.next()).thenReturn(false);
+      when(preparedStatement.executeQuery()).thenReturn(resultSet);
+
+      when(connection.prepareStatement(anyString(), anyInt(), anyInt()))
+          .thenReturn(preparedStatement);
+    } catch (SQLException e) {
+      ; // cannot happen: all calls above are on Mockito mocks
+    }
+    return connection;
+  }
+
+  @Override
+  public boolean acceptsURL(String arg0) throws SQLException {
+    return "testUrl".equals(arg0);
+  }
+
+  @Override
+  public Connection connect(String arg0, Properties arg1) throws SQLException {
+   
+    return getConnection();
+  }
+
+  @Override
+  public int getMajorVersion() {
+    return 1;
+  }
+
+  @Override
+  public int getMinorVersion() {
+    return 1;
+  }
+
+  @Override
+  public DriverPropertyInfo[] getPropertyInfo(String arg0, Properties arg1)
+      throws SQLException {
+
+    return null;
+  }
+
+  @Override
+  public boolean jdbcCompliant() {
+    return true;
+  }
+  
+  public Logger getParentLogger() throws SQLFeatureNotSupportedException {
+    throw new SQLFeatureNotSupportedException();
+  }
+
+  // Connection subtype with the Oracle-specific method used by OracleDBRecordReader
+  private interface FakeConnection extends Connection{
+    public void setSessionTimeZone(String arg);
+  }
+  
+}

+ 170 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDbClasses.java

@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.lib.db;
+
+import java.sql.Connection;
+import java.sql.Types;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit;
+import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
+import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+public class TestDbClasses {
+  /**
+   * Tests the splitters returned by DataDrivenDBInputFormat: each SQL data
+   * type should map to the appropriate splitter class.
+   */
+  @Test(timeout = 1000)
+  public void testDataDrivenDBInputFormatSplitter() {
+    DataDrivenDBInputFormat<NullDBWritable> format = new DataDrivenDBInputFormat<NullDBWritable>();
+    testCommonSplitterTypes(format);
+    assertEquals(DateSplitter.class, format.getSplitter(Types.TIMESTAMP)
+        .getClass());
+    assertEquals(DateSplitter.class, format.getSplitter(Types.DATE).getClass());
+    assertEquals(DateSplitter.class, format.getSplitter(Types.TIME).getClass());
+  }
+
+  @Test(timeout = 1000)
+  public void testDataDrivenDBInputFormat() throws Exception {
+    JobContext jobContext = mock(JobContext.class);
+    Configuration configuration = new Configuration();
+    configuration.setInt(MRJobConfig.NUM_MAPS, 1);
+
+    when(jobContext.getConfiguration()).thenReturn(configuration);
+    DataDrivenDBInputFormat<NullDBWritable> format = new DataDrivenDBInputFormat<NullDBWritable>();
+    List<InputSplit> splits = format.getSplits(jobContext);
+    assertEquals(1, splits.size());
+    DataDrivenDBInputSplit split = (DataDrivenDBInputSplit) splits.get(0);
+    assertEquals("1=1", split.getLowerClause());
+    assertEquals("1=1", split.getUpperClause());
+
+    // Repeat with two map tasks and an explicit bounding query.
+    configuration.setInt(MRJobConfig.NUM_MAPS, 2);
+
+    DataDrivenDBInputFormat.setBoundingQuery(configuration, "query");
+    assertEquals("query",
+        configuration.get(DBConfiguration.INPUT_BOUNDING_QUERY));
+
+    Job job = mock(Job.class);
+    when(job.getConfiguration()).thenReturn(configuration);
+    DataDrivenDBInputFormat.setInput(job, NullDBWritable.class, "query",
+        "Bounding Query");
+
+    assertEquals("Bounding Query",
+        configuration.get(DBConfiguration.INPUT_BOUNDING_QUERY));
+  }
+
+  @Test(timeout = 1000)
+  public void testOracleDataDrivenDBInputFormat() throws Exception {
+    OracleDataDrivenDBInputFormat<NullDBWritable> format = 
+        new OracleDataDrivenDBInputFormatForTest();
+    testCommonSplitterTypes(format);
+    assertEquals(OracleDateSplitter.class, format.getSplitter(Types.TIMESTAMP)
+        .getClass());
+    assertEquals(OracleDateSplitter.class, 
+        format.getSplitter(Types.DATE).getClass());
+    assertEquals(OracleDateSplitter.class, 
+        format.getSplitter(Types.TIME).getClass());
+  }
+
+  /**
+   * Tests the SELECT query generated by OracleDBRecordReader.
+   */
+
+  @Test(timeout = 2000)
+  public void testOracleDBRecordReader() throws Exception {
+    DBInputSplit splitter = new DBInputSplit(1, 10);
+    Configuration configuration = new Configuration();
+    Connection connect = DriverForTest.getConnection();
+
+    DBConfiguration dbConfiguration = new DBConfiguration(configuration);
+    dbConfiguration.setInputOrderBy("Order");
+    String[] fields = { "f1", "f2" };
+
+    OracleDBRecordReader<NullDBWritable> recorder = new OracleDBRecordReader<NullDBWritable>(
+        splitter, NullDBWritable.class, configuration, connect,
+        dbConfiguration, "condition", fields, "table");
+    assertEquals(
+        "SELECT * FROM (SELECT a.*,ROWNUM dbif_rno FROM ( SELECT f1, f2 FROM table WHERE condition ORDER BY Order ) a WHERE rownum <= 1 + 9 ) WHERE dbif_rno >= 1",
+        recorder.getSelectQuery());
+  }
+
+  private void testCommonSplitterTypes(
+      DataDrivenDBInputFormat<NullDBWritable> format) {
+    assertEquals(BigDecimalSplitter.class, format.getSplitter(Types.DECIMAL)
+        .getClass());
+    assertEquals(BigDecimalSplitter.class, format.getSplitter(Types.NUMERIC)
+        .getClass());
+    assertEquals(BooleanSplitter.class, format.getSplitter(Types.BOOLEAN)
+        .getClass());
+    assertEquals(BooleanSplitter.class, format.getSplitter(Types.BIT)
+        .getClass());
+    assertEquals(IntegerSplitter.class, format.getSplitter(Types.BIGINT)
+        .getClass());
+    assertEquals(IntegerSplitter.class, format.getSplitter(Types.TINYINT)
+        .getClass());
+    assertEquals(IntegerSplitter.class, format.getSplitter(Types.SMALLINT)
+        .getClass());
+    assertEquals(IntegerSplitter.class, format.getSplitter(Types.INTEGER)
+        .getClass());
+    assertEquals(FloatSplitter.class, format.getSplitter(Types.DOUBLE)
+        .getClass());
+    assertEquals(FloatSplitter.class, format.getSplitter(Types.REAL).getClass());
+    assertEquals(FloatSplitter.class, format.getSplitter(Types.FLOAT)
+        .getClass());
+    assertEquals(TextSplitter.class, format.getSplitter(Types.LONGVARCHAR)
+        .getClass());
+    assertEquals(TextSplitter.class, format.getSplitter(Types.CHAR).getClass());
+    assertEquals(TextSplitter.class, format.getSplitter(Types.VARCHAR)
+        .getClass());
+    // An unknown data type yields no splitter (null).
+    assertNull(format.getSplitter(Types.BINARY));
+  }
+
+  private class OracleDataDrivenDBInputFormatForTest extends
+      OracleDataDrivenDBInputFormat<NullDBWritable> {
+
+    @Override
+    public DBConfiguration getDBConf() {
+
+      String[] names = { "field1", "field2" };
+      DBConfiguration result = mock(DBConfiguration.class);
+      when(result.getInputConditions()).thenReturn("conditions");
+      when(result.getInputFieldNames()).thenReturn(names);
+      when(result.getInputTableName()).thenReturn("table");
+      return result;
+    }
+
+    @Override
+    public Connection getConnection() {
+      return DriverForTest.getConnection();
+    }
+
+  }
+  
+}

+ 164 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestSplitters.java

@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.lib.db;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.sql.ResultSet;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Tests the splitter classes: each splitter should build the SQL boundary clauses for its splits.
+ */
+public class TestSplitters {
+
+  private Configuration configuration;
+  
+  @Before
+  public void setup() {
+    configuration = new Configuration();
+    configuration.setInt(MRJobConfig.NUM_MAPS, 2);
+  }
+  
+  @Test(timeout=2000)
+  public void testBooleanSplitter() throws Exception{
+    BooleanSplitter splitter = new BooleanSplitter();
+    ResultSet result = mock(ResultSet.class);
+    when(result.getString(1)).thenReturn("result1");
+    
+    List<InputSplit> splits=splitter.split(configuration, result, "column");
+    assertSplits(new String[] {"column = FALSE column = FALSE",
+        "column IS NULL column IS NULL"}, splits);
+    
+    when(result.getString(1)).thenReturn("result1");
+    when(result.getString(2)).thenReturn("result2");
+    when(result.getBoolean(1)).thenReturn(true);
+    when(result.getBoolean(2)).thenReturn(false);
+
+    splits=splitter.split(configuration, result, "column");
+    assertEquals(0, splits.size());
+
+    when(result.getString(1)).thenReturn("result1");
+    when(result.getString(2)).thenReturn("result2");
+    when(result.getBoolean(1)).thenReturn(false);
+    when(result.getBoolean(2)).thenReturn(true);
+
+    splits = splitter.split(configuration, result, "column");
+    assertSplits(new String[] {
+        "column = FALSE column = FALSE", ".*column = TRUE"}, splits);
+  }
+  
+  @Test(timeout=2000)
+  public void testFloatSplitter() throws Exception{
+    FloatSplitter splitter = new FloatSplitter();
+    
+    ResultSet results = mock(ResultSet.class);
+
+    List<InputSplit> splits = splitter.split(configuration, results, "column");
+    assertSplits(new String[] {".*column IS NULL"}, splits);
+    
+    when(results.getString(1)).thenReturn("result1");
+    when(results.getString(2)).thenReturn("result2");
+    when(results.getDouble(1)).thenReturn(5.0);
+    when(results.getDouble(2)).thenReturn(7.0);
+
+    splits = splitter.split(configuration, results, "column1");
+    assertSplits(new String[] {"column1 >= 5.0 column1 < 6.0", 
+        "column1 >= 6.0 column1 <= 7.0"}, splits);
+  }
+
+  @Test(timeout=2000)
+  public void testBigDecimalSplitter() throws Exception{
+    BigDecimalSplitter splitter = new BigDecimalSplitter();
+    ResultSet result = mock(ResultSet.class);
+    
+    List<InputSplit> splits = splitter.split(configuration, result, "column");
+    assertSplits(new String[] {".*column IS NULL"}, splits);
+
+    when(result.getString(1)).thenReturn("result1");
+    when(result.getString(2)).thenReturn("result2");
+    when(result.getBigDecimal(1)).thenReturn(new BigDecimal(10));
+    when(result.getBigDecimal(2)).thenReturn(new BigDecimal(12));
+
+    splits = splitter.split(configuration, result, "column1");
+    assertSplits(new String[] {"column1 >= 10 column1 < 11",
+        "column1 >= 11 column1 <= 12"}, splits);
+  }
+
+  @Test(timeout=2000)
+  public void testIntegerSplitter() throws Exception{
+    IntegerSplitter splitter = new IntegerSplitter();
+    ResultSet result = mock(ResultSet.class);
+    
+    List<InputSplit> splits = splitter.split(configuration, result, "column");
+    assertSplits(new String[] {".*column IS NULL"}, splits);
+
+    when(result.getString(1)).thenReturn("result1");
+    when(result.getString(2)).thenReturn("result2");
+    when(result.getLong(1)).thenReturn(8L);
+    when(result.getLong(2)).thenReturn(19L);
+
+    splits = splitter.split(configuration, result, "column1");
+    assertSplits(new String[] {"column1 >= 8 column1 < 13",
+        "column1 >= 13 column1 < 18", "column1 >= 18 column1 <= 19"}, splits);
+  }
+
+  @Test(timeout=2000)
+  public void testTextSplitter() throws Exception{
+    TextSplitter splitter = new TextSplitter();
+    ResultSet result = mock(ResultSet.class);
+    
+    List<InputSplit> splits = splitter.split(configuration, result, "column");
+    assertSplits(new String[] {"column IS NULL column IS NULL"}, splits);
+
+    when(result.getString(1)).thenReturn("result1");
+    when(result.getString(2)).thenReturn("result2");
+
+    splits = splitter.split(configuration, result, "column1");
+    assertSplits(new String[] {"column1 >= 'result1' column1 < 'result1.'",
+        "column1 >= 'result1' column1 <= 'result2'"}, splits);
+  }
+
+  // Each expected entry is a regex matched against "lowerClause upperClause".
+  private void assertSplits(String[] expectedSplitRE, 
+      List<InputSplit> splits) throws IOException {
+    assertEquals(expectedSplitRE.length, splits.size());
+    for (int i = 0; i < expectedSplitRE.length; i++) {
+      DataDrivenDBInputSplit split = (DataDrivenDBInputSplit) splits.get(i);
+      String actualExpr = split.getLowerClause() + " " + split.getUpperClause();
+      assertTrue("Split #" + (i+1) + " expression is wrong."
+          + " Expected " + expectedSplitRE[i]
+          + " Actual " + actualExpr,
+          Pattern.matches(expectedSplitRE[i], actualExpr));
+    }
+  }
+  
+}