sqoop-commits mailing list archives

From b...@apache.org
Subject [2/2] git commit: SQOOP-680 Re-enable unit tests for MapReduce execution (Jarek Jarcec Cecho)
Date Wed, 07 Nov 2012 00:32:20 GMT
SQOOP-680 Re-enable unit tests for MapReduce execution
(Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/71f40446
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/71f40446
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/71f40446

Branch: refs/heads/sqoop2
Commit: 71f404460de9baa6aec5876dc189422760e8a7c0
Parents: 6c20aa4
Author: Bilung Lee <blee@apache.org>
Authored: Tue Nov 6 16:15:05 2012 -0800
Committer: Bilung Lee <blee@apache.org>
Committed: Tue Nov 6 16:15:05 2012 -0800

----------------------------------------------------------------------
 .../java/org/apache/sqoop/common/MapContext.java   |    4 +-
 .../java/org/apache/sqoop/utils/ClassUtils.java    |    4 +
 .../connector/jdbc/TestExportInitializer.java      |  214 ++++++---------
 .../sqoop/connector/jdbc/TestExportLoader.java     |  185 ++++++-------
 .../sqoop/connector/jdbc/TestImportExtractor.java  |   51 ++---
 .../connector/jdbc/TestImportInitializer.java      |  133 +++------
 .../connector/jdbc/TestImportPartitioner.java      |   62 ++---
 .../apache/sqoop/job/mr/ConfigurationUtils.java    |   16 +
 .../java/org/apache/sqoop/job/TestHdfsLoad.java    |   26 +-
 .../java/org/apache/sqoop/job/TestJobEngine.java   |  196 -------------
 .../java/org/apache/sqoop/job/TestMapReduce.java   |   34 +--
 .../java/org/apache/sqoop/job/io/TestData.java     |   16 +-
 12 files changed, 326 insertions(+), 615 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/common/src/main/java/org/apache/sqoop/common/MapContext.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/sqoop/common/MapContext.java b/common/src/main/java/org/apache/sqoop/common/MapContext.java
index b245148..2229889 100644
--- a/common/src/main/java/org/apache/sqoop/common/MapContext.java
+++ b/common/src/main/java/org/apache/sqoop/common/MapContext.java
@@ -77,7 +77,7 @@ public class MapContext implements ImmutableContext {
 
     String value = options.get(key);
 
-    return Long.getLong(value);
+    return Long.parseLong(value);
   }
 
   /**
@@ -91,7 +91,7 @@ public class MapContext implements ImmutableContext {
 
     String value = options.get(key);
 
-    return Integer.getInteger(value);
+    return Integer.parseInt(value);
   }
 
   /**

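Editor's note on the hunk above: Long.getLong and Integer.getInteger look up a *system property* named by their argument (returning null when no such property exists); they never parse the string itself. Long.parseLong and Integer.parseInt do the actual string parsing that MapContext needs here. A minimal standalone sketch of the difference (class name hypothetical, not part of this commit):

    public class GetLongVsParseLong {
      public static void main(String[] args) {
        String value = "42";
        // Long.getLong looks for a system property named "42"; under normal
        // circumstances none exists, so this prints null -- the original bug.
        System.out.println(Long.getLong(value));
        // Long.parseLong parses the string itself and prints 42.
        System.out.println(Long.parseLong(value));
      }
    }
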
http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/common/src/main/java/org/apache/sqoop/utils/ClassUtils.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/sqoop/utils/ClassUtils.java b/common/src/main/java/org/apache/sqoop/utils/ClassUtils.java
index 3f99f59..b63fce2 100644
--- a/common/src/main/java/org/apache/sqoop/utils/ClassUtils.java
+++ b/common/src/main/java/org/apache/sqoop/utils/ClassUtils.java
@@ -36,6 +36,10 @@ public final class ClassUtils {
    * @return Class instance or NULL
    */
   public static Class<?> loadClass(String className) {
+    if(className == null) {
+      return null;
+    }
+
     Class<?> klass = null;
     try {
       klass = Class.forName(className);

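Editor's note: the new guard matters because Class.forName(null) throws a NullPointerException rather than returning null, so a caller holding an unset configuration value previously crashed. A self-contained sketch of the guarded contract (class name hypothetical; the body mirrors the patched method):

    public final class LoadClassSketch {
      // Mirrors the patched ClassUtils.loadClass contract: null in, null out.
      static Class<?> loadClass(String className) {
        if (className == null) {
          return null;                      // unset property: no NPE
        }
        try {
          return Class.forName(className);
        } catch (ClassNotFoundException e) {
          return null;                      // per the javadoc: "Class instance or NULL"
        }
      }

      public static void main(String[] args) {
        System.out.println(loadClass(null));                // null
        System.out.println(loadClass("java.lang.String"));  // class java.lang.String
      }
    }
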
http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportInitializer.java
----------------------------------------------------------------------
diff --git a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportInitializer.java b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportInitializer.java
index 532e6fd..24bbb41 100644
--- a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportInitializer.java
+++ b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportInitializer.java
@@ -17,15 +17,14 @@
  */
 package org.apache.sqoop.connector.jdbc;
 
-import java.util.Hashtable;
-
 import junit.framework.TestCase;
 
+import org.apache.sqoop.common.MutableContext;
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.jdbc.configuration.ConnectionConfiguration;
+import org.apache.sqoop.connector.jdbc.configuration.ExportJobConfiguration;
 import org.apache.sqoop.job.Constants;
 import org.apache.sqoop.job.etl.Initializer;
-//import org.apache.sqoop.job.etl.MutableContext;
-//import org.apache.sqoop.job.etl.Options;
-import org.junit.Test;
 
 public class TestExportInitializer extends TestCase {
 
@@ -36,129 +35,92 @@ public class TestExportInitializer extends TestCase {
   private GenericJdbcExecutor executor;
 
   public TestExportInitializer() {
-    tableName = getClass().getSimpleName();
-    tableSql = "INSERT INTO \"" + tableName + "\" VALUES (?,?,?)";
+    tableName = getClass().getSimpleName().toUpperCase();
+    tableSql = "INSERT INTO " + tableName + " VALUES (?,?,?)";
     tableColumns = "ICOL,VCOL";
   }
 
-  public void testVoid() { }
-
-//  @Override
-//  public void setUp() {
-//    executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
-//        GenericJdbcTestConstants.URL, null, null);
-//
-//    if (!executor.existTable(tableName)) {
-//      executor.executeUpdate("CREATE TABLE "
-//          + executor.delimitIdentifier(tableName)
-//          + "(ICOL INTEGER PRIMARY KEY, DCOL DOUBLE, VCOL VARCHAR(20))");
-//    }
-//  }
-//
-//  @Override
-//  public void tearDown() {
-//    executor.close();
-//  }
-//
-//  @Test
-//  public void testTableName() throws Exception {
-//    DummyOptions options = new DummyOptions();
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-//        GenericJdbcTestConstants.DRIVER);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-//        GenericJdbcTestConstants.URL);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_NAME,
-//        tableName);
-//
-//    DummyContext context = new DummyContext();
-//
-//    Initializer initializer = new GenericJdbcExportInitializer();
-//    initializer.run(context, options);
-//
-//    verifyResult(context,
-//        "INSERT INTO " + executor.delimitIdentifier(tableName)
-//            + " VALUES (?,?,?)",
-//        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE + tableName);
-//  }
-//
-//  @Test
-//  public void testTableNameWithTableColumns() throws Exception {
-//    DummyOptions options = new DummyOptions();
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-//        GenericJdbcTestConstants.DRIVER);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-//        GenericJdbcTestConstants.URL);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_NAME,
-//        tableName);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_COLUMNS,
-//        tableColumns);
-//
-//    DummyContext context = new DummyContext();
-//
-//    Initializer initializer = new GenericJdbcExportInitializer();
-//    initializer.run(context, options);
-//
-//    verifyResult(context,
-//        "INSERT INTO " + executor.delimitIdentifier(tableName)
-//            + " (" + tableColumns + ") VALUES (?,?)",
-//        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE + tableName);
-//  }
-//
-//  @Test
-//  public void testTableSql() throws Exception {
-//    DummyOptions options = new DummyOptions();
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-//        GenericJdbcTestConstants.DRIVER);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-//        GenericJdbcTestConstants.URL);
-//    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_SQL,
-//        tableSql);
-//
-//    DummyContext context = new DummyContext();
-//
-//    Initializer initializer = new GenericJdbcExportInitializer();
-//    initializer.run(context, options);
-//
-//    verifyResult(context,
-//        "INSERT INTO " + executor.delimitIdentifier(tableName)
-//            + " VALUES (?,?,?)",
-//        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE
-//            + GenericJdbcConnectorConstants.DEFAULT_DATADIR);
-//  }
-//
-//  private void verifyResult(DummyContext context,
-//      String dataSql, String inputDirectory) {
-//    assertEquals(dataSql, context.getString(
-//        GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL));
-//    assertEquals(inputDirectory, context.getString(
-//        Constants.JOB_ETL_INPUT_DIRECTORY));
-//  }
-//
-//  public class DummyOptions implements Options {
-//    Hashtable<String, String> store = new Hashtable<String, String>();
-//
-//    public void setOption(String key, String value) {
-//      store.put(key, value);
-//    }
-//
-//    @Override
-//    public String getOption(String key) {
-//      return store.get(key);
-//    }
-//  }
-//
-//  public class DummyContext implements MutableContext {
-//    Hashtable<String, String> store = new Hashtable<String, String>();
-//
-//    @Override
-//    public String getString(String key) {
-//      return store.get(key);
-//    }
-//
-//    @Override
-//    public void setString(String key, String value) {
-//      store.put(key, value);
-//    }
-//  }
+  @Override
+  public void setUp() {
+    executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
+        GenericJdbcTestConstants.URL, null, null);
+
+    if (!executor.existTable(tableName)) {
+      executor.executeUpdate("CREATE TABLE "
+          + executor.delimitIdentifier(tableName)
+          + "(ICOL INTEGER PRIMARY KEY, DCOL DOUBLE, VCOL VARCHAR(20))");
+    }
+  }
+
+  @Override
+  public void tearDown() {
+    executor.close();
+  }
+
+  public void testTableName() throws Exception {
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.tableName = tableName;
+
+    ExportJobConfiguration jobConf = new ExportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
+
+    Initializer initializer = new GenericJdbcExportInitializer();
+    initializer.initialize(context, connConf, jobConf);
+
+    verifyResult(context,
+        "INSERT INTO " + executor.delimitIdentifier(tableName)
+            + " VALUES (?,?,?)",
+        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE + tableName);
+  }
+
+  public void testTableNameWithTableColumns() throws Exception {
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.tableName = tableName;
+    connConf.columns = tableColumns;
 
+    ExportJobConfiguration jobConf = new ExportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
+
+    Initializer initializer = new GenericJdbcExportInitializer();
+    initializer.initialize(context, connConf, jobConf);
+
+    verifyResult(context,
+        "INSERT INTO " + executor.delimitIdentifier(tableName)
+            + " (" + tableColumns + ") VALUES (?,?)",
+        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE + tableName);
+  }
+
+  public void testTableSql() throws Exception {
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.sql = tableSql;
+
+    ExportJobConfiguration jobConf = new ExportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
+
+    Initializer initializer = new GenericJdbcExportInitializer();
+    initializer.initialize(context, connConf, jobConf);
+
+    verifyResult(context,
+        "INSERT INTO " + executor.delimitIdentifier(tableName)
+            + " VALUES (?,?,?)",
+        GenericJdbcConnectorConstants.DEFAULT_WAREHOUSE
+            + GenericJdbcConnectorConstants.DEFAULT_DATADIR);
+  }
+
+  private void verifyResult(MutableContext context,
+      String dataSql, String inputDirectory) {
+    assertEquals(dataSql, context.getString(
+        GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL));
+    assertEquals(inputDirectory, context.getString(
+        Constants.JOB_ETL_INPUT_DIRECTORY));
+  }
 }

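Editor's note: the same refactoring recurs in the test files below. The commented-out DummyOptions/DummyContext scaffolding is dropped in favour of the real MutableMapContext plus typed configuration beans (ConnectionConfiguration, ExportJobConfiguration, ImportJobConfiguration), so the re-enabled tests exercise the production context implementation. A minimal sketch of that context API (key and value illustrative, not from the patch):

    import org.apache.sqoop.common.MutableContext;
    import org.apache.sqoop.common.MutableMapContext;

    public class ContextSketch {
      public static void main(String[] args) {
        MutableContext context = new MutableMapContext();
        context.setString("sample.key", "sample value");    // hypothetical key
        System.out.println(context.getString("sample.key"));
      }
    }
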
http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportLoader.java
----------------------------------------------------------------------
diff --git a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportLoader.java b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportLoader.java
index 649808d..c97693d 100644
--- a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportLoader.java
+++ b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestExportLoader.java
@@ -18,10 +18,13 @@
 package org.apache.sqoop.connector.jdbc;
 
 import java.sql.ResultSet;
-import java.util.HashMap;
 
 import junit.framework.TestCase;
 
+import org.apache.sqoop.common.MutableContext;
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.jdbc.configuration.ConnectionConfiguration;
+import org.apache.sqoop.connector.jdbc.configuration.ExportJobConfiguration;
 import org.apache.sqoop.job.etl.Loader;
 import org.apache.sqoop.job.io.DataReader;
 import org.junit.Test;
@@ -36,105 +39,89 @@ public class TestExportLoader extends TestCase {
   private static final int NUMBER_OF_ROWS = 101;
 
   public TestExportLoader() {
-    tableName = getClass().getSimpleName();
+    tableName = getClass().getSimpleName().toUpperCase();
   }
 
-  public void testVoid() { }
-
-//  @Override
-//  public void setUp() {
-//    executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
-//        GenericJdbcTestConstants.URL, null, null);
-//
-//    if (!executor.existTable(tableName)) {
-//      executor.executeUpdate("CREATE TABLE "
-//          + executor.delimitIdentifier(tableName)
-//          + "(ICOL INTEGER PRIMARY KEY, DCOL DOUBLE, VCOL VARCHAR(20))");
-//    }
-//  }
-//
-//  @Override
-//  public void tearDown() {
-//    executor.close();
-//  }
-//
-//  @Test
-//  public void testInsert() throws Exception {
-//    DummyContext context = new DummyContext();
-//    context.setString(
-//        GenericJdbcConnectorConstants.CONNECTOR_JDBC_DRIVER,
-//        GenericJdbcTestConstants.DRIVER);
-//    context.setString(
-//        GenericJdbcConnectorConstants.CONNECTOR_JDBC_URL,
-//        GenericJdbcTestConstants.URL);
-//    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL,
-//        "INSERT INTO " + executor.delimitIdentifier(tableName)
-//            + " VALUES (?,?,?)");
-//
-//    Loader loader = new GenericJdbcExportLoader();
-//    DummyReader reader = new DummyReader();
-//
-//    loader.run(context, reader);
-//
-//    int index = START;
-//    ResultSet rs = executor.executeQuery("SELECT * FROM "
-//        + executor.delimitIdentifier(tableName) + " ORDER BY ICOL");
-//    while (rs.next()) {
-//      assertEquals(Integer.valueOf(index), rs.getObject(1));
-//      assertEquals(Double.valueOf(index), rs.getObject(2));
-//      assertEquals(String.valueOf(index), rs.getObject(3));
-//      index++;
-//    }
-//    assertEquals(NUMBER_OF_ROWS, index-START);
-//  }
-//
-//  public class DummyContext implements MutableContext {
-//    HashMap<String, String> store = new HashMap<String, String>();
-//
-//    @Override
-//    public String getString(String key) {
-//      return store.get(key);
-//    }
-//
-//    @Override
-//    public void setString(String key, String value) {
-//      store.put(key, value);
-//    }
-//  }
-//
-//  public class DummyReader extends DataReader {
-//    int index = 0;
-//
-//    @Override
-//    public void setFieldDelimiter(char fieldDelimiter) {
-//      // do nothing and use default delimiter
-//    }
-//
-//    @Override
-//    public Object[] readArrayRecord() {
-//      if (index < NUMBER_OF_ROWS) {
-//        Object[] array = new Object[] {
-//            new Integer(START+index),
-//            new Double(START+index),
-//            String.valueOf(START+index) };
-//        index++;
-//        return array;
-//      } else {
-//        return null;
-//      }
-//    }
-//
-//    @Override
-//    public String readCsvRecord() {
-//      fail("This method should not be invoked.");
-//      return null;
-//    }
-//
-//    @Override
-//    public Object readContent(int type) {
-//      fail("This method should not be invoked.");
-//      return null;
-//    }
-//  }
+  @Override
+  public void setUp() {
+    executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
+        GenericJdbcTestConstants.URL, null, null);
+
+    if (!executor.existTable(tableName)) {
+      executor.executeUpdate("CREATE TABLE "
+          + executor.delimitIdentifier(tableName)
+          + "(ICOL INTEGER PRIMARY KEY, DCOL DOUBLE, VCOL VARCHAR(20))");
+    }
+  }
+
+  @Override
+  public void tearDown() {
+    executor.close();
+  }
+
+  public void testInsert() throws Exception {
+    MutableContext context = new MutableMapContext();
+
+    context.setString(
+        GenericJdbcConnectorConstants.CONNECTOR_JDBC_DRIVER,
+        GenericJdbcTestConstants.DRIVER);
+    context.setString(
+        GenericJdbcConnectorConstants.CONNECTOR_JDBC_URL,
+        GenericJdbcTestConstants.URL);
+    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL,
+        "INSERT INTO " + executor.delimitIdentifier(tableName)
+            + " VALUES (?,?,?)");
+
+    Loader loader = new GenericJdbcExportLoader();
+    DummyReader reader = new DummyReader();
+
+    loader.run(context, reader);
+
+    int index = START;
+    ResultSet rs = executor.executeQuery("SELECT * FROM "
+        + executor.delimitIdentifier(tableName) + " ORDER BY ICOL");
+    while (rs.next()) {
+      assertEquals(index, rs.getObject(1));
+      assertEquals((double) index, rs.getObject(2));
+      assertEquals(String.valueOf(index), rs.getObject(3));
+      index++;
+    }
+    assertEquals(NUMBER_OF_ROWS, index-START);
+  }
+
+  public class DummyReader extends DataReader {
+    int index = 0;
+
+    @Override
+    public void setFieldDelimiter(char fieldDelimiter) {
+      // do nothing and use default delimiter
+    }
+
+    @Override
+    public Object[] readArrayRecord() {
+      if (index < NUMBER_OF_ROWS) {
+        Object[] array = new Object[] {
+            START + index,
+            (double) (START + index),
+            String.valueOf(START+index) };
+        index++;
+        return array;
+      } else {
+        return null;
+      }
+    }
+
+    @Override
+    public String readCsvRecord() {
+      fail("This method should not be invoked.");
+      return null;
+    }
+
+    @Override
+    public Object readContent(int type) {
+      fail("This method should not be invoked.");
+      return null;
+    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportExtractor.java
----------------------------------------------------------------------
diff --git a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportExtractor.java b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportExtractor.java
index d5c8b3c..168e572 100644
--- a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportExtractor.java
+++ b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportExtractor.java
@@ -17,13 +17,14 @@
  */
 package org.apache.sqoop.connector.jdbc;
 
-import java.util.HashMap;
-
 import junit.framework.TestCase;
 
+import org.apache.sqoop.common.MutableContext;
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.jdbc.configuration.ConnectionConfiguration;
+import org.apache.sqoop.connector.jdbc.configuration.ImportJobConfiguration;
 import org.apache.sqoop.job.etl.Extractor;
 import org.apache.sqoop.job.io.DataWriter;
-import org.junit.Test;
 
 public class TestImportExtractor extends TestCase {
 
@@ -35,12 +36,9 @@ public class TestImportExtractor extends TestCase {
   private static final int NUMBER_OF_ROWS = 101;
 
   public TestImportExtractor() {
-    tableName = getClass().getSimpleName();
+    tableName = getClass().getSimpleName().toUpperCase();
   }
 
-  public void testVoid() {}
-
-  /*
   @Override
   public void setUp() {
     executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
@@ -65,9 +63,8 @@ public class TestImportExtractor extends TestCase {
     executor.close();
   }
 
-  @Test
   public void testQuery() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_DRIVER,
         GenericJdbcTestConstants.DRIVER);
@@ -78,6 +75,9 @@ public class TestImportExtractor extends TestCase {
         "SELECT * FROM " + executor.delimitIdentifier(tableName)
             + " WHERE ${CONDITIONS}");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     GenericJdbcImportPartition partition;
 
     Extractor extractor = new GenericJdbcImportExtractor();
@@ -85,20 +85,19 @@ public class TestImportExtractor extends TestCase {
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("-50.0 <= DCOL AND DCOL < -16.6666666666666665");
-    extractor.initialize(context, partition, writer);
+    extractor.run(context, connConf, jobConf, partition, writer);
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("-16.6666666666666665 <= DCOL AND DCOL < 16.666666666666667");
-    extractor.initialize(context, partition, writer);
+    extractor.run(context, connConf, jobConf, partition, writer);
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("16.666666666666667 <= DCOL AND DCOL <= 50.0");
-    extractor.initialize(context, partition, writer);
+    extractor.run(context, connConf, jobConf, partition, writer);
   }
 
-  @Test
   public void testSubquery() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_DRIVER,
         GenericJdbcTestConstants.DRIVER);
@@ -110,6 +109,9 @@ public class TestImportExtractor extends TestCase {
             + "(SELECT * FROM " + executor.delimitIdentifier(tableName)
             + " WHERE ${CONDITIONS}) SQOOP_SUBQUERY_ALIAS");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     GenericJdbcImportPartition partition;
 
     Extractor extractor = new GenericJdbcImportExtractor();
@@ -117,29 +119,15 @@ public class TestImportExtractor extends TestCase {
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("-50 <= ICOL AND ICOL < -16");
-    extractor.initialize(context, partition, writer);
+    extractor.run(context, connConf, jobConf, partition, writer);
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("-16 <= ICOL AND ICOL < 17");
-    extractor.initialize(context, partition, writer);
+    extractor.run(context, connConf, jobConf, partition, writer);
 
     partition = new GenericJdbcImportPartition();
     partition.setConditions("17 <= ICOL AND ICOL < 50");
-    extractor.initialize(context, partition, writer);
-  }
-
-  public class DummyContext implements MutableContext {
-    HashMap<String, String> store = new HashMap<String, String>();
-
-    @Override
-    public String getString(String key) {
-      return store.get(key);
-    }
-
-    @Override
-    public void setString(String key, String value) {
-      store.put(key, value);
-    }
+    extractor.run(context, connConf, jobConf, partition, writer);
   }
 
   public class DummyWriter extends DataWriter {
@@ -174,5 +162,4 @@ public class TestImportExtractor extends TestCase {
       fail("This method should not be invoked.");
     }
   }
-*/
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportInitializer.java
----------------------------------------------------------------------
diff --git a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportInitializer.java b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportInitializer.java
index 7d8c282..8957ed1 100644
--- a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportInitializer.java
+++ b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportInitializer.java
@@ -18,13 +18,15 @@
 package org.apache.sqoop.connector.jdbc;
 
 import java.sql.Types;
-import java.util.Hashtable;
 
 import junit.framework.TestCase;
 
+import org.apache.sqoop.common.MutableContext;
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.jdbc.configuration.ConnectionConfiguration;
+import org.apache.sqoop.connector.jdbc.configuration.ImportJobConfiguration;
 import org.apache.sqoop.job.Constants;
 import org.apache.sqoop.job.etl.Initializer;
-import org.junit.Test;
 
 public class TestImportInitializer extends TestCase {
 
@@ -38,14 +40,11 @@ public class TestImportInitializer extends TestCase {
   private static final int NUMBER_OF_ROWS = 101;
 
   public TestImportInitializer() {
-    tableName = getClass().getSimpleName();
-    tableSql = "SELECT * FROM \"" + tableName + "\" WHERE ${CONDITIONS}";
+    tableName = getClass().getSimpleName().toUpperCase();
+    tableSql = "SELECT * FROM " + tableName + " WHERE ${CONDITIONS}";
     tableColumns = "ICOL,VCOL";
   }
 
-  public void testVoid() {}
-
-  /*
   @Override
   public void setUp() {
     executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
@@ -70,20 +69,18 @@ public class TestImportInitializer extends TestCase {
     executor.close();
   }
 
-  @Test
   public void testTableName() throws Exception {
-    DummyOptions options = new DummyOptions();
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-        GenericJdbcTestConstants.DRIVER);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-        GenericJdbcTestConstants.URL);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_NAME,
-        tableName);
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.tableName = tableName;
+
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
 
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
 
     Initializer initializer = new GenericJdbcImportInitializer();
-    initializer.initialize(context, options);
+    initializer.initialize(context, connConf, jobConf);
 
     verifyResult(context,
         "SELECT * FROM " + executor.delimitIdentifier(tableName)
@@ -96,22 +93,19 @@ public class TestImportInitializer extends TestCase {
         String.valueOf(START+NUMBER_OF_ROWS-1));
   }
 
-  @Test
   public void testTableNameWithTableColumns() throws Exception {
-    DummyOptions options = new DummyOptions();
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-        GenericJdbcTestConstants.DRIVER);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-        GenericJdbcTestConstants.URL);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_NAME,
-        tableName);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_COLUMNS,
-        tableColumns);
-
-    DummyContext context = new DummyContext();
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.tableName = tableName;
+    connConf.columns = tableColumns;
+
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
 
     Initializer initializer = new GenericJdbcImportInitializer();
-    initializer.initialize(context, options);
+    initializer.initialize(context, connConf, jobConf);
 
     verifyResult(context,
         "SELECT ICOL,VCOL FROM " + executor.delimitIdentifier(tableName)
@@ -124,22 +118,19 @@ public class TestImportInitializer extends TestCase {
         String.valueOf(START+NUMBER_OF_ROWS-1));
   }
 
-  @Test
   public void testTableSql() throws Exception {
-    DummyOptions options = new DummyOptions();
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-        GenericJdbcTestConstants.DRIVER);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-        GenericJdbcTestConstants.URL);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_SQL,
-        tableSql);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_PCOL,
-        "DCOL");
-
-    DummyContext context = new DummyContext();
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.sql = tableSql;
+    connConf.partitionColumn = "DCOL";
+
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
 
     Initializer initializer = new GenericJdbcImportInitializer();
-    initializer.initialize(context, options);
+    initializer.initialize(context, connConf, jobConf);
 
     verifyResult(context,
         "SELECT * FROM " + executor.delimitIdentifier(tableName)
@@ -153,24 +144,20 @@ public class TestImportInitializer extends TestCase {
         String.valueOf((double)(START+NUMBER_OF_ROWS-1)));
   }
 
-  @Test
   public void testTableSqlWithTableColumns() throws Exception {
-    DummyOptions options = new DummyOptions();
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_JDBCDRIVER,
-        GenericJdbcTestConstants.DRIVER);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_CONN_CONNECTSTRING,
-        GenericJdbcTestConstants.URL);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_SQL,
-        tableSql);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_COLUMNS,
-        tableColumns);
-    options.setOption(GenericJdbcConnectorConstants.INPUT_TBL_PCOL,
-        "DCOL");
-
-    DummyContext context = new DummyContext();
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    connConf.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    connConf.connectionString = GenericJdbcTestConstants.URL;
+    connConf.sql = tableSql;
+    connConf.columns = tableColumns;
+    connConf.partitionColumn = "DCOL";
+
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
+    MutableContext context = new MutableMapContext();
 
     Initializer initializer = new GenericJdbcImportInitializer();
-    initializer.initialize(context, options);
+    initializer.initialize(context, connConf, jobConf);
 
     verifyResult(context,
         "SELECT SQOOP_SUBQUERY_ALIAS.ICOL,SQOOP_SUBQUERY_ALIAS.VCOL FROM "
@@ -185,7 +172,7 @@ public class TestImportInitializer extends TestCase {
         String.valueOf((double)(START+NUMBER_OF_ROWS-1)));
   }
 
-  private void verifyResult(DummyContext context,
+  private void verifyResult(MutableContext context,
       String dataSql, String fieldNames, String outputDirectory,
       String partitionColumnName, String partitionColumnType,
       String partitionMinValue, String partitionMaxValue) {
@@ -205,32 +192,4 @@ public class TestImportInitializer extends TestCase {
     assertEquals(partitionMaxValue, context.getString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_MAXVALUE));
   }
-
-  public class DummyOptions implements Options {
-    Hashtable<String, String> store = new Hashtable<String, String>();
-
-    public void setOption(String key, String value) {
-      store.put(key, value);
-    }
-
-    @Override
-    public String getOption(String key) {
-      return store.get(key);
-    }
-  }
-
-  public class DummyContext implements MutableContext {
-    Hashtable<String, String> store = new Hashtable<String, String>();
-
-    @Override
-    public String getString(String key) {
-      return store.get(key);
-    }
-
-    @Override
-    public void setString(String key, String value) {
-      store.put(key, value);
-    }
-  }
-*/
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportPartitioner.java
----------------------------------------------------------------------
diff --git a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportPartitioner.java b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportPartitioner.java
index c8b56c1..d5db190 100644
--- a/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportPartitioner.java
+++ b/connector/connector-generic-jdbc/src/test/java/org/apache/sqoop/connector/jdbc/TestImportPartitioner.java
@@ -24,6 +24,10 @@ import java.util.List;
 
 import junit.framework.TestCase;
 
+import org.apache.sqoop.common.MutableContext;
+import org.apache.sqoop.common.MutableMapContext;
+import org.apache.sqoop.connector.jdbc.configuration.ConnectionConfiguration;
+import org.apache.sqoop.connector.jdbc.configuration.ImportJobConfiguration;
 import org.apache.sqoop.job.Constants;
 import org.apache.sqoop.job.etl.Partition;
 import org.apache.sqoop.job.etl.Partitioner;
@@ -34,12 +38,8 @@ public class TestImportPartitioner extends TestCase {
   private static final int START = -5;
   private static final int NUMBER_OF_ROWS = 11;
 
-  public void testVoid() {}
-
-/*
-  @Test
   public void testIntegerEvenPartition() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME,
         "ICOL");
@@ -54,8 +54,11 @@ public class TestImportPartitioner extends TestCase {
         String.valueOf(START + NUMBER_OF_ROWS - 1));
     context.setString(Constants.JOB_ETL_NUMBER_PARTITIONS, "5");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     Partitioner partitioner = new GenericJdbcImportPartitioner();
-    List<Partition> partitions = partitioner.initialize(context);
+    List<Partition> partitions = partitioner.getPartitions(context, connConf, jobConf);
 
     verifyResult(partitions, new String[] {
         "-5 <= ICOL AND ICOL < -3",
@@ -66,9 +69,8 @@ public class TestImportPartitioner extends TestCase {
     });
   }
 
-  @Test
   public void testIntegerUnevenPartition() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME,
         "ICOL");
@@ -83,8 +85,11 @@ public class TestImportPartitioner extends TestCase {
         String.valueOf(START + NUMBER_OF_ROWS - 1));
     context.setString(Constants.JOB_ETL_NUMBER_PARTITIONS, "3");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     Partitioner partitioner = new GenericJdbcImportPartitioner();
-    List<Partition> partitions = partitioner.initialize(context);
+    List<Partition> partitions = partitioner.getPartitions(context, connConf, jobConf);
 
     verifyResult(partitions, new String[] {
         "-5 <= ICOL AND ICOL < -1",
@@ -93,9 +98,8 @@ public class TestImportPartitioner extends TestCase {
     });
   }
 
-  @Test
   public void testIntegerOverPartition() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME,
         "ICOL");
@@ -110,8 +114,11 @@ public class TestImportPartitioner extends TestCase {
         String.valueOf(START + NUMBER_OF_ROWS - 1));
     context.setString(Constants.JOB_ETL_NUMBER_PARTITIONS, "13");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     Partitioner partitioner = new GenericJdbcImportPartitioner();
-    List<Partition> partitions = partitioner.initialize(context);
+    List<Partition> partitions = partitioner.getPartitions(context, connConf, jobConf);
 
     verifyResult(partitions, new String[] {
         "-5 <= ICOL AND ICOL < -4",
@@ -127,9 +134,8 @@ public class TestImportPartitioner extends TestCase {
     });
   }
 
-  @Test
   public void testFloatingPointEvenPartition() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME,
         "DCOL");
@@ -144,8 +150,11 @@ public class TestImportPartitioner extends TestCase {
         String.valueOf((double)(START + NUMBER_OF_ROWS - 1)));
     context.setString(Constants.JOB_ETL_NUMBER_PARTITIONS, "5");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     Partitioner partitioner = new GenericJdbcImportPartitioner();
-    List<Partition> partitions = partitioner.initialize(context);
+    List<Partition> partitions = partitioner.getPartitions(context, connConf, jobConf);
 
     verifyResult(partitions, new String[] {
         "-5.0 <= DCOL AND DCOL < -3.0",
@@ -156,9 +165,8 @@ public class TestImportPartitioner extends TestCase {
     });
   }
 
-  @Test
   public void testFloatingPointUnevenPartition() throws Exception {
-    DummyContext context = new DummyContext();
+    MutableContext context = new MutableMapContext();
     context.setString(
         GenericJdbcConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME,
         "DCOL");
@@ -173,8 +181,11 @@ public class TestImportPartitioner extends TestCase {
         String.valueOf((double)(START + NUMBER_OF_ROWS - 1)));
     context.setString(Constants.JOB_ETL_NUMBER_PARTITIONS, "3");
 
+    ConnectionConfiguration connConf = new ConnectionConfiguration();
+    ImportJobConfiguration jobConf = new ImportJobConfiguration();
+
     Partitioner partitioner = new GenericJdbcImportPartitioner();
-    List<Partition> partitions = partitioner.initialize(context);
+    List<Partition> partitions = partitioner.getPartitions(context, connConf, jobConf);
 
     verifyResult(partitions, new String[] {
         "-5.0 <= DCOL AND DCOL < -1.6666666666666665",
@@ -193,19 +204,4 @@ public class TestImportPartitioner extends TestCase {
           ((GenericJdbcImportPartition)iterator.next()).getConditions());
     }
   }
-
-  public class DummyContext implements MutableContext {
-    HashMap<String, String> store = new HashMap<String, String>();
-
-    @Override
-    public String getString(String key) {
-      return store.get(key);
-    }
-
-    @Override
-    public void setString(String key, String value) {
-      store.put(key, value);
-    }
-  }
-*/
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/ConfigurationUtils.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/ConfigurationUtils.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/ConfigurationUtils.java
index 59baaf6..ae647ce 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/ConfigurationUtils.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/ConfigurationUtils.java
@@ -51,11 +51,27 @@ public final class ConfigurationUtils {
       JobConstants.JOB_CONFIG_FRAMEWORK_JOB);
   }
 
+  /**
+   * Load configuration instance serialized in Hadoop configuration object
+   * @param configuration Hadoop configuration object associated with the job
+   * @param classProperty Property with stored configuration class name
+   * @param valueProperty Property with stored JSON representation of the
+   *                      configuration object
+   * @return New instance with loaded data
+   */
   private static Object loadConfiguration(Configuration configuration,
                                           String classProperty,
                                           String valueProperty) {
+    // Create new instance of configuration class
     Object object = ClassUtils.instantiate(configuration.get(classProperty));
+    if(object == null) {
+      return null;
+    }
+
+    // Fill it with JSON data
     FormUtils.fillValues(configuration.get(valueProperty), object);
+
+    // And give it back
     return object;
   }
 

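Editor's note: together with the ClassUtils guard above, loadConfiguration can now be called for a job that never set the class property: instantiate(null) yields null and the method returns null early instead of letting FormUtils.fillValues fail. A simplified, self-contained mimic of that flow (property names hypothetical, FormUtils step stubbed out as a comment; assumes hadoop-common on the classpath):

    import org.apache.hadoop.conf.Configuration;

    public class LoadConfigSketch {
      static Object loadConfiguration(Configuration conf,
                                      String classProperty, String valueProperty) {
        String className = conf.get(classProperty);    // may legitimately be unset
        Object object = instantiate(className);
        if (object == null) {
          return null;                                 // absent config: stop early
        }
        // Real code continues: FormUtils.fillValues(conf.get(valueProperty), object);
        return object;
      }

      static Object instantiate(String className) {    // stand-in for ClassUtils.instantiate
        if (className == null) {
          return null;
        }
        try {
          return Class.forName(className).newInstance();
        } catch (Exception e) {
          return null;
        }
      }

      public static void main(String[] args) {
        // Neither property set: prints null rather than throwing.
        System.out.println(loadConfiguration(new Configuration(),
            "job.config.class", "job.config.values"));
      }
    }
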
http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestHdfsLoad.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestHdfsLoad.java b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestHdfsLoad.java
index c74faa2..812dd8e 100644
--- a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestHdfsLoad.java
+++ b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestHdfsLoad.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.sqoop.common.ImmutableContext;
 import org.apache.sqoop.job.etl.Extractor;
 import org.apache.sqoop.job.etl.HdfsSequenceImportLoader;
 import org.apache.sqoop.job.etl.HdfsTextImportLoader;
@@ -43,7 +44,6 @@ import org.apache.sqoop.job.etl.Partitioner;
 import org.apache.sqoop.job.io.Data;
 import org.apache.sqoop.job.io.DataWriter;
 import org.apache.sqoop.job.mr.SqoopFileOutputFormat;
-import org.junit.Test;
 
 public class TestHdfsLoad extends TestCase {
 
@@ -59,9 +59,6 @@ public class TestHdfsLoad extends TestCase {
     outdir = OUTPUT_ROOT + "/" + getClass().getSimpleName();
   }
 
-  public void testVoid() {}
-  /*
-  @Test
   public void testUncompressedText() throws Exception {
     FileUtils.delete(outdir);
 
@@ -79,7 +76,6 @@ public class TestHdfsLoad extends TestCase {
     verifyOutputText(filereader);
   }
 
-  @Test
   public void testCompressedText() throws Exception {
     FileUtils.delete(outdir);
 
@@ -109,7 +105,7 @@ public class TestHdfsLoad extends TestCase {
     int index = START_ID*NUMBER_OF_ROWS_PER_ID;
     while ((actual = reader.readLine()) != null){
       data.setContent(new Object[] {
-          new Integer(index), new Double(index), String.valueOf(index) },
+        index, (double) index, String.valueOf(index) },
           Data.ARRAY_RECORD);
       expected = data.toString();
       index++;
@@ -122,7 +118,6 @@ public class TestHdfsLoad extends TestCase {
         index-START_ID*NUMBER_OF_ROWS_PER_ID);
   }
 
-  @Test
   public void testUncompressedSequence() throws Exception {
     FileUtils.delete(outdir);
 
@@ -140,7 +135,6 @@ public class TestHdfsLoad extends TestCase {
     verifyOutputSequence(filereader);
   }
 
-  @Test
   public void testCompressedSequence() throws Exception {
     FileUtils.delete(outdir);
 
@@ -166,7 +160,7 @@ public class TestHdfsLoad extends TestCase {
     Data data = new Data();
     while (reader.next(actual)){
       data.setContent(new Object[] {
-          new Integer(index), new Double(index), String.valueOf(index) },
+          index, (double) index, String.valueOf(index) },
           Data.ARRAY_RECORD);
       expected.set(data.toString());
       index++;
@@ -199,11 +193,16 @@ public class TestHdfsLoad extends TestCase {
     public void write(DataOutput out) throws IOException {
       out.writeInt(id);
     }
+
+    @Override
+    public String toString() {
+      return Integer.toString(id);
+    }
   }
 
   public static class DummyPartitioner extends Partitioner {
     @Override
-    public List<Partition> initialize(Context context) {
+    public List<Partition> getPartitions(ImmutableContext context, Object oc, Object oj) {
       List<Partition> partitions = new LinkedList<Partition>();
       for (int id = START_ID; id <= NUMBER_OF_IDS; id++) {
         DummyPartition partition = new DummyPartition();
@@ -216,17 +215,16 @@ public class TestHdfsLoad extends TestCase {
 
   public static class DummyExtractor extends Extractor {
     @Override
-    public void initialize(Context context, Partition partition, DataWriter writer) {
+    public void run(ImmutableContext context, Object oc, Object oj, Partition partition, DataWriter writer) {
       int id = ((DummyPartition)partition).getId();
       for (int row = 0; row < NUMBER_OF_ROWS_PER_ID; row++) {
         Object[] array = new Object[] {
-          new Integer(id*NUMBER_OF_ROWS_PER_ID+row),
-          new Double(id*NUMBER_OF_ROWS_PER_ID+row),
+          id * NUMBER_OF_ROWS_PER_ID + row,
+          (double) (id * NUMBER_OF_ROWS_PER_ID + row),
           String.valueOf(id*NUMBER_OF_ROWS_PER_ID+row)
         };
         writer.writeArrayRecord(array);
       }
     }
   }
-  */
 }

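Editor's note: the Dummy* classes in this file (and in TestMapReduce below) track the reworked connector SPI: Partitioner.initialize(Context) became getPartitions(ImmutableContext, Object, Object), and Extractor now exposes run(...), with the two Object parameters carrying the connection and job configuration beans. A bare-bones sketch against that signature (class name and body illustrative; assumes Partitioner declares getPartitions abstract, as the hunks suggest):

    import java.util.LinkedList;
    import java.util.List;
    import org.apache.sqoop.common.ImmutableContext;
    import org.apache.sqoop.job.etl.Partition;
    import org.apache.sqoop.job.etl.Partitioner;

    public class PartitionerSketch extends Partitioner {
      @Override
      public List<Partition> getPartitions(ImmutableContext context,
                                           Object connectionConfiguration,
                                           Object jobConfiguration) {
        // A real partitioner would split the work here; return no partitions.
        return new LinkedList<Partition>();
      }
    }
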
http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestJobEngine.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestJobEngine.java b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestJobEngine.java
deleted file mode 100644
index 51dddb4..0000000
--- a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestJobEngine.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.job;
-
-import java.io.BufferedReader;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Locale;
-import java.util.ResourceBundle;
-
-import junit.framework.TestCase;
-
-import org.apache.sqoop.connector.spi.SqoopConnector;
-import org.apache.sqoop.job.etl.Exporter;
-import org.apache.sqoop.job.etl.Extractor;
-import org.apache.sqoop.job.etl.Importer;
-import org.apache.sqoop.job.etl.Initializer;
-import org.apache.sqoop.job.etl.Partition;
-import org.apache.sqoop.job.etl.Partitioner;
-import org.apache.sqoop.job.io.Data;
-import org.apache.sqoop.job.io.DataWriter;
-import org.apache.sqoop.model.MConnectionForms;
-import org.apache.sqoop.model.MJob.Type;
-import org.apache.sqoop.model.MJobForms;
-import org.apache.sqoop.validation.Validator;
-import org.junit.Test;
-
-public class TestJobEngine extends TestCase {
-
-  private static final String DATA_DIR = TestJobEngine.class.getSimpleName();
-  private static final String WAREHOUSE_ROOT = "/tmp/sqoop/warehouse/";
-
-  private static final String OUTPUT_DIR = WAREHOUSE_ROOT + DATA_DIR;
-  private static final String OUTPUT_FILE = "part-r-00000";
-  private static final int START_PARTITION = 1;
-  private static final int NUMBER_OF_PARTITIONS = 9;
-  private static final int NUMBER_OF_ROWS_PER_PARTITION = 10;
-
-  public void testVoid() { }
-/*
-  @Test
-  public void testImport() throws Exception {
-    FileUtils.delete(OUTPUT_DIR);
-
-    DummyConnector connector = new DummyConnector();
-    EtlOptions options = new EtlOptions(connector);
-
-    JobEngine engine = new JobEngine();
-    engine.initialize(options);
-
-    String fileName = OUTPUT_DIR + "/" + OUTPUT_FILE;
-    InputStream filestream = FileUtils.open(fileName);
-    BufferedReader filereader = new BufferedReader(new InputStreamReader(
-        filestream, Data.CHARSET_NAME));
-    verifyOutput(filereader);
-  }
-
-  private void verifyOutput(BufferedReader reader)
-      throws IOException {
-    String line = null;
-    int index = START_PARTITION*NUMBER_OF_ROWS_PER_PARTITION;
-    Data expected = new Data();
-    while ((line = reader.readLine()) != null){
-      expected.setContent(new Object[] {
-          new Integer(index),
-          new Double(index),
-          String.valueOf(index) },
-          Data.ARRAY_RECORD);
-      index++;
-
-      assertEquals(expected.toString(), line);
-    }
-    reader.close();
-
-    assertEquals(NUMBER_OF_PARTITIONS*NUMBER_OF_ROWS_PER_PARTITION,
-        index-START_PARTITION*NUMBER_OF_ROWS_PER_PARTITION);
-  }
-
-  public class DummyConnector implements SqoopConnector {
-
-    @Override
-    public Importer getImporter() {
-      return new Importer(
-          DummyImportInitializer.class,
-          DummyImportPartitioner.class,
-          DummyImportExtractor.class,
-          null);
-    }
-
-    @Override
-    public Exporter getExporter() {
-      fail("This method should not be invoked.");
-      return null;
-    }
-
-    @Override
-    public ResourceBundle getBundle(Locale locale) {
-      fail("This method should not be invoked.");
-      return null;
-    }
-
-    @Override
-    public Validator getValidator() {
-      fail("This method should not be invoked.");
-      return null;
-    }
-
-    @Override
-    public Class getConnectionConfigurationClass() {
-      fail("This method should not be invoked.");
-      return null;
-    }
-
-    @Override
-    public Class getJobConfigurationClass(Type jobType) {
-      fail("This method should not be invoked.");
-      return null;
-    }
-  }
-
-  public static class DummyImportInitializer extends Initializer {
-    @Override
-    public void initialize(MutableContext context, Options options) {
-      context.setString(Constants.JOB_ETL_OUTPUT_DIRECTORY, OUTPUT_DIR);
-    }
-  }
-
-  public static class DummyImportPartition extends Partition {
-    private int id;
-
-    public void setId(int id) {
-      this.id = id;
-    }
-
-    public int getId() {
-      return id;
-    }
-
-    @Override
-    public void readFields(DataInput in) throws IOException {
-      id = in.readInt();
-    }
-
-    @Override
-    public void write(DataOutput out) throws IOException {
-      out.writeInt(id);
-    }
-  }
-
-  public static class DummyImportPartitioner extends Partitioner {
-    @Override
-    public List<Partition> initialize(Context context) {
-      List<Partition> partitions = new LinkedList<Partition>();
-      for (int id = START_PARTITION; id <= NUMBER_OF_PARTITIONS; id++) {
-        DummyImportPartition partition = new DummyImportPartition();
-        partition.setId(id);
-        partitions.add(partition);
-      }
-      return partitions;
-    }
-  }
-
-  public static class DummyImportExtractor extends Extractor {
-    @Override
-    public void initialize(Context context, Partition partition, DataWriter writer) {
-      int id = ((DummyImportPartition)partition).getId();
-      for (int row = 0; row < NUMBER_OF_ROWS_PER_PARTITION; row++) {
-        writer.writeArrayRecord(new Object[] {
-            new Integer(id*NUMBER_OF_ROWS_PER_PARTITION+row),
-            new Double(id*NUMBER_OF_ROWS_PER_PARTITION+row),
-            String.valueOf(id*NUMBER_OF_ROWS_PER_PARTITION+row)});
-      }
-    }
-  }
-*/
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestMapReduce.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestMapReduce.java b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestMapReduce.java
index 94ab560..e269899 100644
--- a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestMapReduce.java
+++ b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestMapReduce.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.sqoop.common.ImmutableContext;
 import org.apache.sqoop.job.etl.Extractor;
 import org.apache.sqoop.job.etl.Loader;
 import org.apache.sqoop.job.etl.Partition;
@@ -45,7 +46,6 @@ import org.apache.sqoop.job.mr.SqoopInputFormat;
 import org.apache.sqoop.job.mr.SqoopMapper;
 import org.apache.sqoop.job.mr.SqoopNullOutputFormat;
 import org.apache.sqoop.job.mr.SqoopSplit;
-import org.junit.Test;
 
 public class TestMapReduce extends TestCase {
 
@@ -53,10 +53,6 @@ public class TestMapReduce extends TestCase {
   private static final int NUMBER_OF_PARTITIONS = 9;
   private static final int NUMBER_OF_ROWS_PER_PARTITION = 10;
 
-  public void testVoid() {}
-
-  /*
-  @Test
   public void testInputFormat() throws Exception {
     Configuration conf = new Configuration();
     conf.set(JobConstants.JOB_ETL_PARTITIONER, DummyPartitioner.class.getName());
@@ -73,7 +69,6 @@ public class TestMapReduce extends TestCase {
     }
   }
 
-  @Test
   public void testMapper() throws Exception {
     Configuration conf = new Configuration();
     conf.set(JobConstants.JOB_ETL_PARTITIONER, DummyPartitioner.class.getName());
@@ -83,7 +78,6 @@ public class TestMapReduce extends TestCase {
         DummyOutputFormat.class);
   }
 
-  @Test
   public void testOutputFormat() throws Exception {
     Configuration conf = new Configuration();
     conf.set(JobConstants.JOB_ETL_PARTITIONER, DummyPartitioner.class.getName());
@@ -114,11 +108,16 @@ public class TestMapReduce extends TestCase {
     public void write(DataOutput out) throws IOException {
       out.writeInt(id);
     }
+
+    @Override
+    public String toString() {
+      return Integer.toString(id);
+    }
   }
 
   public static class DummyPartitioner extends Partitioner {
     @Override
-    public List<Partition> initialize(Context context) {
+    public List<Partition> getPartitions(ImmutableContext context, Object oc, Object oj) {
       List<Partition> partitions = new LinkedList<Partition>();
       for (int id = START_PARTITION; id <= NUMBER_OF_PARTITIONS; id++) {
         DummyPartition partition = new DummyPartition();
@@ -131,12 +130,12 @@ public class TestMapReduce extends TestCase {
 
   public static class DummyExtractor extends Extractor {
     @Override
-    public void initialize(Context context, Partition partition, DataWriter writer) {
+    public void run(ImmutableContext context, Object oc, Object oj, Partition partition, DataWriter writer) {
       int id = ((DummyPartition)partition).getId();
       for (int row = 0; row < NUMBER_OF_ROWS_PER_PARTITION; row++) {
         writer.writeArrayRecord(new Object[] {
-            new Integer(id*NUMBER_OF_ROWS_PER_PARTITION+row),
-            new Double(id*NUMBER_OF_ROWS_PER_PARTITION+row),
+            id * NUMBER_OF_ROWS_PER_PARTITION + row,
+            (double) (id * NUMBER_OF_ROWS_PER_PARTITION + row),
             String.valueOf(id*NUMBER_OF_ROWS_PER_PARTITION+row)});
       }
     }
@@ -168,8 +167,8 @@ public class TestMapReduce extends TestCase {
       @Override
       public void write(Data key, NullWritable value) {
         data.setContent(new Object[] {
-          new Integer(index),
-          new Double(index),
+          index,
+          (double) index,
           String.valueOf(index)},
           Data.ARRAY_RECORD);
         index++;
@@ -209,21 +208,20 @@ public class TestMapReduce extends TestCase {
     private Data actual = new Data();
 
     @Override
-    public void initialize(Context context, DataReader reader) {
+    public void run(ImmutableContext context, DataReader reader) {
       Object[] array;
       while ((array = reader.readArrayRecord()) != null) {
         actual.setContent(array, Data.ARRAY_RECORD);
 
         expected.setContent(new Object[] {
-          new Integer(index),
-          new Double(index),
+          index,
+          (double) index,
           String.valueOf(index)},
           Data.ARRAY_RECORD);
         index++;
 
         assertEquals(expected.toString(), actual.toString());
-      };
+      }
     }
   }
-  */
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/71f40446/execution/mapreduce/src/test/java/org/apache/sqoop/job/io/TestData.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/test/java/org/apache/sqoop/job/io/TestData.java b/execution/mapreduce/src/test/java/org/apache/sqoop/job/io/TestData.java
index d4a7d4d..ea7ac70 100644
--- a/execution/mapreduce/src/test/java/org/apache/sqoop/job/io/TestData.java
+++ b/execution/mapreduce/src/test/java/org/apache/sqoop/job/io/TestData.java
@@ -34,13 +34,13 @@ public class TestData extends TestCase {
 
     // with special characters:
     expected =
-        Long.valueOf((long)TEST_NUMBER) + "," +
-        Double.valueOf(TEST_NUMBER) + "," +
+        (long) TEST_NUMBER + "," +
+        TEST_NUMBER + "," +
         "'" + String.valueOf(TEST_NUMBER) + "\\',s'" + "," +
         Arrays.toString(new byte[] {1, 2, 3, 4, 5});
     data.setContent(new Object[] {
-        Long.valueOf((long)TEST_NUMBER),
-        Double.valueOf(TEST_NUMBER),
+        (long) TEST_NUMBER,
+        TEST_NUMBER,
         String.valueOf(TEST_NUMBER) + "',s",
         new byte[] {1, 2, 3, 4, 5} },
         Data.ARRAY_RECORD);
@@ -49,13 +49,13 @@ public class TestData extends TestCase {
 
     // with null characters:
     expected =
-        Long.valueOf((long)TEST_NUMBER) + "," +
-        Double.valueOf(TEST_NUMBER) + "," +
+        (long) TEST_NUMBER + "," +
+        TEST_NUMBER + "," +
         "null" + "," +
         Arrays.toString(new byte[] {1, 2, 3, 4, 5});
     data.setContent(new Object[] {
-        Long.valueOf((long)TEST_NUMBER),
-        Double.valueOf(TEST_NUMBER),
+        (long) TEST_NUMBER,
+        TEST_NUMBER,
         null,
         new byte[] {1, 2, 3, 4, 5} },
         Data.ARRAY_RECORD);

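Editor's note: a recurring cleanup throughout the commit replaces wrapper constructors (new Integer(i), new Double(i), Long.valueOf((long) n)) with plain autoboxed values and casts. The boxed results compare equal, so the assertions and expected strings are unchanged. A quick standalone demonstration (not from the patch):

    public class BoxingSketch {
      public static void main(String[] args) {
        int index = 7;
        Object[] before = { new Integer(index), new Double(index) };  // old style
        Object[] after  = { index, (double) index };                  // autoboxed
        // equals() holds pairwise, so generated CSV/expected strings match: true
        System.out.println(before[0].equals(after[0]) && before[1].equals(after[1]));
      }
    }
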
