sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From annaszo...@apache.org
Subject [4/4] sqoop git commit: SQOOP-3174: Add SQLServer manual tests to 3rd party test suite
Date Mon, 15 May 2017 08:10:12 GMT
SQOOP-3174: Add SQLServer manual tests to 3rd party test suite

(Boglarka Egyed via Anna Szonyi)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/558bdaea
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/558bdaea
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/558bdaea

Branch: refs/heads/trunk
Commit: 558bdaea907200bfdea2e6e434aabf97e7a566f2
Parents: 0ca73d4
Author: Anna Szonyi <annaszonyi@apache.org>
Authored: Mon May 15 10:03:49 2017 +0200
Committer: Anna Szonyi <annaszonyi@apache.org>
Committed: Mon May 15 10:03:49 2017 +0200

----------------------------------------------------------------------
 build.xml                                       |   2 +-
 .../SQLServerManagerExportManualTest.java       | 473 -----------
 .../manager/SQLServerManagerExportTest.java     | 473 +++++++++++
 .../SQLServerManagerImportManualTest.java       | 368 --------
 .../manager/SQLServerManagerImportTest.java     | 370 ++++++++
 ...erDatatypeExportDelimitedFileManualTest.java |  90 --
 ...QLServerDatatypeExportDelimitedFileTest.java |  92 ++
 ...verDatatypeExportSequenceFileManualTest.java | 262 ------
 ...SQLServerDatatypeExportSequenceFileTest.java | 264 ++++++
 ...erDatatypeImportDelimitedFileManualTest.java | 304 -------
 ...QLServerDatatypeImportDelimitedFileTest.java | 306 +++++++
 ...verDatatypeImportSequenceFileManualTest.java | 845 ------------------
 ...SQLServerDatatypeImportSequenceFileTest.java | 847 +++++++++++++++++++
 .../SQLServerHiveImportManualTest.java          | 190 -----
 .../sqlserver/SQLServerHiveImportTest.java      | 192 +++++
 .../sqlserver/SQLServerManagerManualTest.java   | 366 --------
 .../manager/sqlserver/SQLServerManagerTest.java | 368 ++++++++
 .../sqlserver/SQLServerMultiColsManualTest.java | 136 ---
 .../sqlserver/SQLServerMultiColsTest.java       | 138 +++
 .../sqlserver/SQLServerMultiMapsManualTest.java | 320 -------
 .../sqlserver/SQLServerMultiMapsTest.java       | 322 +++++++
 .../SQLServerParseMethodsManualTest.java        | 285 -------
 .../sqlserver/SQLServerParseMethodsTest.java    | 287 +++++++
 .../sqlserver/SQLServerQueryManualTest.java     | 301 -------
 .../manager/sqlserver/SQLServerQueryTest.java   | 303 +++++++
 .../sqlserver/SQLServerSplitByManualTest.java   | 269 ------
 .../manager/sqlserver/SQLServerSplitByTest.java | 271 ++++++
 .../sqlserver/SQLServerWhereManualTest.java     | 290 -------
 .../manager/sqlserver/SQLServerWhereTest.java   | 292 +++++++
 29 files changed, 4526 insertions(+), 4500 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 10deb83..af43c47 100644
--- a/build.xml
+++ b/build.xml
@@ -743,7 +743,7 @@
       <sysproperty key="test.build.data" value="${build.test}/data"/>
       <sysproperty key="build.test" value="${build.test}"/>
 
-      <!-- microsoft sqlserver manual test related properties-->
+      <!-- microsoft sqlserver thirdparty test related properties-->
       <sysproperty key="test.data.dir" value="${basedir}/testdata"/>
       <sysproperty key="ms.datatype.test.data.file.export" value="DatatypeTestData-export-lite.txt"/>
       <sysproperty key="ms.datatype.test.data.file.import" value="DatatypeTestData-import-lite.txt"/>

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
deleted file mode 100644
index 668a3a9..0000000
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
+++ /dev/null
@@ -1,473 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Please see instructions in SQLServerManagerImportManualTest.
- */
-public class SQLServerManagerExportManualTest extends ExportJobTestCase {
-
-    public static final Log LOG = LogFactory.getLog(
-      SQLServerManagerExportManualTest.class.getName());
-
-  static final String HOST_URL = System.getProperty(
-          "sqoop.test.sqlserver.connectstring.host_url",
-          "jdbc:sqlserver://sqlserverhost:1433");
-  static final String DATABASE_NAME = System.getProperty(
-      "sqoop.test.sqlserver.database",
-      "sqooptest");
-  static final String DATABASE_USER = System.getProperty(
-      "ms.sqlserver.username",
-      "sqoopuser");
-  static final String DATABASE_PASSWORD = System.getProperty(
-      "ms.sqlserver.password",
-      "password");
-
-  static final String SCHEMA_DBO = "dbo";
-  static final String DBO_TABLE_NAME = "EMPLOYEES_MSSQL";
-  static final String DBO_BINARY_TABLE_NAME = "BINARYTYPE_MSSQL";
-  static final String SCHEMA_SCH = "sch";
-  static final String SCH_TABLE_NAME = "PRIVATE_TABLE";
-  static final String CONNECT_STRING = HOST_URL
-              + ";databaseName=" + DATABASE_NAME;
-
-  static final String CONNECTOR_FACTORY = System.getProperty(
-      "sqoop.test.msserver.connector.factory",
-      ConnFactory.DEFAULT_FACTORY_CLASS_NAMES);
-
-  // instance variables populated during setUp, used during tests
-  private SQLServerManager manager;
-  private Configuration conf = new Configuration();
-  private Connection conn = null;
-
-  @Override
-  protected Configuration getConf() {
-    return conf;
-  }
-
-  @Override
-  protected boolean useHsqldbTestServer() {
-    return false;
-  }
-
-  private String getDropTableStatement(String schema, String tableName) {
-    return "DROP TABLE IF EXISTS " + manager.escapeObjectName(schema)
-        + "." + manager.escapeObjectName(tableName);
-  }
-
-  @Before
-  public void setUp() {
-    super.setUp();
-
-    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
-      DBO_TABLE_NAME);
-    options.setUsername(DATABASE_USER);
-    options.setPassword(DATABASE_PASSWORD);
-
-    manager = new SQLServerManager(options);
-
-    createTableAndPopulateData(SCHEMA_DBO, DBO_TABLE_NAME);
-    createTableAndPopulateData(SCHEMA_SCH, SCH_TABLE_NAME);
-
-    // To test with Microsoft SQL server connector, copy the connector jar to
-    // sqoop.thirdparty.lib.dir and set sqoop.test.msserver.connector.factory
-    // to com.microsoft.sqoop.SqlServer.MSSQLServerManagerFactory. By default,
-    // the built-in SQL server connector is used.
-    conf.setStrings(ConnFactory.FACTORY_CLASS_NAMES_KEY, CONNECTOR_FACTORY);
-  }
-
-  public void createTableAndPopulateData(String schema, String table) {
-    String fulltableName = manager.escapeObjectName(schema)
-      + "." + manager.escapeObjectName(table);
-
-    Statement stmt = null;
-
-    // Create schema if needed
-    try {
-      conn = manager.getConnection();
-      stmt = conn.createStatement();
-      stmt.execute("CREATE SCHEMA " + schema);
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.info("Can't create schema: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-    // Drop the existing table, if there is one.
-    try {
-      conn = manager.getConnection();
-      stmt = conn.createStatement();
-      stmt.execute("DROP TABLE " + fulltableName);
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.info("Table was not dropped: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-   // Create and populate table
-   try {
-      conn = manager.getConnection();
-      conn.setAutoCommit(false);
-      stmt = conn.createStatement();
-
-      // create the database table and populate it with data.
-      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
-          + "id INT NOT NULL, "
-          + "name VARCHAR(24) NOT NULL, "
-          + "salary FLOAT, "
-          + "dept VARCHAR(32), "
-          + "PRIMARY KEY (id))");
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.error("Encountered SQL Exception: ", sqlE);
-      sqlE.printStackTrace();
-      fail("SQLException when running test setUp(): " + sqlE);
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing connection/stmt", ex);
-      }
-    }
-  }
-
-  public void createSQLServerBinaryTypeTable(String schema, String table) {
-    String fulltableName = manager.escapeObjectName(schema)
-      + "." + manager.escapeObjectName(table);
-
-    Statement stmt = null;
-
-    // Create schema if needed
-    try {
-    conn = manager.getConnection();
-    stmt = conn.createStatement();
-    stmt.execute("CREATE SCHEMA " + schema);
-    conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.info("Can't create schema: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-    // Drop the existing table, if there is one.
-    try {
-      conn = manager.getConnection();
-      stmt = conn.createStatement();
-      stmt.execute("DROP TABLE " + fulltableName);
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.info("Table was not dropped: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-    // Create and populate table
-    try {
-      conn = manager.getConnection();
-      conn.setAutoCommit(false);
-      stmt = conn.createStatement();
-
-      // create the database table and populate it with data.
-      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
-          + "id INT PRIMARY KEY, "
-          + "b1 BINARY(10), "
-          + "b2 VARBINARY(10))");
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.error("Encountered SQL Exception: ", sqlE);
-      sqlE.printStackTrace();
-      fail("SQLException when running test setUp(): " + sqlE);
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing connection/stmt", ex);
-      }
-    }
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      Statement stmt = conn.createStatement();
-      stmt.executeUpdate(getDropTableStatement(SCHEMA_DBO, DBO_TABLE_NAME));
-      stmt.executeUpdate(getDropTableStatement(SCHEMA_SCH, SCH_TABLE_NAME));
-    } catch (SQLException e) {
-      LOG.error("Can't clean up the database:", e);
-    }
-
-    super.tearDown();
-    try {
-      conn.close();
-      manager.close();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    }
-  }
-
-  private String [] getArgv(String tableName,
-                            String... extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    CommonArgs.addHadoopFlags(args);
-
-    args.add("--table");
-    args.add(tableName);
-    args.add("--export-dir");
-    args.add(getWarehouseDir());
-    args.add("--fields-terminated-by");
-    args.add(",");
-    args.add("--lines-terminated-by");
-    args.add("\\n");
-    args.add("--connect");
-    args.add(CONNECT_STRING);
-    args.add("--username");
-    args.add(DATABASE_USER);
-    args.add("--password");
-    args.add(DATABASE_PASSWORD);
-    args.add("-m");
-    args.add("1");
-
-    for (String arg : extraArgs) {
-      args.add(arg);
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  protected void createTestFile(String filename,
-                                String[] lines)
-                                throws IOException {
-    new File(getWarehouseDir()).mkdirs();
-    File file = new File(getWarehouseDir() + "/" + filename);
-    Writer output = new BufferedWriter(new FileWriter(file));
-    for(String line : lines) {
-      output.write(line);
-      output.write("\n");
-    }
-    output.close();
-  }
-
-  @Test
-  public void testExport() throws IOException, SQLException {
-    createTestFile("inputFile", new String[] {
-      "2,Bob,400,sales",
-      "3,Fred,15,marketing",
-    });
-
-    runExport(getArgv(DBO_TABLE_NAME));
-
-    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
-  }
-
-  @Test
-  public void testExportCustomSchema() throws IOException, SQLException {
-    createTestFile("inputFile", new String[] {
-      "2,Bob,400,sales",
-      "3,Fred,15,marketing",
-    });
-
-    String[] extra = new String[] {"--",
-      "--schema",
-      SCHEMA_SCH,
-    };
-
-    runExport(getArgv(SCH_TABLE_NAME, extra));
-
-    assertRowCount(
-      2,
-      escapeObjectName(SCHEMA_SCH) + "." + escapeObjectName(SCH_TABLE_NAME),
-      conn
-    );
-  }
-
-  @Test
-  public void testExportTableHints() throws IOException, SQLException {
-    createTestFile("inputFile", new String[] {
-      "2,Bob,400,sales",
-      "3,Fred,15,marketing",
-    });
-
-    String []extra = new String[] {"--", "--table-hints",
-      "ROWLOCK",
-    };
-    runExport(getArgv(DBO_TABLE_NAME, extra));
-    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
-  }
-
-  @Test
-  public void testExportTableHintsMultiple() throws IOException, SQLException {
-    createTestFile("inputFile", new String[] {
-      "2,Bob,400,sales",
-      "3,Fred,15,marketing",
-    });
-
-    String []extra = new String[] {"--", "--table-hints",
-      "ROWLOCK,NOWAIT",
-    };
-    runExport(getArgv(DBO_TABLE_NAME, extra));
-    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
-  }
-
-  @Test
-  public void testSQLServerBinaryType() throws IOException, SQLException {
-    createSQLServerBinaryTypeTable(SCHEMA_DBO, DBO_BINARY_TABLE_NAME);
-    createTestFile("inputFile", new String[] {
-      "1,73 65 63 72 65 74 00 00 00 00,73 65 63 72 65 74"
-    });
-    String[] expectedContent = {"73656372657400000000", "736563726574"};
-    runExport(getArgv(DBO_BINARY_TABLE_NAME));
-    assertRowCount(1, escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
-    checkSQLBinaryTableContent(expectedContent, escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
-  }
-
-  /** Make sure mixed update/insert export work correctly. */
-  @Test
-  public void testUpsertTextExport() throws IOException, SQLException {
-    createTestFile("inputFile", new String[] {
-      "2,Bob,400,sales",
-      "3,Fred,15,marketing",
-    });
-    // first time will be insert.
-    runExport(getArgv(SCH_TABLE_NAME, "--update-key", "id",
-              "--update-mode", "allowinsert", "--", "--schema", SCHEMA_SCH));
-    // second time will be update.
-    runExport(getArgv(SCH_TABLE_NAME, "--update-key", "id",
-              "--update-mode", "allowinsert", "--", "--schema", SCHEMA_SCH));
-    assertRowCount(2, escapeObjectName(SCHEMA_SCH) + "." + escapeObjectName(SCH_TABLE_NAME), conn);
-  }
-
-  public static void checkSQLBinaryTableContent(String[] expected, String tableName, Connection connection){
-    Statement stmt = null;
-    ResultSet rs = null;
-    try {
-      stmt = connection.createStatement();
-      rs = stmt.executeQuery("SELECT TOP 1 [b1], [b2] FROM " + tableName);
-      rs.next();
-      assertEquals(expected[0], rs.getString("b1"));
-      assertEquals(expected[1], rs.getString("b2"));
-    } catch (SQLException e) {
-        LOG.error("Can't verify table content", e);
-        fail();
-    } finally {
-      try {
-        connection.commit();
-
-        if (stmt != null) {
-          stmt.close();
-        }
-        if (rs != null) {
-          rs.close();
-        }
-      } catch (SQLException ex) {
-        LOG.info("Ignored exception in finally block.");
-      }
-    }
-  }
-
-  public static void assertRowCount(long expected,
-                                    String tableName,
-                                    Connection connection) {
-    Statement stmt = null;
-    ResultSet rs = null;
-    try {
-      stmt = connection.createStatement();
-      rs = stmt.executeQuery("SELECT count(*) FROM " + tableName);
-
-      rs.next();
-
-      assertEquals(expected, rs.getLong(1));
-    } catch (SQLException e) {
-      LOG.error("Can't verify number of rows", e);
-      fail();
-    } finally {
-      try {
-        connection.commit();
-
-        if (stmt != null) {
-          stmt.close();
-        }
-        if (rs != null) {
-          rs.close();
-        }
-      } catch (SQLException ex) {
-        LOG.info("Ignored exception in finally block.");
-      }
-    }
-  }
-
-  public String escapeObjectName(String objectName) {
-    return "[" + objectName + "]";
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java
new file mode 100644
index 0000000..c87994f
--- /dev/null
+++ b/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java
@@ -0,0 +1,473 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.cloudera.sqoop.manager;
+
+import com.cloudera.sqoop.ConnFactory;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Please see instructions in SQLServerManagerImportTest.
+ */
+public class SQLServerManagerExportTest extends ExportJobTestCase {
+
+    public static final Log LOG = LogFactory.getLog(
+      SQLServerManagerExportTest.class.getName());
+
+  static final String HOST_URL = System.getProperty(
+          "sqoop.test.sqlserver.connectstring.host_url",
+          "jdbc:sqlserver://sqlserverhost:1433");
+  static final String DATABASE_NAME = System.getProperty(
+      "sqoop.test.sqlserver.database",
+      "sqooptest");
+  static final String DATABASE_USER = System.getProperty(
+      "ms.sqlserver.username",
+      "sqoopuser");
+  static final String DATABASE_PASSWORD = System.getProperty(
+      "ms.sqlserver.password",
+      "password");
+
+  static final String SCHEMA_DBO = "dbo";
+  static final String DBO_TABLE_NAME = "EMPLOYEES_MSSQL";
+  static final String DBO_BINARY_TABLE_NAME = "BINARYTYPE_MSSQL";
+  static final String SCHEMA_SCH = "sch";
+  static final String SCH_TABLE_NAME = "PRIVATE_TABLE";
+  static final String CONNECT_STRING = HOST_URL
+              + ";databaseName=" + DATABASE_NAME;
+
+  static final String CONNECTOR_FACTORY = System.getProperty(
+      "sqoop.test.msserver.connector.factory",
+      ConnFactory.DEFAULT_FACTORY_CLASS_NAMES);
+
+  // instance variables populated during setUp, used during tests
+  private SQLServerManager manager;
+  private Configuration conf = new Configuration();
+  private Connection conn = null;
+
+  @Override
+  protected Configuration getConf() {
+    return conf;
+  }
+
+  @Override
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  private String getDropTableStatement(String schema, String tableName) {
+    return "DROP TABLE IF EXISTS " + manager.escapeObjectName(schema)
+        + "." + manager.escapeObjectName(tableName);
+  }
+
+  @Before
+  public void setUp() {
+    super.setUp();
+
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
+      DBO_TABLE_NAME);
+    options.setUsername(DATABASE_USER);
+    options.setPassword(DATABASE_PASSWORD);
+
+    manager = new SQLServerManager(options);
+
+    createTableAndPopulateData(SCHEMA_DBO, DBO_TABLE_NAME);
+    createTableAndPopulateData(SCHEMA_SCH, SCH_TABLE_NAME);
+
+    // To test with Microsoft SQL server connector, copy the connector jar to
+    // sqoop.thirdparty.lib.dir and set sqoop.test.msserver.connector.factory
+    // to com.microsoft.sqoop.SqlServer.MSSQLServerManagerFactory. By default,
+    // the built-in SQL server connector is used.
+    conf.setStrings(ConnFactory.FACTORY_CLASS_NAMES_KEY, CONNECTOR_FACTORY);
+  }
+
+  public void createTableAndPopulateData(String schema, String table) {
+    String fulltableName = manager.escapeObjectName(schema)
+      + "." + manager.escapeObjectName(table);
+
+    Statement stmt = null;
+
+    // Create schema if needed
+    try {
+      conn = manager.getConnection();
+      stmt = conn.createStatement();
+      stmt.execute("CREATE SCHEMA " + schema);
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.info("Can't create schema: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+    // Drop the existing table, if there is one.
+    try {
+      conn = manager.getConnection();
+      stmt = conn.createStatement();
+      stmt.execute("DROP TABLE " + fulltableName);
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.info("Table was not dropped: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+   // Create and populate table
+   try {
+      conn = manager.getConnection();
+      conn.setAutoCommit(false);
+      stmt = conn.createStatement();
+
+      // create the database table and populate it with data.
+      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
+          + "id INT NOT NULL, "
+          + "name VARCHAR(24) NOT NULL, "
+          + "salary FLOAT, "
+          + "dept VARCHAR(32), "
+          + "PRIMARY KEY (id))");
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.error("Encountered SQL Exception: ", sqlE);
+      sqlE.printStackTrace();
+      fail("SQLException when running test setUp(): " + sqlE);
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing connection/stmt", ex);
+      }
+    }
+  }
+
+  public void createSQLServerBinaryTypeTable(String schema, String table) {
+    String fulltableName = manager.escapeObjectName(schema)
+      + "." + manager.escapeObjectName(table);
+
+    Statement stmt = null;
+
+    // Create schema if needed
+    try {
+    conn = manager.getConnection();
+    stmt = conn.createStatement();
+    stmt.execute("CREATE SCHEMA " + schema);
+    conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.info("Can't create schema: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+    // Drop the existing table, if there is one.
+    try {
+      conn = manager.getConnection();
+      stmt = conn.createStatement();
+      stmt.execute("DROP TABLE " + fulltableName);
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.info("Table was not dropped: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+    // Create and populate table
+    try {
+      conn = manager.getConnection();
+      conn.setAutoCommit(false);
+      stmt = conn.createStatement();
+
+      // create the database table and populate it with data.
+      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
+          + "id INT PRIMARY KEY, "
+          + "b1 BINARY(10), "
+          + "b2 VARBINARY(10))");
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.error("Encountered SQL Exception: ", sqlE);
+      sqlE.printStackTrace();
+      fail("SQLException when running test setUp(): " + sqlE);
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing connection/stmt", ex);
+      }
+    }
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      Statement stmt = conn.createStatement();
+      stmt.executeUpdate(getDropTableStatement(SCHEMA_DBO, DBO_TABLE_NAME));
+      stmt.executeUpdate(getDropTableStatement(SCHEMA_SCH, SCH_TABLE_NAME));
+    } catch (SQLException e) {
+      LOG.error("Can't clean up the database:", e);
+    }
+
+    super.tearDown();
+    try {
+      conn.close();
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  private String [] getArgv(String tableName,
+                            String... extraArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    CommonArgs.addHadoopFlags(args);
+
+    args.add("--table");
+    args.add(tableName);
+    args.add("--export-dir");
+    args.add(getWarehouseDir());
+    args.add("--fields-terminated-by");
+    args.add(",");
+    args.add("--lines-terminated-by");
+    args.add("\\n");
+    args.add("--connect");
+    args.add(CONNECT_STRING);
+    args.add("--username");
+    args.add(DATABASE_USER);
+    args.add("--password");
+    args.add(DATABASE_PASSWORD);
+    args.add("-m");
+    args.add("1");
+
+    for (String arg : extraArgs) {
+      args.add(arg);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  protected void createTestFile(String filename,
+                                String[] lines)
+                                throws IOException {
+    new File(getWarehouseDir()).mkdirs();
+    File file = new File(getWarehouseDir() + "/" + filename);
+    Writer output = new BufferedWriter(new FileWriter(file));
+    for(String line : lines) {
+      output.write(line);
+      output.write("\n");
+    }
+    output.close();
+  }
+
+  @Test
+  public void testExport() throws IOException, SQLException {
+    createTestFile("inputFile", new String[] {
+      "2,Bob,400,sales",
+      "3,Fred,15,marketing",
+    });
+
+    runExport(getArgv(DBO_TABLE_NAME));
+
+    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
+  }
+
+  @Test
+  public void testExportCustomSchema() throws IOException, SQLException {
+    createTestFile("inputFile", new String[] {
+      "2,Bob,400,sales",
+      "3,Fred,15,marketing",
+    });
+
+    String[] extra = new String[] {"--",
+      "--schema",
+      SCHEMA_SCH,
+    };
+
+    runExport(getArgv(SCH_TABLE_NAME, extra));
+
+    assertRowCount(
+      2,
+      escapeObjectName(SCHEMA_SCH) + "." + escapeObjectName(SCH_TABLE_NAME),
+      conn
+    );
+  }
+
+  @Test
+  public void testExportTableHints() throws IOException, SQLException {
+    createTestFile("inputFile", new String[] {
+      "2,Bob,400,sales",
+      "3,Fred,15,marketing",
+    });
+
+    String []extra = new String[] {"--", "--table-hints",
+      "ROWLOCK",
+    };
+    runExport(getArgv(DBO_TABLE_NAME, extra));
+    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
+  }
+
+  @Test
+  public void testExportTableHintsMultiple() throws IOException, SQLException {
+    createTestFile("inputFile", new String[] {
+      "2,Bob,400,sales",
+      "3,Fred,15,marketing",
+    });
+
+    String []extra = new String[] {"--", "--table-hints",
+      "ROWLOCK,NOWAIT",
+    };
+    runExport(getArgv(DBO_TABLE_NAME, extra));
+    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
+  }
+
+  @Test
+  public void testSQLServerBinaryType() throws IOException, SQLException {
+    createSQLServerBinaryTypeTable(SCHEMA_DBO, DBO_BINARY_TABLE_NAME);
+    createTestFile("inputFile", new String[] {
+      "1,73 65 63 72 65 74 00 00 00 00,73 65 63 72 65 74"
+    });
+    String[] expectedContent = {"73656372657400000000", "736563726574"};
+    runExport(getArgv(DBO_BINARY_TABLE_NAME));
+    assertRowCount(1, escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
+    checkSQLBinaryTableContent(expectedContent, escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
+  }
+
+  /** Make sure mixed update/insert export work correctly. */
+  @Test
+  public void testUpsertTextExport() throws IOException, SQLException {
+    createTestFile("inputFile", new String[] {
+      "2,Bob,400,sales",
+      "3,Fred,15,marketing",
+    });
+    // first time will be insert.
+    runExport(getArgv(SCH_TABLE_NAME, "--update-key", "id",
+              "--update-mode", "allowinsert", "--", "--schema", SCHEMA_SCH));
+    // second time will be update.
+    runExport(getArgv(SCH_TABLE_NAME, "--update-key", "id",
+              "--update-mode", "allowinsert", "--", "--schema", SCHEMA_SCH));
+    assertRowCount(2, escapeObjectName(SCHEMA_SCH) + "." + escapeObjectName(SCH_TABLE_NAME), conn);
+  }
+
+  public static void checkSQLBinaryTableContent(String[] expected, String tableName, Connection connection){
+    Statement stmt = null;
+    ResultSet rs = null;
+    try {
+      stmt = connection.createStatement();
+      rs = stmt.executeQuery("SELECT TOP 1 [b1], [b2] FROM " + tableName);
+      rs.next();
+      assertEquals(expected[0], rs.getString("b1"));
+      assertEquals(expected[1], rs.getString("b2"));
+    } catch (SQLException e) {
+        LOG.error("Can't verify table content", e);
+        fail();
+    } finally {
+      try {
+        connection.commit();
+
+        if (stmt != null) {
+          stmt.close();
+        }
+        if (rs != null) {
+          rs.close();
+        }
+      } catch (SQLException ex) {
+        LOG.info("Ignored exception in finally block.");
+      }
+    }
+  }
+
+  public static void assertRowCount(long expected,
+                                    String tableName,
+                                    Connection connection) {
+    Statement stmt = null;
+    ResultSet rs = null;
+    try {
+      stmt = connection.createStatement();
+      rs = stmt.executeQuery("SELECT count(*) FROM " + tableName);
+
+      rs.next();
+
+      assertEquals(expected, rs.getLong(1));
+    } catch (SQLException e) {
+      LOG.error("Can't verify number of rows", e);
+      fail();
+    } finally {
+      try {
+        connection.commit();
+
+        if (stmt != null) {
+          stmt.close();
+        }
+        if (rs != null) {
+          rs.close();
+        }
+      } catch (SQLException ex) {
+        LOG.info("Ignored exception in finally block.");
+      }
+    }
+  }
+
+  public String escapeObjectName(String objectName) {
+    return "[" + objectName + "]";
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
deleted file mode 100644
index abc0479..0000000
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.sqoop.ConnFactory;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test the SQLServerManager implementation.
- *
- * This uses JDBC to import data from an SQLServer database into HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerManagerImportManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerManagerImportManualTest extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory.getLog(
-          SQLServerManagerImportManualTest.class.getName());
-
-  static final String HOST_URL = System.getProperty(
-          "sqoop.test.sqlserver.connectstring.host_url",
-          "jdbc:sqlserver://sqlserverhost:1433");
-  static final String DATABASE_NAME = System.getProperty(
-      "sqoop.test.sqlserver.database",
-      "sqooptest");
-  static final String DATABASE_USER = System.getProperty(
-      "ms.sqlserver.username",
-      "sqoopuser");
-  static final String DATABASE_PASSWORD = System.getProperty(
-      "ms.sqlserver.password",
-      "password");
-
-  static final String SCHEMA_DBO = "dbo";
-  static final String DBO_TABLE_NAME = "EMPLOYEES_MSSQL";
-  static final String SCHEMA_SCH = "sch";
-  static final String SCH_TABLE_NAME = "PRIVATE_TABLE";
-  static final String CONNECT_STRING = HOST_URL
-              + ";databaseName=" + DATABASE_NAME;
-
-  static final String CONNECTOR_FACTORY = System.getProperty(
-      "sqoop.test.msserver.connector.factory",
-      ConnFactory.DEFAULT_FACTORY_CLASS_NAMES);
-
-  // instance variables populated during setUp, used during tests
-  private SQLServerManager manager;
-
-  private Configuration conf = new Configuration();
-  private Connection conn = null;
-
-  @Override
-  protected Configuration getConf() {
-    return conf;
-  }
-
-  @Override
-  protected boolean useHsqldbTestServer() {
-    return false;
-  }
-
-  private String getDropTableStatement(String schema, String tableName) {
-    return "DROP TABLE IF EXISTS " + manager.escapeObjectName(schema)
-        + "." + manager.escapeObjectName(tableName);
-  }
-
-  @Before
-  public void setUp() {
-    super.setUp();
-
-    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
-      DBO_TABLE_NAME);
-    options.setUsername(DATABASE_USER);
-    options.setPassword(DATABASE_PASSWORD);
-
-    manager = new SQLServerManager(options);
-
-    createTableAndPopulateData(SCHEMA_DBO, DBO_TABLE_NAME);
-    createTableAndPopulateData(SCHEMA_SCH, SCH_TABLE_NAME);
-
-    // To test with Microsoft SQL server connector, copy the connector jar to
-    // sqoop.thirdparty.lib.dir and set sqoop.test.msserver.connector.factory
-    // to com.microsoft.sqoop.SqlServer.MSSQLServerManagerFactory. By default,
-    // the built-in SQL server connector is used.
-    conf.setStrings(ConnFactory.FACTORY_CLASS_NAMES_KEY, CONNECTOR_FACTORY);
-  }
-
-  public void createTableAndPopulateData(String schema, String table) {
-    String fulltableName = manager.escapeObjectName(schema)
-      + "." + manager.escapeObjectName(table);
-
-    Statement stmt = null;
-
-    // Create schema if needed
-    try {
-      conn = manager.getConnection();
-      stmt = conn.createStatement();
-      stmt.execute("CREATE SCHEMA " + schema);
-    } catch (SQLException sqlE) {
-      LOG.info("Can't create schema: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-    // Drop the existing table, if there is one.
-    try {
-      conn = manager.getConnection();
-      stmt = conn.createStatement();
-      stmt.execute("DROP TABLE " + fulltableName);
-    } catch (SQLException sqlE) {
-      LOG.info("Table was not dropped: " + sqlE.getMessage());
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing stmt", ex);
-      }
-    }
-
-   // Create and populate table
-   try {
-      conn = manager.getConnection();
-      conn.setAutoCommit(false);
-      stmt = conn.createStatement();
-
-      // create the database table and populate it with data.
-      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
-          + "id INT NOT NULL, "
-          + "name VARCHAR(24) NOT NULL, "
-          + "salary FLOAT, "
-          + "dept VARCHAR(32), "
-          + "PRIMARY KEY (id))");
-
-      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
-          + "1,'Aaron', "
-          + "1000000.00,'engineering')");
-      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
-          + "2,'Bob', "
-          + "400.00,'sales')");
-      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
-          + "3,'Fred', 15.00,"
-          + "'marketing')");
-      conn.commit();
-    } catch (SQLException sqlE) {
-      LOG.error("Encountered SQL Exception: ", sqlE);
-      sqlE.printStackTrace();
-      fail("SQLException when running test setUp(): " + sqlE);
-    } finally {
-      try {
-        if (null != stmt) {
-          stmt.close();
-        }
-      } catch (Exception ex) {
-        LOG.warn("Exception while closing connection/stmt", ex);
-      }
-    }
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      Statement stmt = conn.createStatement();
-      stmt.executeUpdate(getDropTableStatement(SCHEMA_DBO, DBO_TABLE_NAME));
-      stmt.executeUpdate(getDropTableStatement(SCHEMA_SCH, SCH_TABLE_NAME));
-    } catch (SQLException e) {
-      LOG.error("Can't clean up the database:", e);
-    }
-
-    super.tearDown();
-    try {
-      manager.close();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    }
-  }
-
-  @Test
-  public void testImportSimple() throws IOException {
-    String [] expectedResults = {
-      "1,Aaron,1000000.0,engineering",
-      "2,Bob,400.0,sales",
-      "3,Fred,15.0,marketing",
-    };
-
-    doImportAndVerify(DBO_TABLE_NAME, expectedResults);
-  }
-
-  @Test
-  public void testImportExplicitDefaultSchema() throws IOException {
-    String [] expectedResults = {
-      "1,Aaron,1000000.0,engineering",
-      "2,Bob,400.0,sales",
-      "3,Fred,15.0,marketing",
-    };
-
-    String[] extraArgs = new String[] {"--schema", SCHEMA_DBO};
-
-    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
-  }
-
-  @Test
-  public void testImportDifferentSchema() throws IOException {
-    String [] expectedResults = {
-      "1,Aaron,1000000.0,engineering",
-      "2,Bob,400.0,sales",
-      "3,Fred,15.0,marketing",
-    };
-
-    String[] extraArgs = new String[] {"--schema", SCHEMA_SCH};
-
-    doImportAndVerify(SCH_TABLE_NAME, expectedResults, extraArgs);
-  }
-
-  @Test
-  public void testImportTableHints() throws IOException {
-    String [] expectedResults = {
-      "1,Aaron,1000000.0,engineering",
-      "2,Bob,400.0,sales",
-      "3,Fred,15.0,marketing",
-    };
-
-    String[] extraArgs = new String[] {"--table-hints", "NOLOCK"};
-    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
-  }
-
-  @Test
-  public void testImportTableHintsMultiple() throws IOException {
-    String [] expectedResults = {
-      "1,Aaron,1000000.0,engineering",
-      "2,Bob,400.0,sales",
-      "3,Fred,15.0,marketing",
-    };
-
-    String[] extraArgs = new String[] {"--table-hints", "NOLOCK,NOWAIT"};
-    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
-  }
-
-  private String [] getArgv(String tableName, String ... extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    CommonArgs.addHadoopFlags(args);
-
-    args.add("--table");
-    args.add(tableName);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(CONNECT_STRING);
-    args.add("--username");
-    args.add(DATABASE_USER);
-    args.add("--password");
-    args.add(DATABASE_PASSWORD);
-    args.add("--num-mappers");
-    args.add("1");
-
-    if (extraArgs.length > 0) {
-      args.add("--");
-      for (String arg : extraArgs) {
-        args.add(arg);
-      }
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  private void doImportAndVerify(String tableName,
-                                 String [] expectedResults,
-                                 String ... extraArgs) throws IOException {
-
-    Path warehousePath = new Path(this.getWarehouseDir());
-    Path tablePath = new Path(warehousePath, tableName);
-    Path filePath = new Path(tablePath, "part-m-00000");
-
-    File tableFile = new File(tablePath.toString());
-    if (tableFile.exists() && tableFile.isDirectory()) {
-      // remove the directory before running the import.
-      FileListing.recursiveDeleteDir(tableFile);
-    }
-
-    String [] argv = getArgv(tableName, extraArgs);
-    try {
-      runImport(argv);
-    } catch (IOException ioe) {
-      LOG.error("Got IOException during import: " + ioe.toString());
-      ioe.printStackTrace();
-      fail(ioe.toString());
-    }
-
-    File f = new File(filePath.toString());
-    assertTrue("Could not find imported data file", f.exists());
-    BufferedReader r = null;
-    try {
-      // Read through the file and make sure it's all there.
-      r = new BufferedReader(new InputStreamReader(new FileInputStream(f)));
-      for (String expectedLine : expectedResults) {
-        assertEquals(expectedLine, r.readLine());
-      }
-    } catch (IOException ioe) {
-      LOG.error("Got IOException verifying results: " + ioe.toString());
-      ioe.printStackTrace();
-      fail(ioe.toString());
-    } finally {
-      IOUtils.closeStream(r);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java
new file mode 100644
index 0000000..714a592
--- /dev/null
+++ b/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java
@@ -0,0 +1,370 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.cloudera.sqoop.manager;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.sqoop.ConnFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import com.cloudera.sqoop.util.FileListing;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the SQLServerManager implementation.
+ *
+ * This uses JDBC to import data from an SQLServer database into HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerManagerImportTest or -Dthirdparty=true.
+ *
+ * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerManagerImportTest extends ImportJobTestCase {
+
+  public static final Log LOG = LogFactory.getLog(
+          SQLServerManagerImportTest.class.getName());
+
+  static final String HOST_URL = System.getProperty(
+          "sqoop.test.sqlserver.connectstring.host_url",
+          "jdbc:sqlserver://sqlserverhost:1433");
+  static final String DATABASE_NAME = System.getProperty(
+      "sqoop.test.sqlserver.database",
+      "sqooptest");
+  static final String DATABASE_USER = System.getProperty(
+      "ms.sqlserver.username",
+      "sqoopuser");
+  static final String DATABASE_PASSWORD = System.getProperty(
+      "ms.sqlserver.password",
+      "password");
+
+  static final String SCHEMA_DBO = "dbo";
+  static final String DBO_TABLE_NAME = "EMPLOYEES_MSSQL";
+  static final String SCHEMA_SCH = "sch";
+  static final String SCH_TABLE_NAME = "PRIVATE_TABLE";
+  static final String CONNECT_STRING = HOST_URL
+              + ";databaseName=" + DATABASE_NAME;
+
+  static final String CONNECTOR_FACTORY = System.getProperty(
+      "sqoop.test.msserver.connector.factory",
+      ConnFactory.DEFAULT_FACTORY_CLASS_NAMES);
+
+  // instance variables populated during setUp, used during tests
+  private SQLServerManager manager;
+
+  private Configuration conf = new Configuration();
+  private Connection conn = null;
+
+  @Override
+  protected Configuration getConf() {
+    return conf;
+  }
+
+  @Override
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  private String getDropTableStatement(String schema, String tableName) {
+    return "DROP TABLE IF EXISTS " + manager.escapeObjectName(schema)
+        + "." + manager.escapeObjectName(tableName);
+  }
+
+  @Before
+  public void setUp() {
+    super.setUp();
+
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
+      DBO_TABLE_NAME);
+    options.setUsername(DATABASE_USER);
+    options.setPassword(DATABASE_PASSWORD);
+
+    manager = new SQLServerManager(options);
+
+    createTableAndPopulateData(SCHEMA_DBO, DBO_TABLE_NAME);
+    createTableAndPopulateData(SCHEMA_SCH, SCH_TABLE_NAME);
+
+    // To test with Microsoft SQL server connector, copy the connector jar to
+    // sqoop.thirdparty.lib.dir and set sqoop.test.msserver.connector.factory
+    // to com.microsoft.sqoop.SqlServer.MSSQLServerManagerFactory. By default,
+    // the built-in SQL server connector is used.
+    conf.setStrings(ConnFactory.FACTORY_CLASS_NAMES_KEY, CONNECTOR_FACTORY);
+  }
+
+  public void createTableAndPopulateData(String schema, String table) {
+    String fulltableName = manager.escapeObjectName(schema)
+      + "." + manager.escapeObjectName(table);
+
+    Statement stmt = null;
+
+    // Create schema if needed
+    try {
+      conn = manager.getConnection();
+      stmt = conn.createStatement();
+      stmt.execute("CREATE SCHEMA " + schema);
+    } catch (SQLException sqlE) {
+      LOG.info("Can't create schema: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+    // Drop the existing table, if there is one.
+    try {
+      conn = manager.getConnection();
+      stmt = conn.createStatement();
+      stmt.execute("DROP TABLE " + fulltableName);
+    } catch (SQLException sqlE) {
+      LOG.info("Table was not dropped: " + sqlE.getMessage());
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing stmt", ex);
+      }
+    }
+
+   // Create and populate table
+   try {
+      conn = manager.getConnection();
+      conn.setAutoCommit(false);
+      stmt = conn.createStatement();
+
+      // create the database table and populate it with data.
+      stmt.executeUpdate("CREATE TABLE " + fulltableName + " ("
+          + "id INT NOT NULL, "
+          + "name VARCHAR(24) NOT NULL, "
+          + "salary FLOAT, "
+          + "dept VARCHAR(32), "
+          + "PRIMARY KEY (id))");
+
+      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
+          + "1,'Aaron', "
+          + "1000000.00,'engineering')");
+      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
+          + "2,'Bob', "
+          + "400.00,'sales')");
+      stmt.executeUpdate("INSERT INTO " + fulltableName + " VALUES("
+          + "3,'Fred', 15.00,"
+          + "'marketing')");
+      conn.commit();
+    } catch (SQLException sqlE) {
+      LOG.error("Encountered SQL Exception: ", sqlE);
+      sqlE.printStackTrace();
+      fail("SQLException when running test setUp(): " + sqlE);
+    } finally {
+      try {
+        if (null != stmt) {
+          stmt.close();
+        }
+      } catch (Exception ex) {
+        LOG.warn("Exception while closing connection/stmt", ex);
+      }
+    }
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      Statement stmt = conn.createStatement();
+      stmt.executeUpdate(getDropTableStatement(SCHEMA_DBO, DBO_TABLE_NAME));
+      stmt.executeUpdate(getDropTableStatement(SCHEMA_SCH, SCH_TABLE_NAME));
+    } catch (SQLException e) {
+      LOG.error("Can't clean up the database:", e);
+    }
+
+    super.tearDown();
+    try {
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  @Test
+  public void testImportSimple() throws IOException {
+    String [] expectedResults = {
+      "1,Aaron,1000000.0,engineering",
+      "2,Bob,400.0,sales",
+      "3,Fred,15.0,marketing",
+    };
+
+    doImportAndVerify(DBO_TABLE_NAME, expectedResults);
+  }
+
+  @Test
+  public void testImportExplicitDefaultSchema() throws IOException {
+    String [] expectedResults = {
+      "1,Aaron,1000000.0,engineering",
+      "2,Bob,400.0,sales",
+      "3,Fred,15.0,marketing",
+    };
+
+    String[] extraArgs = new String[] {"--schema", SCHEMA_DBO};
+
+    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
+  }
+
+  @Test
+  public void testImportDifferentSchema() throws IOException {
+    String [] expectedResults = {
+      "1,Aaron,1000000.0,engineering",
+      "2,Bob,400.0,sales",
+      "3,Fred,15.0,marketing",
+    };
+
+    String[] extraArgs = new String[] {"--schema", SCHEMA_SCH};
+
+    doImportAndVerify(SCH_TABLE_NAME, expectedResults, extraArgs);
+  }
+
+  @Test
+  public void testImportTableHints() throws IOException {
+    String [] expectedResults = {
+      "1,Aaron,1000000.0,engineering",
+      "2,Bob,400.0,sales",
+      "3,Fred,15.0,marketing",
+    };
+
+    String[] extraArgs = new String[] {"--table-hints", "NOLOCK"};
+    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
+  }
+
+  @Test
+  public void testImportTableHintsMultiple() throws IOException {
+    String [] expectedResults = {
+      "1,Aaron,1000000.0,engineering",
+      "2,Bob,400.0,sales",
+      "3,Fred,15.0,marketing",
+    };
+
+    String[] extraArgs = new String[] {"--table-hints", "NOLOCK,NOWAIT"};
+    doImportAndVerify(DBO_TABLE_NAME, expectedResults, extraArgs);
+  }
+
+  private String [] getArgv(String tableName, String ... extraArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    CommonArgs.addHadoopFlags(args);
+
+    args.add("--table");
+    args.add(tableName);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(CONNECT_STRING);
+    args.add("--username");
+    args.add(DATABASE_USER);
+    args.add("--password");
+    args.add(DATABASE_PASSWORD);
+    args.add("--num-mappers");
+    args.add("1");
+
+    if (extraArgs.length > 0) {
+      args.add("--");
+      for (String arg : extraArgs) {
+        args.add(arg);
+      }
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  private void doImportAndVerify(String tableName,
+                                 String [] expectedResults,
+                                 String ... extraArgs) throws IOException {
+
+    Path warehousePath = new Path(this.getWarehouseDir());
+    Path tablePath = new Path(warehousePath, tableName);
+    Path filePath = new Path(tablePath, "part-m-00000");
+
+    File tableFile = new File(tablePath.toString());
+    if (tableFile.exists() && tableFile.isDirectory()) {
+      // remove the directory before running the import.
+      FileListing.recursiveDeleteDir(tableFile);
+    }
+
+    String [] argv = getArgv(tableName, extraArgs);
+    try {
+      runImport(argv);
+    } catch (IOException ioe) {
+      LOG.error("Got IOException during import: " + ioe.toString());
+      ioe.printStackTrace();
+      fail(ioe.toString());
+    }
+
+    File f = new File(filePath.toString());
+    assertTrue("Could not find imported data file", f.exists());
+    BufferedReader r = null;
+    try {
+      // Read through the file and make sure it's all there.
+      r = new BufferedReader(new InputStreamReader(new FileInputStream(f)));
+      for (String expectedLine : expectedResults) {
+        assertEquals(expectedLine, r.readLine());
+      }
+    } catch (IOException ioe) {
+      LOG.error("Got IOException verifying results: " + ioe.toString());
+      ioe.printStackTrace();
+      fail(ioe.toString());
+    } finally {
+      IOUtils.closeStream(r);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
deleted file mode 100644
index 539eeb3..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.manager.sqlserver;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
-
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.BufferedWriter;
-
-/**
- * Test to export delimited file to SQL Server.
- *
- * This uses JDBC to export data to an SQLServer database from HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerDatatypeExportDelimitedFileManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerDatatypeExportDelimitedFileManualTest
-    extends ManagerCompatExport {
-
-  @Override
-  public void createFile(DATATYPES dt, String[] data) throws IOException {
-    Path tablePath = getTablePath(dt);
-    Path filePath = new Path(tablePath, "part0000");
-
-    Configuration conf = new Configuration();
-    String hdfsroot;
-    hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
-    if (hdfsroot == null) {
-      hdfsroot = "hdfs://localhost/";
-    }
-    conf.set("fs.default.name", hdfsroot);
-    FileSystem fs = FileSystem.get(conf);
-    fs.mkdirs(tablePath);
-    System.out.println("-----------------------------------Path : "
-        + filePath);
-    OutputStream os = fs.create(filePath);
-
-    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
-    for (int i = 0; i < data.length; i++) {
-      w.write(data[i] + "\n");
-    }
-    w.close();
-    os.close();
-  }
-
-  @Override
-  public void createFile(DATATYPES dt, String data) throws IOException {
-    createFile(dt, new String[] { data });
-  }
-
-  @Override
-  public String getOutputFileName() {
-    return "ManagerCompatExportDelim.txt";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileTest.java
new file mode 100644
index 0000000..9b70af1
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileTest.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.BufferedWriter;
+
+/**
+ * Test to export delimited file to SQL Server.
+ *
+ * This uses JDBC to export data to an SQLServer database from HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerDatatypeExportDelimitedFileTest or -Dthirdparty=true.
+ *
+ * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerDatatypeExportDelimitedFileTest
+    extends ManagerCompatExport {
+
+  @Override
+  public void createFile(DATATYPES dt, String[] data) throws IOException {
+    Path tablePath = getTablePath(dt);
+    Path filePath = new Path(tablePath, "part0000");
+
+    Configuration conf = new Configuration();
+    String hdfsroot;
+    hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
+    if (hdfsroot == null) {
+      hdfsroot = "hdfs://localhost/";
+    }
+    conf.set("fs.default.name", hdfsroot);
+    FileSystem fs = FileSystem.get(conf);
+    fs.mkdirs(tablePath);
+    System.out.println("-----------------------------------Path : "
+        + filePath);
+    OutputStream os = fs.create(filePath);
+
+    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
+    for (int i = 0; i < data.length; i++) {
+      w.write(data[i] + "\n");
+    }
+    w.close();
+    os.close();
+  }
+
+  @Override
+  public void createFile(DATATYPES dt, String data) throws IOException {
+    createFile(dt, new String[] { data });
+  }
+
+  @Override
+  public String getOutputFileName() {
+    return "ManagerCompatExportDelim.txt";
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
deleted file mode 100644
index 0f206d0..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.manager.sqlserver;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-/**
- * Test to export sequence file to SQL Server.
- *
- * This uses JDBC to export data to an SQLServer database from HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerDatatypeExportSequenceFileManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerDatatypeExportSequenceFileManualTest
-    extends ManagerCompatExport {
-
-  private static Map jars = new HashMap();
-
-  @Override
-  public void createFile(DATATYPES dt, String[] data) throws Exception {
-    try {
-      codeGen(dt);
-      // Instantiate the value record object via reflection.
-      Class cls = Class.forName(getTableName(dt), true, Thread
-        .currentThread().getContextClassLoader());
-      SqoopRecord record = (SqoopRecord) ReflectionUtils.newInstance(cls,
-        new Configuration());
-
-      // Create the SequenceFile.
-      Configuration conf = new Configuration();
-      String hdfsroot;
-      hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
-      if (hdfsroot == null){
-        hdfsroot ="hdfs://localhost/";
-      }
-      conf.set("fs.default.name", hdfsroot);
-      FileSystem fs = FileSystem.get(conf);
-      Path tablePath = getTablePath(dt);
-      Path filePath = new Path(tablePath, getTableName(dt));
-      fs.mkdirs(tablePath);
-      SequenceFile.Writer w = SequenceFile.createWriter(fs, conf,
-        filePath, LongWritable.class, cls);
-
-      int cnt = 0;
-      for (String tmp : data) {
-        record.parse(tmp + "\n");
-        w.append(new LongWritable(cnt), record);
-      }
-
-      w.close();
-    } catch (ClassNotFoundException cnfe) {
-     throw new IOException(cnfe);
-    } catch (RecordParser.ParseError pe) {
-     throw new IOException(pe);
-    }
-  }
-
-  @Override
-  public void createFile(DATATYPES dt, String data) throws Exception {
-    createFile(dt, new String[] { data });
-  }
-
-  public String[] codeGen(DATATYPES dt) throws Exception {
-
-    CodeGenTool codeGen = new CodeGenTool();
-
-    String[] codeGenArgs = getCodeGenArgv(dt);
-    SqoopOptions options = codeGen.parseArguments(codeGenArgs, null, null,
-      true);
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-
-    options.setUsername(username);
-    options.setPassword(password);
-    codeGen.validateOptions(options);
-
-    int ret = codeGen.run(options);
-    assertEquals(0, ret);
-    List<String> generatedJars = codeGen.getGeneratedJarFiles();
-
-    assertNotNull(generatedJars);
-    assertEquals("Expected 1 generated jar file", 1, generatedJars.size());
-    String jarFileName = generatedJars.get(0);
-    // Sqoop generates jars named "foo.jar"; by default, this should contain
-    // a class named 'foo'. Extract the class name.
-    Path jarPath = new Path(jarFileName);
-    String jarBaseName = jarPath.getName();
-    assertTrue(jarBaseName.endsWith(".jar"));
-    assertTrue(jarBaseName.length() > ".jar".length());
-    String className = jarBaseName.substring(0, jarBaseName.length()
-      - ".jar".length());
-
-    LOG.info("Using jar filename: " + jarFileName);
-    LOG.info("Using class name: " + className);
-
-    ClassLoader prevClassLoader = null;
-
-
-    if (null != jarFileName) {
-    prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-      className);
-    System.out.println("Jar,class =" + jarFileName + " , "
-      + className);
-    }
-
-    // Now run and verify the export.
-    LOG.info("Exporting SequenceFile-based data");
-    jars.put(dt, jarFileName);
-    return (getArgv(dt, "--class-name", className, "--jar-file",
-     jarFileName));
-  }
-
-  @Override
-  protected String[] getArgv(DATATYPES dt) {
-
-    String[] args = super.getArgv(dt);
-    String[] addtionalArgs = Arrays.copyOf(args, args.length + 4);
-
-    String[] additional = new String[4];
-    additional[0] = "--class-name";
-    additional[1] = getTableName(dt);
-    additional[2] = "--jar-file";
-    additional[3] = jars.get(dt).toString();
-    for (int i = args.length, j = 0; i < addtionalArgs.length; i++, j++) {
-     addtionalArgs[i] = additional[j];
-    }
-
-    for (String a : addtionalArgs) {
-     System.out.println(a);
-    }
-    return addtionalArgs;
-  }
-
-  /**
-  * @return an argv for the CodeGenTool to use when creating tables to
-  *         export.
-  */
-  protected String[] getCodeGenArgv(DATATYPES dt) {
-    List<String> codeGenArgv = new ArrayList<String>();
-
-    codeGenArgv.add("--table");
-    codeGenArgv.add(getTableName(dt));
-    codeGenArgv.add("--connect");
-    codeGenArgv.add(MSSQLTestUtils.getDBConnectString());
-    codeGenArgv.add("--fields-terminated-by");
-    codeGenArgv.add("\\t");
-    codeGenArgv.add("--lines-terminated-by");
-    codeGenArgv.add("\\n");
-
-    return codeGenArgv.toArray(new String[0]);
-  }
-
-  protected String[] getArgv(DATATYPES dt, String... additionalArgv) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    // Any additional Hadoop flags (-D foo=bar) are prepended.
-    if (null != additionalArgv) {
-      boolean prevIsFlag = false;
-      for (String arg : additionalArgv) {
-        if (arg.equals("-D")) {
-          args.add(arg);
-          prevIsFlag = true;
-        } else if (prevIsFlag) {
-          args.add(arg);
-          prevIsFlag = false;
-        }
-      }
-    }
-
-    // The sqoop-specific additional args are then added.
-    if (null != additionalArgv) {
-      boolean prevIsFlag = false;
-      for (String arg : additionalArgv) {
-        if (arg.equals("-D")) {
-          prevIsFlag = true;
-          continue;
-        } else if (prevIsFlag) {
-          prevIsFlag = false;
-          continue;
-        } else {
-         // normal argument.
-          args.add(arg);
-        }
-      }
-    }
-
-    args.add("--table");
-    args.add(getTableName(dt));
-    args.add("--export-dir");
-    args.add(getTablePath(dt).toString());
-    args.add("--connect");
-    args.add(MSSQLTestUtils.getDBConnectString());
-    args.add("--fields-terminated-by");
-    args.add("\\t");
-    args.add("--lines-terminated-by");
-    args.add("\\n");
-    args.add("-m");
-    args.add("1");
-
-    LOG.debug("args:");
-    for (String a : args) {
-     LOG.debug("  " + a);
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  public String getOutputFileName() {
-    return "ManagerCompatExportSeq.txt";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
new file mode 100644
index 0000000..a68ed30
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.lib.RecordParser;
+import com.cloudera.sqoop.lib.SqoopRecord;
+import com.cloudera.sqoop.tool.CodeGenTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test to export sequence file to SQL Server.
+ *
+ * This uses JDBC to export data to an SQLServer database from HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerDatatypeExportSequenceFileTest or -Dthirdparty=true.
+ *
+ * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerDatatypeExportSequenceFileTest
+    extends ManagerCompatExport {
+
+  private static Map jars = new HashMap();
+
+  @Override
+  public void createFile(DATATYPES dt, String[] data) throws Exception {
+    try {
+      codeGen(dt);
+      // Instantiate the value record object via reflection.
+      Class cls = Class.forName(getTableName(dt), true, Thread
+        .currentThread().getContextClassLoader());
+      SqoopRecord record = (SqoopRecord) ReflectionUtils.newInstance(cls,
+        new Configuration());
+
+      // Create the SequenceFile.
+      Configuration conf = new Configuration();
+      String hdfsroot;
+      hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
+      if (hdfsroot == null){
+        hdfsroot ="hdfs://localhost/";
+      }
+      conf.set("fs.default.name", hdfsroot);
+      FileSystem fs = FileSystem.get(conf);
+      Path tablePath = getTablePath(dt);
+      Path filePath = new Path(tablePath, getTableName(dt));
+      fs.mkdirs(tablePath);
+      SequenceFile.Writer w = SequenceFile.createWriter(fs, conf,
+        filePath, LongWritable.class, cls);
+
+      int cnt = 0;
+      for (String tmp : data) {
+        record.parse(tmp + "\n");
+        w.append(new LongWritable(cnt), record);
+      }
+
+      w.close();
+    } catch (ClassNotFoundException cnfe) {
+     throw new IOException(cnfe);
+    } catch (RecordParser.ParseError pe) {
+     throw new IOException(pe);
+    }
+  }
+
+  @Override
+  public void createFile(DATATYPES dt, String data) throws Exception {
+    createFile(dt, new String[] { data });
+  }
+
+  public String[] codeGen(DATATYPES dt) throws Exception {
+
+    CodeGenTool codeGen = new CodeGenTool();
+
+    String[] codeGenArgs = getCodeGenArgv(dt);
+    SqoopOptions options = codeGen.parseArguments(codeGenArgs, null, null,
+      true);
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+
+    options.setUsername(username);
+    options.setPassword(password);
+    codeGen.validateOptions(options);
+
+    int ret = codeGen.run(options);
+    assertEquals(0, ret);
+    List<String> generatedJars = codeGen.getGeneratedJarFiles();
+
+    assertNotNull(generatedJars);
+    assertEquals("Expected 1 generated jar file", 1, generatedJars.size());
+    String jarFileName = generatedJars.get(0);
+    // Sqoop generates jars named "foo.jar"; by default, this should contain
+    // a class named 'foo'. Extract the class name.
+    Path jarPath = new Path(jarFileName);
+    String jarBaseName = jarPath.getName();
+    assertTrue(jarBaseName.endsWith(".jar"));
+    assertTrue(jarBaseName.length() > ".jar".length());
+    String className = jarBaseName.substring(0, jarBaseName.length()
+      - ".jar".length());
+
+    LOG.info("Using jar filename: " + jarFileName);
+    LOG.info("Using class name: " + className);
+
+    ClassLoader prevClassLoader = null;
+
+
+    if (null != jarFileName) {
+    prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+      className);
+    System.out.println("Jar,class =" + jarFileName + " , "
+      + className);
+    }
+
+    // Now run and verify the export.
+    LOG.info("Exporting SequenceFile-based data");
+    jars.put(dt, jarFileName);
+    return (getArgv(dt, "--class-name", className, "--jar-file",
+     jarFileName));
+  }
+
+  @Override
+  protected String[] getArgv(DATATYPES dt) {
+
+    String[] args = super.getArgv(dt);
+    String[] addtionalArgs = Arrays.copyOf(args, args.length + 4);
+
+    String[] additional = new String[4];
+    additional[0] = "--class-name";
+    additional[1] = getTableName(dt);
+    additional[2] = "--jar-file";
+    additional[3] = jars.get(dt).toString();
+    for (int i = args.length, j = 0; i < addtionalArgs.length; i++, j++) {
+     addtionalArgs[i] = additional[j];
+    }
+
+    for (String a : addtionalArgs) {
+     System.out.println(a);
+    }
+    return addtionalArgs;
+  }
+
+  /**
+  * @return an argv for the CodeGenTool to use when creating tables to
+  *         export.
+  */
+  protected String[] getCodeGenArgv(DATATYPES dt) {
+    List<String> codeGenArgv = new ArrayList<String>();
+
+    codeGenArgv.add("--table");
+    codeGenArgv.add(getTableName(dt));
+    codeGenArgv.add("--connect");
+    codeGenArgv.add(MSSQLTestUtils.getDBConnectString());
+    codeGenArgv.add("--fields-terminated-by");
+    codeGenArgv.add("\\t");
+    codeGenArgv.add("--lines-terminated-by");
+    codeGenArgv.add("\\n");
+
+    return codeGenArgv.toArray(new String[0]);
+  }
+
+  protected String[] getArgv(DATATYPES dt, String... additionalArgv) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    // Any additional Hadoop flags (-D foo=bar) are prepended.
+    if (null != additionalArgv) {
+      boolean prevIsFlag = false;
+      for (String arg : additionalArgv) {
+        if (arg.equals("-D")) {
+          args.add(arg);
+          prevIsFlag = true;
+        } else if (prevIsFlag) {
+          args.add(arg);
+          prevIsFlag = false;
+        }
+      }
+    }
+
+    // The sqoop-specific additional args are then added.
+    if (null != additionalArgv) {
+      boolean prevIsFlag = false;
+      for (String arg : additionalArgv) {
+        if (arg.equals("-D")) {
+          prevIsFlag = true;
+          continue;
+        } else if (prevIsFlag) {
+          prevIsFlag = false;
+          continue;
+        } else {
+         // normal argument.
+          args.add(arg);
+        }
+      }
+    }
+
+    args.add("--table");
+    args.add(getTableName(dt));
+    args.add("--export-dir");
+    args.add(getTablePath(dt).toString());
+    args.add("--connect");
+    args.add(MSSQLTestUtils.getDBConnectString());
+    args.add("--fields-terminated-by");
+    args.add("\\t");
+    args.add("--lines-terminated-by");
+    args.add("\\n");
+    args.add("-m");
+    args.add("1");
+
+    LOG.debug("args:");
+    for (String a : args) {
+     LOG.debug("  " + a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  public String getOutputFileName() {
+    return "ManagerCompatExportSeq.txt";
+  }
+
+}


Mime
View raw message