sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jar...@apache.org
Subject [2/2] git commit: SQOOP-1035: Add MS Sqoop Connector tests
Date Tue, 18 Jun 2013 22:31:10 GMT
SQOOP-1035: Add MS Sqoop Connector tests

(Shuaishuai Nie via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/64878c64
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/64878c64
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/64878c64

Branch: refs/heads/trunk
Commit: 64878c643e5c9cef77e6c74b8392df17b72e7096
Parents: 7a2079e
Author: Jarek Jarcec Cecho <jarcec@apache.org>
Authored: Tue Jun 18 15:30:24 2013 -0700
Committer: Jarek Jarcec Cecho <jarcec@apache.org>
Committed: Tue Jun 18 15:30:24 2013 -0700

----------------------------------------------------------------------
 build.xml                                       |  15 +
 .../com/cloudera/sqoop/ThirdPartyTests.java     |  26 +
 .../com/cloudera/sqoop/hive/TestHiveImport.java |  20 +-
 .../sqoop/manager/sqlserver/MSSQLTestData.java  | 103 +++
 .../sqlserver/MSSQLTestDataFileParser.java      | 158 ++++
 .../sqoop/manager/sqlserver/MSSQLTestUtils.java | 183 +++++
 .../manager/sqlserver/ManagerCompatExport.java  | 685 ++++++++++++++++
 ...erDatatypeExportDelimitedFileManualTest.java |  71 ++
 ...verDatatypeExportSequenceFileManualTest.java | 255 ++++++
 ...erDatatypeImportDelimitedFileManualTest.java | 240 ++++++
 ...verDatatypeImportSequenceFileManualTest.java | 803 +++++++++++++++++++
 .../SQLServerHiveImportManualTest.java          | 162 ++++
 .../sqlserver/SQLServerManagerManualTest.java   | 348 ++++++++
 .../sqlserver/SQLServerMultiColsManualTest.java | 103 +++
 .../sqlserver/SQLServerMultiMapsManualTest.java | 299 +++++++
 .../SQLServerParseMethodsManualTest.java        | 252 ++++++
 .../sqlserver/SQLServerQueryManualTest.java     | 278 +++++++
 .../sqlserver/SQLServerSplitByManualTest.java   | 247 ++++++
 .../sqlserver/SQLServerWhereManualTest.java     | 270 +++++++
 testdata/DatatypeTestData-export-lite.txt       |  17 +
 testdata/DatatypeTestData-import-lite.txt       |  20 +
 21 files changed, 4548 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index ffc878e..ef657e3 100644
--- a/build.xml
+++ b/build.xml
@@ -262,6 +262,12 @@
 
   <property name="java.security.krb5.kdc"
             value="kdc0.ox.ac.uk:kdc1.ox.ac.uk"/>
+  <property name="ms.sqlserver.username"
+            value="SQOOPUSER"/>
+
+  <property name="ms.sqlserver.password"
+            value="PASSWORD"/>
+
 
   <condition property="windows">
     <os family="windows" />
@@ -744,6 +750,15 @@
       <sysproperty key="test.build.data" value="${build.test}/data"/>
       <sysproperty key="build.test" value="${build.test}"/>
 
+      <!-- microsoft sqlserver manual test related properties-->
+      <sysproperty key="test.data.dir" value="${basedir}/testdata"/>
+      <sysproperty key="ms.datatype.test.data.file.export" value="DatatypeTestData-export-lite.txt"/>
+      <sysproperty key="ms.datatype.test.data.file.import" value="DatatypeTestData-import-lite.txt"/>
+      <sysproperty key="ms.datatype.test.data.file.delim" value=","/>
+      <sysproperty key="ms.datatype.test.hdfsprefix" value="file:///"/>
+      <sysproperty key="ms.sqlserver.username" value="${ms.sqlserver.username}"/>
+      <sysproperty key="ms.sqlserver.password" value="${ms.sqlserver.password}"/>
+
       <sysproperty key="net.sourceforge.cobertura.datafile"
           value="${cobertura.dir}/cobertura-${cobertura.testset}.ser" />
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/com/cloudera/sqoop/ThirdPartyTests.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/ThirdPartyTests.java b/src/test/com/cloudera/sqoop/ThirdPartyTests.java
index 7b1fa26..7fae052 100644
--- a/src/test/com/cloudera/sqoop/ThirdPartyTests.java
+++ b/src/test/com/cloudera/sqoop/ThirdPartyTests.java
@@ -41,6 +41,18 @@ import com.cloudera.sqoop.manager.OracleManagerTest;
 import com.cloudera.sqoop.manager.OracleCompatTest;
 import com.cloudera.sqoop.manager.PostgresqlExportTest;
 import com.cloudera.sqoop.manager.PostgresqlImportTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeExportDelimitedFileManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeExportSequenceFileManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeImportDelimitedFileManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeImportSequenceFileManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerHiveImportManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerManagerManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerMultiColsManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerMultiMapsManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerParseMethodsManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerQueryManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerSplitByManualTest;
+import org.apache.sqoop.manager.sqlserver.SQLServerWhereManualTest;
 
 /**
  * Test battery including all tests of vendor-specific ConnManager
@@ -68,6 +80,20 @@ public final class ThirdPartyTests extends TestCase {
     suite.addTestSuite(OracleManagerTest.class);
     suite.addTestSuite(OracleCompatTest.class);
 
+    // SQL Server
+    suite.addTestSuite(SQLServerDatatypeExportDelimitedFileManualTest.class);
+    suite.addTestSuite(SQLServerDatatypeExportSequenceFileManualTest.class);
+    suite.addTestSuite(SQLServerDatatypeImportDelimitedFileManualTest.class);
+    suite.addTestSuite(SQLServerDatatypeImportSequenceFileManualTest.class);
+    suite.addTestSuite(SQLServerHiveImportManualTest.class);
+    suite.addTestSuite(SQLServerManagerManualTest.class);
+    suite.addTestSuite(SQLServerMultiColsManualTest.class);
+    suite.addTestSuite(SQLServerMultiMapsManualTest.class);
+    suite.addTestSuite(SQLServerParseMethodsManualTest.class);
+    suite.addTestSuite(SQLServerQueryManualTest.class);
+    suite.addTestSuite(SQLServerSplitByManualTest.class);
+    suite.addTestSuite(SQLServerWhereManualTest.class);
+
     // PostgreSQL
     suite.addTestSuite(PostgresqlImportTest.class);
     suite.addTestSuite(PostgresqlExportTest.class);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 9c47bad..9c2a91c 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -20,6 +20,7 @@ package com.cloudera.sqoop.hive;
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -67,7 +68,7 @@ public class TestHiveImport extends ImportJobTestCase {
    * to DATA_COLi for 0 &lt;= i &lt; numCols.
    * @param numCols the number of columns to be created.
    */
-  private void setNumCols(int numCols) {
+  protected void setNumCols(int numCols) {
     String [] cols = new String[numCols];
     for (int i = 0; i < numCols; i++) {
       cols[i] = "DATA_COL" + i;
@@ -76,6 +77,11 @@ public class TestHiveImport extends ImportJobTestCase {
     setColNames(cols);
   }
 
+  protected String[] getTypesNewLineTest() {
+    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    return types;
+  }
+
   /**
    * Create the argv to pass to Sqoop.
    * @return the argv as an array of strings.
@@ -98,7 +104,7 @@ public class TestHiveImport extends ImportJobTestCase {
     args.add("--warehouse-dir");
     args.add(getWarehouseDir());
     args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
+    args.add(getConnectString());
     args.add("--hive-import");
     String [] colNames = getColNames();
     if (null != colNames) {
@@ -135,7 +141,7 @@ public class TestHiveImport extends ImportJobTestCase {
     args.add("--table");
     args.add(getTableName());
     args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
+    args.add(getConnectString());
 
     return args.toArray(new String[0]);
   }
@@ -149,7 +155,7 @@ public class TestHiveImport extends ImportJobTestCase {
     args.add("--table");
     args.add(getTableName());
     args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
+    args.add(getConnectString());
     args.add("--hive-import");
 
     return args.toArray(new String[0]);
@@ -164,7 +170,7 @@ public class TestHiveImport extends ImportJobTestCase {
     args.add("--table");
     args.add(getTableName());
     args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
+    args.add(getConnectString());
 
     if (null != extraArgs) {
       for (String arg : extraArgs) {
@@ -357,7 +363,7 @@ public class TestHiveImport extends ImportJobTestCase {
     LOG.info("Doing import of single row into FIELD_WITH_NL_HIVE_IMPORT table");
     setCurTableName(TABLE_NAME);
     setNumCols(3);
-    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String[] types = getTypesNewLineTest();
     String[] vals = { "'test with \n new lines \n'", "42",
         "'oh no " + '\01' + " field delims " + '\01' + "'", };
     String[] moreArgs = { "--"+ BaseSqoopTool.HIVE_DROP_DELIMS_ARG };
@@ -406,7 +412,7 @@ public class TestHiveImport extends ImportJobTestCase {
         + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
     setCurTableName(TABLE_NAME);
     setNumCols(3);
-    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String[] types = getTypesNewLineTest();
     String[] vals = { "'test with\nnew lines\n'", "42",
         "'oh no " + '\01' + " field delims " + '\01' + "'", };
     String[] moreArgs = { "--"+BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " "};

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestData.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestData.java b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestData.java
new file mode 100644
index 0000000..02f06e4
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestData.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.util.HashMap;
+import java.util.Map;
+/**
+ * Utilities wrapper class to store properties for
+ * a value of a specific data type used for test
+ * The properties are enumerated in the KEY_STRING
+ */
+public class MSSQLTestData implements Comparable {
+
+  MSSQLTestData(String datatypename) {
+    this.datatype = datatypename;
+    this.data = new HashMap();
+  }
+
+  private MSSQLTestData() {
+
+  }
+
+  private String datatype;
+
+  public String getDatatype() {
+    return datatype;
+  }
+
+  public void setDatatype(String datatype1) {
+    this.datatype = datatype1;
+  }
+
+  //SCALE: scale of the data
+  //PREC: precision of the data
+  //TO_INSERT: value of the data type
+  //DB_READBACK: expected value read from database
+  //HDFS_READBACK: expected value read from HDFS
+  //NEG_POS_FLAG: mark if the test on the data type is expected to fail
+  //OFFSET: line offset of the data in the input file
+  enum KEY_STRINGS {
+    SCALE, PREC, TO_INSERT, DB_READBACK, HDFS_READBACK, NEG_POS_FLAG, OFFSET,
+  }
+
+  private Map data;
+
+  public String getData(KEY_STRINGS ks) {
+    String ret;
+    try {
+      ret = data.get(ks).toString();
+    } catch (Exception e) {
+      return null;
+    }
+    return ret;
+  }
+
+  public void setData(KEY_STRINGS ks, String value) {
+    this.data.put(ks, value);
+  }
+
+  public String toString() {
+    String tmp = this.datatype;
+
+    for (KEY_STRINGS a : KEY_STRINGS.values()) {
+
+      tmp += "\n" + a.toString() + " : " + this.getData(a);
+    }
+    return tmp;
+  }
+
+  public int hashCode() {
+    return super.hashCode();
+  }
+
+  public boolean equals(Object o) {
+    int c1 = Integer.getInteger(this.getData(KEY_STRINGS.OFFSET));
+    int c2 = Integer.getInteger(((MSSQLTestData) o)
+        .getData(KEY_STRINGS.OFFSET));
+    return (c1 == c2);
+  }
+
+  public int compareTo(Object o) {
+    int c1 = Integer.getInteger(this.getData(KEY_STRINGS.OFFSET));
+    int c2 = Integer.getInteger(((MSSQLTestData) o)
+        .getData(KEY_STRINGS.OFFSET));
+    return (c1 - c2);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestDataFileParser.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestDataFileParser.java b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestDataFileParser.java
new file mode 100644
index 0000000..db7dcc2
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestDataFileParser.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestData.KEY_STRINGS;
+
+
+/**
+* Class to parse sql server data types
+*/
+public class MSSQLTestDataFileParser {
+
+  public static final Log LOG = LogFactory.getLog(
+      MSSQLTestDataFileParser.class.getName());
+
+  private String filename;
+  private String delim;
+  private List records;
+
+  MSSQLTestDataFileParser(String filename) throws Exception {
+    this.filename = filename;
+
+  }
+
+  enum DATATYPES {
+    DECIMAL, NUMERIC, VARBINARY, TIME, SMALLDATETIME, DATETIME, DATETIME2,
+    DATETIMEOFFSET, BIGINT, INT, MONEY, SMALLMONEY, TEXT, NTEXT, NCHAR,
+    NVARCHAR, IMAGE, SMALLINT, FLOAT, REAL, DATE, CHAR, VARCHAR, BINARY,
+    TINYINT;
+
+  }
+
+  public void parse() throws Exception {
+    if (this.filename == null) {
+      throw new Exception("No test data file specified.");
+    }
+
+    BufferedReader br = new BufferedReader(new FileReader(this.filename));
+
+    if (br != null) {
+      records = new ArrayList();
+
+      String tmp;
+      String del = this.getDelim();
+      int offset = 0;
+      while ((tmp = br.readLine()) != null) {
+        offset++;
+        String[] splits = tmp.split(del);
+
+        if (splits.length == 5 || splits.length == 6
+            || splits.length == 7) {
+          System.out.println(Integer.toString(offset));
+          MSSQLTestData td = new MSSQLTestData(splits[0]);
+          td.setData(KEY_STRINGS.OFFSET, Integer.toString(offset));
+
+          if (splits[0].equals(DATATYPES.DECIMAL.toString())
+              || splits[0].equals(DATATYPES.NUMERIC.toString())) {
+
+            td.setData(KEY_STRINGS.TO_INSERT, splits[1]);
+            td.setData(KEY_STRINGS.DB_READBACK, splits[2]);
+            td.setData(KEY_STRINGS.HDFS_READBACK, splits[3]);
+            td.setData(KEY_STRINGS.SCALE, splits[4]);
+            td.setData(KEY_STRINGS.PREC, splits[5]);
+            td.setData(KEY_STRINGS.NEG_POS_FLAG, splits[6]);
+
+            records.add(td);
+          } else if (splits[0].equals(DATATYPES.NCHAR.toString())
+              || splits[0].equals(DATATYPES.VARBINARY.toString())
+              || splits[0].equals(DATATYPES.NVARCHAR.toString())
+              || splits[0].equals(DATATYPES.CHAR.toString())
+              || splits[0].equals(DATATYPES.VARCHAR.toString())
+              || splits[0].equals(DATATYPES.BINARY.toString())) {
+
+            td.setData(KEY_STRINGS.TO_INSERT, splits[1]);
+            td.setData(KEY_STRINGS.DB_READBACK, splits[2]);
+            td.setData(KEY_STRINGS.HDFS_READBACK, splits[3]);
+            td.setData(KEY_STRINGS.SCALE, splits[4]);
+            td.setData(KEY_STRINGS.NEG_POS_FLAG, splits[5]);
+
+            records.add(td);
+
+          } else {
+            td.setData(KEY_STRINGS.TO_INSERT, splits[1]);
+            td.setData(KEY_STRINGS.DB_READBACK, splits[2]);
+            td.setData(KEY_STRINGS.HDFS_READBACK, splits[3]);
+            td.setData(KEY_STRINGS.NEG_POS_FLAG, splits[4]);
+
+            records.add(td);
+          }
+
+        }
+
+      }
+      System.out.println("\n\n Records" + records.size() + "\n\n");
+    }
+
+  }
+
+  public List getRecords() {
+    return records;
+  }
+
+  public List getTestdata(DATATYPES dt) {
+    List l;
+    l = new ArrayList();
+
+    if (records != null) {
+      for (Iterator<MSSQLTestData> i = records.iterator(); i.hasNext();) {
+        MSSQLTestData tmp = i.next();
+        if (tmp.getDatatype().equals(dt.toString())) {
+          l.add(tmp);
+        }
+      }
+    }
+
+    return l;
+  }
+
+  private void trim(String[] strings) {
+    for (int i = 0; i < strings.length; i++) {
+      strings[i] = strings[i].trim();
+    }
+
+  }
+
+  public String getDelim() {
+    return delim;
+  }
+
+  public void setDelim(String delim1) {
+    this.delim = delim1;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestUtils.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestUtils.java b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestUtils.java
new file mode 100644
index 0000000..851bf49
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/MSSQLTestUtils.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+* Test utilities for SQL Server manual tests.
+*/
+public class MSSQLTestUtils {
+
+  public static final Log LOG = LogFactory.getLog(
+      MSSQLTestUtils.class.getName());
+
+  static final String DATABASE_USER = System.getProperty(
+          "ms.sqlserver.username", "SQOOPUSER");
+  static final String DATABASE_PASSWORD = System.getProperty(
+          "ms.sqlserver.password", "PASSWORD");
+  public static final String HOST_URL = System.getProperty(
+          "sqoop.test.sqlserver.connectstring.host_url",
+          "jdbc:sqlserver://sqlserverhost:1433");
+
+  public static final String CREATE_TALBE_LINEITEM
+    = "CREATE TABLE TPCH1M_LINEITEM"
+    + "( [L_ORDERKEY] [int] NULL, [L_PARTKEY] "
+    + "[int] NULL, [L_SUPPKEY] [int] NULL, [L_LINENUMBER] [int] NULL, "
+    + "[L_QUANTITY] [int] NULL, [L_EXTENDEDPRICE] [decimal](15, 2) NULL, "
+    + "[L_DISCOUNT] [decimal](15, 2) NULL, [L_TAX] [decimal](15, 2) NULL,"
+    + " [L_RETURNFLAG] [varchar](max) NULL, [L_LINESTATUS] [varchar](max)"
+    + " NULL, [L_SHIPDATE] [varchar](max) NULL, [L_COMMITDATE] [varchar](max)"
+    + " NULL, [L_RECEIPTDATE] [varchar](max) NULL, [L_SHIPINSTRUCT] [varchar]"
+    + "(max) NULL, [L_SHIPMODE] [varchar](max) NULL, [L_COMMENT] [varchar]"
+    + "(max) NULL) ";
+
+  private Connection conn = null;
+
+  private Connection getConnection() {
+
+    if (conn == null) {
+
+      try {
+        Connection con = DriverManager.getConnection(HOST_URL,
+            DATABASE_USER, DATABASE_PASSWORD);
+        conn = con;
+        return con;
+      } catch (SQLException e) {
+        LOG.error("Get SQLException during setting up connection: " + StringUtils.stringifyException(e));
+        return null;
+      }
+    }
+
+    return conn;
+  }
+
+  public void createTableFromSQL(String sql) throws SQLException {
+    Connection dbcon = this.getConnection();
+
+    System.out.println("SQL : " + sql);
+    this.dropTableIfExists("TPCH1M_LINEITEM");
+
+    try {
+      Statement st = dbcon.createStatement();
+      int res = st.executeUpdate(sql);
+      System.out.println("Result : " + res);
+
+    } catch (SQLException e) {
+      LOG.error("Got SQLException during creating table: " + StringUtils.stringifyException(e));
+    }
+
+  }
+
+  public void populateLineItem() {
+    String sql = "insert into tpch1m_lineitem values (1,2,3,4,5,6,7,8,'AB',"
+        + "'CD','abcd','efgh','hijk','dothis','likethis','nocomments')";
+    String sql2 = "insert into tpch1m_lineitem values (2,3,4,5,6,7,8,9,'AB'"
+        + ",'CD','abcd','efgh','hijk','dothis','likethis','nocomments')";
+    String sql3 = "insert into tpch1m_lineitem values (3,4,5,6,7,8,9,10,'AB',"
+        + "'CD','abcd','efgh','hijk','dothis','likethis','nocomments')";
+    String sql4 = "insert into tpch1m_lineitem values (4,5,6,7,8,9,10,11,'AB'"
+        + ",'CD','abcd','efgh','hijk','dothis','likethis','nocomments')";
+    Connection dbcon = this.getConnection();
+    Statement st;
+    try {
+      st = dbcon.createStatement();
+      st.addBatch(sql);
+      st.addBatch(sql2);
+      st.addBatch(sql3);
+      st.addBatch(sql4);
+      int[] res = st.executeBatch();
+
+      System.out.println(res);
+    } catch (SQLException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+
+  }
+
+  public void metadataStuff(String table) {
+    Connection dbcon = this.getConnection();
+    String sql = "select top 1 * from " + table;
+
+    Statement st;
+    try {
+
+      st = dbcon.createStatement();
+      ResultSet rs = st.executeQuery(sql);
+      ResultSetMetaData rsmd = rs.getMetaData();
+
+      for (int i = 1; i <= rsmd.getColumnCount(); i++) {
+        System.out.println(rsmd.getColumnName(i) + "\t"
+            + rsmd.getColumnClassName(i) + "\t"
+            + rsmd.getColumnType(i) + "\t"
+            + rsmd.getColumnTypeName(i) + "\n");
+      }
+
+    } catch (SQLException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+
+  }
+
+  public static String getDBUserName() {
+    return DATABASE_USER;
+  }
+
+  public static String getDBPassWord() {
+    return DATABASE_PASSWORD;
+  }
+
+  public void dropTableIfExists(String table) throws SQLException {
+    conn = getConnection();
+    System.out.println("Dropping table : " + table);
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  public static String[] getColumns() {
+    return new String[] { "L_ORDERKEY", "L_PARTKEY", "L_SUPPKEY",
+        "L_LINENUMBER", "L_QUANTITY", "L_EXTENDEDPRICE", "L_DISCOUNT",
+        "L_TAX", "L_RETURNFLAG", "L_LINESTATUS", "L_SHIPDATE",
+        "L_COMMITDATE", "L_RECEIPTDATE", "L_SHIPINSTRUCT",
+        "L_SHIPMODE", "L_COMMENT", };
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
new file mode 100644
index 0000000..73976a3
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
@@ -0,0 +1,685 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.FileWriter;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestData.KEY_STRINGS;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+import org.junit.Before;
+import org.junit.Test;
+import com.cloudera.sqoop.Sqoop;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import com.cloudera.sqoop.tool.ExportTool;
+
+/**
+ * Test utilities for export to SQL Server.
+*/
+public abstract class ManagerCompatExport extends ExportJobTestCase {
+
+  private MSSQLTestDataFileParser tdfs;
+
+  public void createTable(DATATYPES dt) throws SQLException {
+    String tname = getTableName(dt);
+    String createTableSql = "CREATE TABLE " + tname + " ( " + getColName()
+        + " " + dt.toString() + " )";
+
+    dropTableIfExists(tname);
+
+    Connection conn = getManager().getConnection();
+    PreparedStatement statement = conn.prepareStatement(createTableSql,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    conn.commit();
+    statement.close();
+
+  }
+
+  public void createTable(DATATYPES dt, MSSQLTestData td) throws Exception {
+    String tname = getTableName(dt);
+    String scale = td.getData(KEY_STRINGS.SCALE);
+    String precision = td.getData(KEY_STRINGS.PREC);
+    String createTableSql = "";
+    if (scale != null && precision != null) {
+      // this is decimal/numeric thing
+      createTableSql = "CREATE TABLE " + tname + " (" + getColName()
+          + " " + dt.toString() + "(" + scale + "," + precision
+          + ") )";
+    } else if (scale != null && precision == null) {
+      // this is decimal/numeric thing
+      createTableSql = "CREATE TABLE " + tname + " ( " + getColName()
+          + " " + dt.toString() + "(" + scale + ") )";
+    } else {
+      throw new Exception("Invalid data for create table");
+    }
+
+    dropTableIfExists(tname);
+
+    Connection conn = getManager().getConnection();
+    PreparedStatement statement = conn.prepareStatement(createTableSql,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    conn.commit();
+    statement.close();
+
+  }
+
+  public String getColName() {
+    return "COL_1";
+  }
+
+  public String getTableName(DATATYPES dt) {
+    return "t_" + dt.toString();
+  }
+
+  public Path getTablePath(DATATYPES dt) {
+    Path warehousePath = new Path(getWarehouseDir());
+    Path tablePath = new Path(warehousePath, getTableName(dt));
+    return tablePath;
+
+  }
+
+  public abstract void createFile(DATATYPES dt, String[] data)
+      throws Exception;
+
+  public abstract void createFile(DATATYPES dt, String data) throws Exception;
+
+  // One driver method per SQL Server datatype; each delegates to
+  // exportTestMethod, which runs the data-driven export scenario.
+  // NOTE(review): testVarBinary, testTime, testDecimal, testNumeric,
+  // testMoney, testImage and testBinary carry no @Test annotation —
+  // presumably disabled on purpose (some are overridden/skipped in
+  // subclasses), but confirm they are meant not to run under JUnit 4.
+  public void testVarBinary() {
+
+    exportTestMethod(DATATYPES.VARBINARY);
+
+  }
+
+  public void testTime() {
+
+    exportTestMethod(DATATYPES.TIME);
+
+  }
+
+  @Test
+  public void testSmalldatetime() {
+
+    exportTestMethod(DATATYPES.SMALLDATETIME);
+
+  }
+
+  @Test
+  public void testdatetime2() {
+
+    exportTestMethod(DATATYPES.DATETIME2);
+
+  }
+
+  @Test
+  public void testdatetime() {
+
+    exportTestMethod(DATATYPES.DATETIME);
+
+  }
+
+  @Test
+  public void testdatetimeoffset() {
+
+    exportTestMethod(DATATYPES.DATETIMEOFFSET);
+
+  }
+
+  public void testDecimal() {
+    exportTestMethod(DATATYPES.DECIMAL);
+
+  }
+
+  public void testNumeric() {
+    exportTestMethod(DATATYPES.NUMERIC);
+
+  }
+
+  @Test
+  public void testBigInt() {
+
+    exportTestMethod(DATATYPES.BIGINT);
+  }
+
+  @Test
+  public void testInt() {
+    exportTestMethod(DATATYPES.INT);
+
+  }
+
+  @Test
+  public void testSmallInt() {
+    exportTestMethod(DATATYPES.SMALLINT);
+
+  }
+
+  @Test
+  public void testTinyint() {
+    exportTestMethod(DATATYPES.TINYINT);
+
+  }
+
+  @Test
+  public void testFloat() {
+    exportTestMethod(DATATYPES.FLOAT);
+
+  }
+
+  @Test
+  public void testReal() {
+    exportTestMethod(DATATYPES.REAL);
+
+  }
+
+  @Test
+  public void testDate() {
+    exportTestMethod(DATATYPES.DATE);
+
+  }
+
+  public void testMoney() {
+    exportTestMethod(DATATYPES.MONEY);
+
+  }
+
+  @Test
+  public void testSmallMoney() {
+    exportTestMethod(DATATYPES.SMALLMONEY);
+
+  }
+
+  @Test
+  public void testText() {
+    exportTestMethod(DATATYPES.TEXT);
+
+  }
+
+  @Test
+  public void testVarchar() {
+    exportTestMethod(DATATYPES.VARCHAR);
+
+  }
+
+  @Test
+  public void testChar() {
+    exportTestMethod(DATATYPES.CHAR);
+
+  }
+
+  @Test
+  public void testNText() {
+    exportTestMethod(DATATYPES.NTEXT);
+
+  }
+
+  @Test
+  public void testNChar() {
+    exportTestMethod(DATATYPES.NCHAR);
+
+  }
+
+  @Test
+  public void testNVarchar() {
+    exportTestMethod(DATATYPES.NVARCHAR);
+
+  }
+
+  public void testImage() {
+    exportTestMethod(DATATYPES.IMAGE);
+
+  }
+
+  public void testBinary() {
+    exportTestMethod(DATATYPES.BINARY);
+
+  }
+
+  /**
+   * Data-driven export check for one datatype: for every sample in the
+   * test-data file, creates the table, writes the HDFS-side input file,
+   * runs the export job and verifies the database readback. Samples
+   * flagged "NEG" are expected to fail; any unexpected failure is logged,
+   * added to the report, and surfaces as an AssertionError at the end.
+   */
+  public void exportTestMethod(DATATYPES dt) {
+    int exceptionCount = 0;
+
+    // NOTE(review): raw List; elements are MSSQLTestData (see loop below).
+    List testdata = tdfs.getTestdata(dt);
+    System.out.println("Total Samples found : " + testdata.size());
+    for (Iterator<MSSQLTestData> itr = testdata.iterator(); itr.hasNext();) {
+      MSSQLTestData current = itr.next();
+      System.out.println("Testing with : \n" + current);
+
+      try {
+
+        // Datatypes whose DDL needs no per-sample precision/scale use the
+        // one-arg createTable(dt); the second branch passes the sample so
+        // precision/scale can come from the test data.
+        if (dt.equals(DATATYPES.INT) || dt.equals(DATATYPES.BIGINT)
+            || dt.equals(DATATYPES.SMALLINT)
+            || dt.equals(DATATYPES.TINYINT)
+            || dt.equals(DATATYPES.MONEY)
+            || dt.equals(DATATYPES.SMALLMONEY)
+            || dt.equals(DATATYPES.TIME)
+            || dt.equals(DATATYPES.DATETIME)
+            || dt.equals(DATATYPES.DATE)
+            || dt.equals(DATATYPES.DATETIME2)
+            || dt.equals(DATATYPES.DATETIMEOFFSET)
+            || dt.equals(DATATYPES.REAL)
+            || dt.equals(DATATYPES.FLOAT)
+            || dt.equals(DATATYPES.SMALLDATETIME)
+            || dt.equals(DATATYPES.NTEXT)
+            || dt.equals(DATATYPES.TEXT)
+            || dt.equals(DATATYPES.IMAGE)) {
+
+          createTable(dt);
+          createFile(dt, current.getData(KEY_STRINGS.HDFS_READBACK));
+          runExport(getArgv(dt));
+          verifyExport(dt, current.getData(KEY_STRINGS.DB_READBACK));
+          addToReport(current, null);
+        } else if (dt.equals(DATATYPES.DECIMAL)
+            || (dt.equals(DATATYPES.NUMERIC)
+                || dt.equals(DATATYPES.CHAR)
+                || dt.equals(DATATYPES.VARCHAR)
+                || dt.equals(DATATYPES.NCHAR)
+                || dt.equals(DATATYPES.NVARCHAR)
+                || dt.equals(DATATYPES.VARBINARY) || dt
+                .equals(DATATYPES.BINARY))) {
+
+          createTable(dt, current);
+          createFile(dt, current.getData(KEY_STRINGS.HDFS_READBACK));
+          runExport(getArgv(dt));
+          verifyExport(dt, current.getData(KEY_STRINGS.DB_READBACK));
+          addToReport(current, null);
+        }
+
+      } catch (AssertionError ae) {
+        // A "NEG" sample is supposed to fail; its failure counts as a pass.
+        if (current.getData(KEY_STRINGS.NEG_POS_FLAG).equals("NEG")) {
+          System.out.println("failure was expected, PASS");
+          addToReport(current, null);
+        } else {
+          System.out
+              .println("------------------------------------------------------"
+                + "-----");
+          System.out.println("Failure for following Test Data :\n"
+              + current.toString());
+          System.out
+              .println("------------------------------------------------------"
+                + "-----");
+          System.out.println("Exception details : \n");
+          System.out.println(ae.getMessage());
+          System.out
+              .println("------------------------------------------------------"
+                + "-----");
+          addToReport(current, ae);
+          exceptionCount++;
+
+        }
+
+      } catch (Exception ae) {
+        // NOTE(review): duplicates the AssertionError branch above almost
+        // verbatim; a shared private helper would remove the repetition.
+        if (current.getData(KEY_STRINGS.NEG_POS_FLAG).equals("NEG")) {
+          System.out.println("failure was expected, PASS");
+          addToReport(current, null);
+        } else {
+          System.out
+              .println("------------------------------------------------------"
+                + "-----");
+          System.out.println("Failure for following Test Data :\n"
+              + current.toString());
+          System.out
+              .println("------------------------------------------------------"
+                + "-----");
+          System.out.println("Exception details : \n");
+          System.out.println(ae.getMessage());
+          System.out
+              .println("------------------------------------------------------"
+               + "-----");
+          addToReport(current, ae);
+          exceptionCount++;
+
+        }
+
+      } catch (Error e) {
+        // Errors are never expected, even for NEG samples.
+        addToReport(current, e);
+        exceptionCount++;
+      }
+    }
+    if (exceptionCount > 0) {
+
+      System.out.println("There were failures for :" + dt.toString());
+      System.out.println("Failed for " + exceptionCount + "/"
+          + testdata.size() + " test data samples\n");
+      // NOTE(review): "Sroll" is a typo for "Scroll" in the message below.
+      System.out.println("Sroll up for detailed errors");
+      System.out
+          .println("----------------------------------------------------------"
+            + "-");
+      throw new AssertionError("Failed for " + exceptionCount
+          + " test data sample");
+    }
+
+  }
+
+  /* Helpers below filter test data samples and verify exported rows. */
+
+  /**
+   * Pulls out of {@code data} the read-back strings (of the requested
+   * kind) for every sample whose NEG_POS_FLAG matches {@code negPosFlag}.
+   */
+  public String[] extractData(List data, String negPosFlag,
+      KEY_STRINGS readBackType) {
+    List<String> matches = new ArrayList<String>();
+    for (Object item : data) {
+      MSSQLTestData sample = (MSSQLTestData) item;
+      if (sample.getData(KEY_STRINGS.NEG_POS_FLAG).toString().equals(
+          negPosFlag)) {
+        matches.add(sample.getData(readBackType));
+      }
+    }
+    return matches.toArray(new String[matches.size()]);
+  }
+
+  /**
+   * Reads back COL_1 from the datatype's table and asserts the rows match
+   * {@code data} in order, and that the row count matches.
+   *
+   * @param dt the datatype whose table is read back.
+   * @param data expected values in row order; a null entry means the
+   *             column must be NULL for that row.
+   * @throws SQLException if the readback query fails.
+   */
+  public void verifyExport(DATATYPES dt, String[] data) throws SQLException {
+    LOG.info("Verifying export: " + getTableName());
+    // Check that we got back the correct number of records.
+    Connection conn = getManager().getConnection();
+
+    PreparedStatement statement = conn.prepareStatement("SELECT "
+        + getColName() + " FROM " + getTableName(dt),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    System.out.println("data samples being compared : " + data.length);
+
+    ResultSet rs = null;
+    try {
+      rs = statement.executeQuery();
+      int cnt = 0;
+      try {
+        // NOTE(review): extra rows surface as an
+        // ArrayIndexOutOfBoundsException here before the count assert.
+        while (rs.next()) {
+          String tmp = rs.getString(1);
+          String expected = data[cnt++];
+          System.out.println("Readback, expected" + tmp + " :"
+              + expected);
+          if (tmp == null) {
+            assertNull("Must be null", expected);
+          } else {
+            assertEquals("Data must match", expected, tmp);
+          }
+        }
+        System.out.println("expected samples : " + data.length
+            + " Actual samples : " + cnt);
+        assertEquals("Resultset must contain expected samples",
+            data.length, cnt);
+      } finally {
+
+        rs.close();
+      }
+    } finally {
+      statement.close();
+    }
+
+  }
+
+  /**
+   * Verifies that the exported rows do NOT equal the given values.
+   * Used for negative samples, where the database is expected to have
+   * rejected or mangled the data.
+   *
+   * @param dt the datatype whose table is read back.
+   * @param data values the table must not contain, in row order.
+   * @throws SQLException if the readback query fails.
+   */
+  public void verifyNegativeExport(DATATYPES dt, String[] data)
+      throws SQLException {
+    LOG.info("Verifying export: " + getTableName());
+    // Check that we got back the correct number of records.
+    Connection conn = getManager().getConnection();
+
+    PreparedStatement statement = conn.prepareStatement("SELECT "
+        + getColName() + " FROM " + getTableName(dt),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    System.out.println("data samples being compared : " + data.length);
+
+    ResultSet rs = null;
+    try {
+      rs = statement.executeQuery();
+      int cnt = 0;
+      try {
+        while (rs.next()) {
+          String tmp = rs.getString(1);
+          String expected = data[cnt++];
+          System.out.println("Readback, expected" + tmp + " :"
+              + expected);
+          if (tmp == null) {
+            assertNull("Must be null", expected);
+          } else {
+            // BUGFIX: assertNotSame only compares references, which is
+            // vacuously true for two distinct String objects; compare
+            // the values instead.
+            assertFalse("Data must not match", expected.equals(tmp));
+          }
+        }
+
+      } finally {
+        rs.close();
+      }
+    } finally {
+      statement.close();
+    }
+
+  }
+
+  /** Single-value convenience overload of {@link #verifyExport(DATATYPES, String[])}. */
+  public void verifyExport(DATATYPES dt, String data) throws SQLException {
+    verifyExport(dt, new String[] { data });
+
+  }
+
+  /**
+   * Run a MapReduce-based export (using the argv provided to control
+   * execution).
+   *
+   * @param argv the arguments to pass to the export tool.
+   * @return the filenames of the jars generated by the export tool.
+   * @throws IOException if the export job exits with a non-zero status;
+   *                     any underlying exception is chained as the cause.
+   */
+  protected List<String> runExport(String[] argv) throws IOException {
+    // run the tool through the normal entry-point.
+    int ret;
+    List<String> generatedJars = null;
+    Exception failure = null;
+    try {
+      ExportTool exporter = new ExportTool();
+
+      Sqoop sqoop = new Sqoop(exporter);
+
+      // Credentials come from the environment, never hard-coded.
+      String username = MSSQLTestUtils.getDBUserName();
+      String password = MSSQLTestUtils.getDBPassWord();
+      sqoop.getOptions().setUsername(username);
+      sqoop.getOptions().setPassword(password);
+
+      ret = Sqoop.runSqoop(sqoop, argv);
+      generatedJars = exporter.getGeneratedJarFiles();
+    } catch (Exception e) {
+      LOG.error("Got exception running Sqoop: "
+          + StringUtils.stringifyException(e));
+      ret = 1;
+      failure = e;
+    }
+
+    // expect a successful return.
+    if (0 != ret) {
+      IOException ioe =
+          new IOException("Failure during job; return status " + ret);
+      // Preserve the original exception as the cause where one exists,
+      // instead of silently discarding it.
+      if (failure != null) {
+        ioe.initCause(failure);
+      }
+      throw ioe;
+    }
+
+    return generatedJars;
+  }
+
+  /**
+   * Per-test setup: parses the datatype test-data file, clears the
+   * warehouse directory, and (when enabled) resets the HSQLDB schema.
+   */
+  @Before
+  public void setUp() {
+    // start the server
+    super.setUp();
+    String warehouseDir = getWarehouseDir();
+    Path tablePath = new Path(warehouseDir);
+    try {
+      String testfile = System.getProperty("test.data.dir")
+          + "/" + System.getProperty("ms.datatype.test.data.file.export");
+      String delim = System.getProperty("ms.datatype.test.data.file.delim", ",");
+      tdfs = new MSSQLTestDataFileParser(testfile);
+      tdfs.setDelim(delim);
+      tdfs.parse();
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      System.out.println("Error with test data file;");
+      System.out
+          .println("check stack trace for cause.\nTests cannont continue.");
+      // BUGFIX: was System.exit(0), which killed the whole JVM with a
+      // success status and silently swallowed the failure.
+      fail("Error with test data file; tests cannot continue.");
+    }
+    try {
+      FileSystem fs = FileSystem.get(new Configuration());
+      fs.delete(tablePath, true);
+      System.out.println("Warehouse dir deleted");
+    } catch (IOException e) {
+      LOG.error("Setup fail with IOException: " +
+          StringUtils.stringifyException(e));
+    }
+    if (useHsqldbTestServer()) {
+      // throw away any existing data that might be in the database.
+      try {
+        this.getTestServer().dropExistingSchema();
+      } catch (SQLException sqlE) {
+        LOG.error("Setup fail with SQLException: " +
+            StringUtils.stringifyException(sqlE));
+        fail(sqlE.toString());
+      }
+    }
+  }
+
+  /** These tests run against a real SQL Server, not the embedded HSQLDB. */
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  /**
+   * Connect string for the target SQL Server instance; override with the
+   * sqoop.test.sqlserver.connectstring.host_url system property.
+   */
+  protected String getConnectString() {
+    return System.getProperty(
+          "sqoop.test.sqlserver.connectstring.host_url",
+          "jdbc:sqlserver://sqlserverhost:1433");
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table the name of the table to drop.
+   * @throws SQLException if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    System.out.println("DROPing Table " + table);
+    String dropSql = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+    Connection conn = getManager().getConnection();
+    PreparedStatement dropStmt = conn.prepareStatement(dropSql,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      dropStmt.executeUpdate();
+      conn.commit();
+    } finally {
+      dropStmt.close();
+    }
+  }
+
+  /** Builds SqoopOptions pre-populated with the SQL Server credentials. */
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    SqoopOptions options = new SqoopOptions(conf);
+    options.setUsername(MSSQLTestUtils.getDBUserName());
+    options.setPassword(MSSQLTestUtils.getDBPassWord());
+    return options;
+  }
+
+  /**
+   * Create the argv to pass to Sqoop for a single-mapper, comma-delimited
+   * export of the given datatype's table.
+   *
+   * @param dt the datatype under test; selects the table and export dir.
+   * @return the argv as an array of strings.
+   */
+  protected String[] getArgv(DATATYPES dt) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    args.add("--table");
+    args.add(getTableName(dt));
+    args.add("--export-dir");
+    args.add(getTablePath(dt).toString());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--fields-terminated-by");
+    args.add(",");
+    args.add("--lines-terminated-by");
+    args.add("\\n");
+    args.add("-m");
+    args.add("1");
+
+    LOG.debug("args:");
+    for (String a : args) {
+      LOG.debug("  " + a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  /** Name of the report file that {@code addToReport} appends to. */
+  public String getOutputFileName() {
+    return "ManagerCompatExport.txt";
+  }
+
+  /**
+   * Appends one tab-separated line for the given sample to the report
+   * file named by {@link #getOutputFileName()}.
+   *
+   * @param td the test data sample that was exercised.
+   * @param result null on success, otherwise the AssertionError or
+   *               Exception that failed the sample.
+   */
+  public void addToReport(MSSQLTestData td, Object result) {
+    try {
+      FileWriter fr = new FileWriter(getOutputFileName(), true);
+      try {
+        String offset = td.getData(KEY_STRINGS.OFFSET);
+        String dt = td.getDatatype();
+        String res = "_";
+        if (result == null) {
+          res = "Success";
+        } else if (result instanceof AssertionError
+            || result instanceof Exception) {
+          // The original cast blindly to AssertionError and used the
+          // resulting ClassCastException for control flow; an instanceof
+          // check expresses the same outcome directly.
+          res = "FAILED "
+              + removeNewLines(((Throwable) result).getMessage());
+        } else {
+          res = "FAILED " + result.toString();
+        }
+
+        fr.append(offset + "\t" + "\t" + res + "\t" + dt + "\t"
+            + removeNewLines(td.toString()) + "\n");
+      } finally {
+        // BUGFIX: always release the file handle; it leaked whenever
+        // formatting or the append threw.
+        fr.close();
+      }
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+  }
+
+  /**
+   * Flattens a multi-line message onto one line so it fits the
+   * tab-separated report format: each line is emitted with a single
+   * leading space (so "a\nb" becomes " a b"); null maps to "".
+   *
+   * @param str the possibly multi-line string; may be null.
+   * @return the flattened string, or "" when str is null.
+   */
+  public static String removeNewLines(String str) {
+    if (str == null) {
+      return "";
+    }
+    // StringBuilder avoids O(n^2) String concatenation in the loop.
+    StringBuilder result = new StringBuilder();
+    for (String line : str.split("\n")) {
+      result.append(' ').append(line);
+    }
+    return result.toString();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
new file mode 100644
index 0000000..099d734
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportDelimitedFileManualTest.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils.*;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.BufferedWriter;
+
+/**
+* Export delimited file SQL Server.
+*/
+public class SQLServerDatatypeExportDelimitedFileManualTest
+    extends ManagerCompatExport {
+
+  /**
+   * Writes the rows as newline-terminated text into
+   * &lt;table-path&gt;/part0000 on the test HDFS.
+   */
+  @Override
+  public void createFile(DATATYPES dt, String[] data) throws IOException {
+    Path tablePath = getTablePath(dt);
+    Path filePath = new Path(tablePath, "part0000");
+
+    Configuration conf = new Configuration();
+    String hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
+    if (hdfsroot == null) {
+      hdfsroot = "hdfs://localhost/";
+    }
+    conf.set("fs.default.name", hdfsroot);
+    FileSystem fs = FileSystem.get(conf);
+    fs.mkdirs(tablePath);
+    System.out.println("-----------------------------------Path : "
+        + filePath);
+    OutputStream os = fs.create(filePath);
+
+    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
+    try {
+      for (int i = 0; i < data.length; i++) {
+        w.write(data[i] + "\n");
+      }
+    } finally {
+      // BUGFIX: close in a finally block so a write failure cannot leak
+      // the HDFS output stream.
+      w.close();
+      os.close();
+    }
+  }
+
+  /** Single-row convenience overload. */
+  @Override
+  public void createFile(DATATYPES dt, String data) throws IOException {
+    createFile(dt, new String[] { data });
+  }
+
+  /** Separate report file so delimited runs don't mix with other modes. */
+  @Override
+  public String getOutputFileName() {
+    return "ManagerCompatExportDelim.txt";
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
new file mode 100644
index 0000000..97034a1
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils.*;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.lib.RecordParser;
+import com.cloudera.sqoop.lib.SqoopRecord;
+import com.cloudera.sqoop.tool.CodeGenTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+
+/**
+* Export sequence file to SQL Server test.
+*/
+public class SQLServerDatatypeExportSequenceFileManualTest
+    extends ManagerCompatExport {
+
+  // Maps each datatype to the jar generated for it by codeGen(dt), so
+  // getArgv(dt) can pass --class-name/--jar-file to the export job.
+  private static Map<DATATYPES, String> jars =
+      new HashMap<DATATYPES, String>();
+
+  /**
+   * Generates the record class for the datatype, then writes {@code data}
+   * into a SequenceFile of (row index, SqoopRecord) under the table path.
+   */
+  @Override
+  public void createFile(DATATYPES dt, String[] data) throws Exception {
+    try {
+      codeGen(dt);
+      // Instantiate the value record object via reflection.
+      Class cls = Class.forName(getTableName(dt), true, Thread
+          .currentThread().getContextClassLoader());
+      SqoopRecord record = (SqoopRecord) ReflectionUtils.newInstance(cls,
+          new Configuration());
+
+      // Create the SequenceFile.
+      Configuration conf = new Configuration();
+      String hdfsroot = System.getProperty("ms.datatype.test.hdfsprefix");
+      if (hdfsroot == null) {
+        hdfsroot = "hdfs://localhost/";
+      }
+      conf.set("fs.default.name", hdfsroot);
+      FileSystem fs = FileSystem.get(conf);
+      Path tablePath = getTablePath(dt);
+      Path filePath = new Path(tablePath, getTableName(dt));
+      fs.mkdirs(tablePath);
+      SequenceFile.Writer w = SequenceFile.createWriter(fs, conf,
+          filePath, LongWritable.class, cls);
+      try {
+        int cnt = 0;
+        for (String tmp : data) {
+          record.parse(tmp + "\n");
+          // BUGFIX: cnt was never incremented, so every record was
+          // written with key 0 instead of its row index.
+          w.append(new LongWritable(cnt++), record);
+        }
+      } finally {
+        // Always release the writer, even when a row fails to parse.
+        w.close();
+      }
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException(cnfe);
+    } catch (RecordParser.ParseError pe) {
+      throw new IOException(pe);
+    }
+  }
+
+  /** Single-row convenience overload. */
+  @Override
+  public void createFile(DATATYPES dt, String data) throws Exception {
+    createFile(dt, new String[] { data });
+  }
+
+  /**
+   * Runs the CodeGen tool for the datatype's table, loads the generated
+   * jar onto the thread context classloader and remembers it in
+   * {@code jars}.
+   *
+   * @return argv fragments naming the generated class and jar.
+   */
+  public String[] codeGen(DATATYPES dt) throws Exception {
+
+    CodeGenTool codeGen = new CodeGenTool();
+
+    String[] codeGenArgs = getCodeGenArgv(dt);
+    SqoopOptions options = codeGen.parseArguments(codeGenArgs, null, null,
+        true);
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+
+    options.setUsername(username);
+    options.setPassword(password);
+    codeGen.validateOptions(options);
+
+    int ret = codeGen.run(options);
+    assertEquals(0, ret);
+    List<String> generatedJars = codeGen.getGeneratedJarFiles();
+
+    assertNotNull(generatedJars);
+    assertEquals("Expected 1 generated jar file", 1, generatedJars.size());
+    String jarFileName = generatedJars.get(0);
+    // Sqoop generates jars named "foo.jar"; by default, this should contain
+    // a class named 'foo'. Extract the class name.
+    Path jarPath = new Path(jarFileName);
+    String jarBaseName = jarPath.getName();
+    assertTrue(jarBaseName.endsWith(".jar"));
+    assertTrue(jarBaseName.length() > ".jar".length());
+    String className = jarBaseName.substring(0, jarBaseName.length()
+        - ".jar".length());
+
+    LOG.info("Using jar filename: " + jarFileName);
+    LOG.info("Using class name: " + className);
+
+    // NOTE(review): the previous classloader is captured but never
+    // restored; kept as-is to preserve existing behavior.
+    ClassLoader prevClassLoader = null;
+
+    if (null != jarFileName) {
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          className);
+      System.out.println("Jar,class =" + jarFileName + " , "
+          + className);
+    }
+
+    // Now run and verify the export.
+    LOG.info("Exporting SequenceFile-based data");
+    jars.put(dt, jarFileName);
+    return (getArgv(dt, "--class-name", className, "--jar-file",
+        jarFileName));
+  }
+
+  /** Base argv plus the generated --class-name/--jar-file pair. */
+  @Override
+  protected String[] getArgv(DATATYPES dt) {
+
+    String[] args = super.getArgv(dt);
+    String[] combinedArgs = Arrays.copyOf(args, args.length + 4);
+
+    String[] additional = new String[4];
+    additional[0] = "--class-name";
+    additional[1] = getTableName(dt);
+    additional[2] = "--jar-file";
+    additional[3] = jars.get(dt).toString();
+    for (int i = args.length, j = 0; i < combinedArgs.length; i++, j++) {
+      combinedArgs[i] = additional[j];
+    }
+
+    for (String a : combinedArgs) {
+      System.out.println(a);
+    }
+    return combinedArgs;
+  }
+
+  /**
+  * @return an argv for the CodeGenTool to use when creating tables to
+  *         export.
+  */
+  protected String[] getCodeGenArgv(DATATYPES dt) {
+    List<String> codeGenArgv = new ArrayList<String>();
+
+    codeGenArgv.add("--table");
+    codeGenArgv.add(getTableName(dt));
+    codeGenArgv.add("--connect");
+    codeGenArgv.add(getConnectString());
+    codeGenArgv.add("--fields-terminated-by");
+    codeGenArgv.add("\\t");
+    codeGenArgv.add("--lines-terminated-by");
+    codeGenArgv.add("\\n");
+
+    return codeGenArgv.toArray(new String[0]);
+  }
+
+  /**
+  * Create the argv to pass to Sqoop for a tab-delimited, single-mapper
+  * export of the datatype's table.
+  *
+  * @param dt the datatype under test.
+  * @param additionalArgv extra arguments; "-D name=value" pairs are
+  *            prepended as Hadoop flags, everything else is appended.
+  * @return the argv as an array of strings.
+  */
+  protected String[] getArgv(DATATYPES dt, String... additionalArgv) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    // Any additional Hadoop flags (-D foo=bar) are prepended.
+    if (null != additionalArgv) {
+      boolean prevIsFlag = false;
+      for (String arg : additionalArgv) {
+        if (arg.equals("-D")) {
+          args.add(arg);
+          prevIsFlag = true;
+        } else if (prevIsFlag) {
+          args.add(arg);
+          prevIsFlag = false;
+        }
+      }
+    }
+
+    // The sqoop-specific additional args are then added.
+    if (null != additionalArgv) {
+      boolean prevIsFlag = false;
+      for (String arg : additionalArgv) {
+        if (arg.equals("-D")) {
+          prevIsFlag = true;
+          continue;
+        } else if (prevIsFlag) {
+          prevIsFlag = false;
+          continue;
+        } else {
+          // normal argument.
+          args.add(arg);
+        }
+      }
+    }
+
+    args.add("--table");
+    args.add(getTableName(dt));
+    args.add("--export-dir");
+    args.add(getTablePath(dt).toString());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--fields-terminated-by");
+    args.add("\\t");
+    args.add("--lines-terminated-by");
+    args.add("\\n");
+    args.add("-m");
+    args.add("1");
+
+    LOG.debug("args:");
+    for (String a : args) {
+      LOG.debug("  " + a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  /** Separate report file so sequence-file runs don't mix with others. */
+  @Override
+  public String getOutputFileName() {
+    return "ManagerCompatExportSeq.txt";
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
new file mode 100644
index 0000000..87bc203
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
@@ -0,0 +1,240 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.BufferedReader;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils.*;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+import com.cloudera.sqoop.Sqoop;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.orm.CompilationManager;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+
+/**
+ * Test import delimited file from SQL Server.
+ */
+public class SQLServerDatatypeImportDelimitedFileManualTest
+  extends SQLServerDatatypeImportSequenceFileManualTest {
+
+/**
+ * Create the argv to pass to Sqoop.
+ *
+ * @param includeHadoopFlags
+ *            if true, then include -D various.settings=values
+ * @param colNames
+ *            the columns to import. If null, all columns are used.
+ * @param conf
+ *            a Configuration specifying additional properties to use when
+ *            determining the arguments.
+ * @return the argv as an array of strings.
+*/
+  protected String[] getArgv(boolean includeHadoopFlags, String[] colNames,
+    Configuration conf) {
+    if (null == colNames) {
+    colNames = getColNames();
+    }
+
+    String splitByCol = colNames[0];
+    String columnsString = "";
+    for (String col : colNames) {
+      columnsString += col + ",";
+    }
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--columns");
+    args.add(columnsString);
+    args.add("--split-by");
+    args.add(splitByCol);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(getConnectString());
+
+    args.add("--num-mappers");
+    args.add("2");
+
+    args.addAll(getExtraArgs(conf));
+
+    return args.toArray(new String[0]);
+  }
+
+
+  // Runs a two-mapper import of the given columns and asserts that the
+  // job exits with status 0.
+  private void runSqoopImport(String[] importCols) {
+    Configuration conf = getConf();
+      SqoopOptions opts = getSqoopOptions(conf);
+      String username = MSSQLTestUtils.getDBUserName();
+      String password = MSSQLTestUtils.getDBPassWord();
+      opts.setUsername(username);
+      opts.setPassword(password);
+
+      // run the tool through the normal entry-point.
+      int ret;
+      try {
+        Sqoop importer = new Sqoop(new ImportTool(), conf, opts);
+        ret = Sqoop.runSqoop(importer, getArgv(true, importCols, conf));
+      } catch (Exception e) {
+        LOG.error("Got exception running Sqoop: " + e.toString());
+        throw new RuntimeException(e);
+      }
+
+      // expect a successful return.
+      assertEquals("Failure during job", 0, ret);
+  }
+
+  /**
+  * Do a MapReduce-based import of the table and verify that the results were
+  * imported as expected. (tests readFields(ResultSet) and toString())
+  *
+  * @param expectedVal
+  *            the value we injected into the table.
+  * @param importCols
+  *            the columns to import. If null, all columns are used.
+  */
+  protected void verifyImport(String expectedVal, String[] importCols) {
+
+    // paths to where our output file will wind up.
+    Path tableDirPath = getTablePath();
+
+    removeTableDir();
+
+    runSqoopImport(importCols);
+    Configuration conf = getConf();
+
+    // Re-parse the argv so the CompilationManager can locate the jar the
+    // import generated.
+    SqoopOptions opts = getSqoopOptions(conf);
+    try {
+      ImportTool importTool = new ImportTool();
+      opts = importTool.parseArguments(getArgv(false, importCols, conf),
+       conf, opts, true);
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      fail(e.toString());
+    }
+
+    CompilationManager compileMgr = new CompilationManager(opts);
+    String jarFileName = compileMgr.getJarFilename();
+    ClassLoader prevClassLoader = null;
+    try {
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+       getTableName());
+
+      // Now open and check all part-files in the table path until we find
+      // a non-empty one that we can verify contains the value.
+
+      FileSystem fs = FileSystem.getLocal(conf);
+      FileStatus[] stats = fs.listStatus(tableDirPath);
+
+      if (stats == null || stats.length == 0) {
+        fail("Error: no files in " + tableDirPath);
+      }
+
+      boolean foundRecord = false;
+      for (FileStatus stat : stats) {
+        if (!stat.getPath().getName().startsWith("part-")
+          && !stat.getPath().getName().startsWith("data-")) {
+          // This isn't a data file. Ignore it.
+          continue;
+        }
+
+        try {
+          String line;
+          String fname = stat.getPath().toString();
+          // NOTE(review): presumably strips the "file:" URI scheme from
+          // the local path — confirm against Path.toString() output.
+          fname = fname.substring(5, fname.length());
+
+          BufferedReader reader = new BufferedReader(
+           new InputStreamReader(new FileInputStream(new File(
+             fname))));
+          try {
+            line = reader.readLine();
+            assertEquals(" expected a different string",
+              expectedVal, line);
+          } finally {
+            IOUtils.closeStream(reader);
+          }
+          LOG.info("Read back from sequencefile: " + line);
+          foundRecord = true;
+          // Add trailing '\n' to expected value since
+          // SqoopRecord.toString()
+          // encodes the record delim.
+          // NOTE(review): this null branch looks unreachable — the
+          // assertEquals above already compared expectedVal to line.
+          if (null == expectedVal) {
+            assertEquals("Error validating result from SeqFile",
+              "null\n", line);
+          }
+        } catch (EOFException eoe) {
+          // EOF in a file isn't necessarily a problem. We may have
+          // some
+          // empty sequence files, which will throw this. Just
+          // continue
+          // in the loop.
+        }
+      }
+
+      if (!foundRecord) {
+        fail("Couldn't read any records from SequenceFiles");
+      }
+    } catch (IOException ioe) {
+      LOG.error(StringUtils.stringifyException(ioe));
+      fail("IOException: " + ioe.toString());
+    } finally {
+      if (null != prevClassLoader) {
+      ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+
+  // Delimited-file variant: VARBINARY is exercised only when supported.
+  public void testVarBinary() {
+    if (!supportsVarBinary()) {
+      return;
+    }
+    dataTypeTest(DATATYPES.VARBINARY);
+  }
+
+  // TIME is recorded as skipped when the manager doesn't support it.
+  public void testTime() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+
+    dataTypeTest(DATATYPES.TIME);
+  }
+
+  // NOTE(review): "Resport" is a typo, but the method name is kept —
+  // renaming a public method would break overrides and callers.
+  public String getResportFileName(){
+    return this.getClass().toString()+".txt";
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/64878c64/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
new file mode 100644
index 0000000..8b30da0
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
@@ -0,0 +1,803 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.FileWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils.*;
+import org.junit.Test;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
+import org.apache.sqoop.manager.sqlserver.MSSQLTestData.KEY_STRINGS;
+
+/**
+ * Testing import of a sequence file to SQL Server.
+ */
+public class SQLServerDatatypeImportSequenceFileManualTest extends
+    ManagerCompatTestCase {
+
+  public static final Log LOG = LogFactory.getLog(
+      SQLServerDatatypeImportSequenceFileManualTest.class.getName());
+  private static MSSQLTestDataFileParser tdfs;
+  private static Map report;
+
+  static {
+    try {
+
+      String testfile = null;
+      testfile = System.getProperty("test.data.dir")
+        + "/" + System.getProperty("ms.datatype.test.data.file.import");
+      String delim = System.getProperty("ms.datatype.test.data.file.delim", ",");
+      System.out.println("Using data file : " + testfile);
+      LOG.info("Using data file : " + testfile);
+      tdfs = new MSSQLTestDataFileParser(testfile);
+      tdfs.setDelim(delim);
+      tdfs.parse();
+      report = new HashMap();
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      System.out
+       .println("Error with test data file, check stack trace for cause"
+         + ".\nTests cannont continue.");
+      System.exit(0);
+    }
+  }
+
+  @Override
+  protected String getDbFriendlyName() {
+    return "MSSQL";
+  }
+
+  @Override
+  protected Log getLogger() {
+    return LOG;
+  }
+
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  protected String getConnectString() {
+    return System.getProperty(
+          "sqoop.test.sqlserver.connectstring.host_url",
+          "jdbc:sqlserver://sqlserverhost:1433");
+  }
+
+  /**
+  * Drop a table if it already exists in the database.
+  *
+  * @param table
+  *            the name of the table to drop.
+  * @throws SQLException
+  *             if something goes wrong.
+  */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+      + "') IS NOT NULL  DROP TABLE " + table;
+
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    SqoopOptions opts = new SqoopOptions(conf);
+    opts.setUsername(username);
+    opts.setPassword(password);
+    return opts;
+  }
+
+  public void setUp() {
+    try {
+      super.setUp();
+    } catch (Exception e) {
+      try {
+        FileWriter fr = new FileWriter(getResportFileName(), true);
+        String res = removeNewLines(e.getMessage());
+        fr.append("Error\t" + res + "\n");
+        fr.close();
+      } catch (Exception e2) {
+        LOG.error(StringUtils.stringifyException(e2));
+        fail(e2.toString());
+      }
+    } catch (Error e) {
+      try {
+        FileWriter fr = new FileWriter(getResportFileName(), true);
+
+        String res = removeNewLines(e.getMessage());
+
+        fr.append("Error\t" + res + "\n");
+        fr.close();
+        fail(res);
+      } catch (Exception e2) {
+        LOG.error(StringUtils.stringifyException(e2));
+        fail(e2.toString());
+      }
+    }
+  }
+
+  public void tearDown() {
+    try {
+      super.tearDown();
+    } catch (Exception e) {
+      try {
+        FileWriter fr = new FileWriter(getResportFileName(), true);
+        String res = removeNewLines(e.getMessage());
+        fr.append("Error\t" + res + "\n");
+        fr.close();
+      } catch (Exception e2) {
+        LOG.error(StringUtils.stringifyException(e2));
+        fail(e2.toString());
+      }
+    } catch (Error e) {
+      try {
+        FileWriter fr = new FileWriter(getResportFileName(), true);
+        String res = removeNewLines(e.getMessage());
+        fr.append("Error\t" + res + "\n");
+        fr.close();
+        fail(res);
+      } catch (Exception e2) {
+        LOG.error(StringUtils.stringifyException(e2));
+        fail(e2.toString());
+      }
+    }
+  }
+
+  protected boolean supportsBoolean() {
+    return true;
+  }
+
+  @Test
+  public void testBit() {
+    if (!supportsBoolean()) {
+      skipped = true;
+      return;
+    }
+    verifyType("BIT", getTrueBoolNumericSqlInput(), getTrueBoolSeqOutput());
+  }
+
+  @Test
+  public void testBit2() {
+    if (!supportsBoolean()) {
+      skipped = true;
+    return;
+  }
+    verifyType("BIT", getFalseBoolNumericSqlInput(), getFalseBoolSeqOutput());
+  }
+
+  @Test
+  public void testBit3() {
+    if (!supportsBoolean()) {
+      skipped = true;
+      return;
+  }
+  verifyType("BIT", getFalseBoolLiteralSqlInput(), getFalseBoolSeqOutput());
+  }
+
+  public void testBoolean() {
+    try {
+      super.testBoolean();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testBoolean2() {
+    try {
+      super.testBoolean2();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testBoolean3() {
+    try {
+      super.testBoolean3();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testDouble1() {
+    try {
+      super.testDouble1();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  @Test
+  public void testDouble2() {
+    try {
+      super.testDouble2();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testClob1() {
+    try {
+      super.testClob1();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testBlob1() {
+    try {
+    super.testBlob1();
+    assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+    System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testLongVarChar() {
+    try {
+      super.testLongVarChar();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testTimestamp1() {
+    try {
+      super.testTimestamp1();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testTimestamp2() {
+    try {
+      super.testTimestamp2();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testTimestamp3() {
+    try {
+      super.testTimestamp3();
+      assertTrue("This test should not pass on sql server", false);
+    } catch (AssertionError a) {
+      System.out.println("Test failed, this was expected");
+    }
+  }
+
+  public void testVarBinary() {
+    if (!supportsVarBinary()) {
+      return;
+    }
+    dataTypeTest(DATATYPES.VARBINARY);
+  }
+
+  public void testTime() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+    dataTypeTest(DATATYPES.TIME);
+  }
+
+  @Test
+  public void testSmalldatetime() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+    dataTypeTest(DATATYPES.SMALLDATETIME);
+  }
+
+  @Test
+  public void testdatetime2() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+    dataTypeTest(DATATYPES.DATETIME2);
+  }
+
+  @Test
+  public void testdatetime() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+    dataTypeTest(DATATYPES.DATETIME);
+  }
+
+  @Test
+  public void testdatetimeoffset() {
+    if (!supportsTime()) {
+      skipped = true;
+      return;
+    }
+    dataTypeTest(DATATYPES.DATETIMEOFFSET);
+  }
+
+  public void testDecimal() {
+    dataTypeTest(DATATYPES.DECIMAL);
+  }
+
+  public void testNumeric() {
+    dataTypeTest(DATATYPES.NUMERIC);
+  }
+
+  public void testNumeric1() {
+  }
+
+  public void testNumeric2() {
+  }
+
+  public void testDecimal1() {
+  }
+
+  public void testDecimal2() {
+  }
+
+  @Test
+  public void testBigInt() {
+    dataTypeTest(DATATYPES.BIGINT);
+  }
+
+  @Test
+  public void testBigInt1() {
+  }
+
+  @Test
+  public void testInt() {
+    dataTypeTest(DATATYPES.INT);
+  }
+
+  @Test
+  public void testSmallInt() {
+    dataTypeTest(DATATYPES.SMALLINT);
+  }
+
+  @Test
+  public void testSmallInt1() {
+  }
+
+  @Test
+  public void testSmallInt2() {
+  }
+
+  @Test
+  public void testTinyint() {
+    dataTypeTest(DATATYPES.TINYINT);
+
+  }
+
+  @Test
+  public void testTinyInt1() {
+  }
+
+  @Test
+    public void testTinyInt2() {
+  }
+
+  @Test
+  public void testFloat() {
+    dataTypeTest(DATATYPES.FLOAT);
+  }
+
+  @Test
+  public void testReal() {
+    dataTypeTest(DATATYPES.REAL);
+  }
+
+  @Test
+  public void testDate() {
+    dataTypeTest(DATATYPES.DATE);
+  }
+
+  public void testMoney() {
+    dataTypeTest(DATATYPES.MONEY);
+  }
+
+  @Test
+  public void testSmallMoney() {
+    dataTypeTest(DATATYPES.SMALLMONEY);
+  }
+
+  @Test
+  public void testText() {
+    dataTypeTest(DATATYPES.TEXT);
+  }
+
+  @Test
+  public void testVarchar() {
+    dataTypeTest(DATATYPES.VARCHAR);
+  }
+
+  @Test
+  public void testChar() {
+    dataTypeTest(DATATYPES.CHAR);
+  }
+
+  @Test
+  public void testNText() {
+    dataTypeTest(DATATYPES.NTEXT);
+  }
+
+  @Test
+  public void testNChar() {
+    dataTypeTest(DATATYPES.NCHAR);
+  }
+
+  @Test
+  public void testNVarchar() {
+    dataTypeTest(DATATYPES.NVARCHAR);
+  }
+
+  public void testImage() {
+    dataTypeTest(DATATYPES.IMAGE);
+  }
+
+  public void testBinary() {
+    dataTypeTest(DATATYPES.BINARY);
+  }
+
+  //---------------disabled tests-----
+  @Test
+  public void testTime1() {
+  }
+
+  @Test
+  public void testTime2() {
+  }
+
+  @Test
+  public void testTime3() {
+  }
+
+  @Test
+  public void testTime4() {
+  }
+
+  @Test
+  public void testStringCol1() {
+
+  }
+
+  @Test
+  public void testStringCol2() {
+
+  }
+
+  @Test
+  public void testEmptyStringCol() {
+
+  }
+
+  @Test
+  public void testNullStringCol() {
+
+  }
+
+  @Test
+  public void testNullInt() {
+
+  }
+
+  @Test
+  public void testReal1() {
+
+  }
+
+  @Test
+  public void testReal2() {
+
+  }
+
+  @Test
+  public void testFloat1() {
+
+  }
+
+  @Test
+  public void testFloat2() {
+
+  }
+
+  @Test
+  public void testDate1() {
+
+  }
+
+  @Test
+  public void testDate2() {
+
+  }
+
+
+  @Test
+  public void testNumeric3() {
+
+  }
+
+  @Test
+  public void testNumeric4() {
+
+  }
+
+  @Test
+  public void testNumeric5() {
+
+
+  }
+
+  @Test
+  public void testNumeric6() {
+
+  }
+
+
+
+  @Test
+  public void testDecimal3() {
+
+  }
+
+  @Test
+  public void testDecimal4() {
+
+  }
+
+  @Test
+  public void testDecimal5() {
+
+
+  }
+
+  @Test
+  public void testDecimal6() {
+
+  }
+
+
+
+  //end disabled tests----------------------------
+
+  public String getTrueBoolDbOutput() {
+    return "1";
+  }
+
+  public String getFalseBoolDbOutput() {
+    return "0";
+  }
+
+  protected String getFalseBoolSeqOutput() {
+    return "false";
+  }
+
+  protected String getFalseBoolLiteralSqlInput() {
+    return "0";
+  }
+
+  protected String getFixedCharSeqOut(int len, String val) {
+    return val + nSpace(len - val.length());
+  }
+
+  protected String getFixedCharDbOut(int len, String val) {
+    return val + nSpace(len - val.length());
+  }
+
+  public String nSpace(int n) {
+    String tmp = "";
+    for (int i = 0; i < n; i++) {
+      tmp += " ";
+    }
+
+    return tmp;
+  }
+
+  public String nZeros(int n) {
+    String tmp = "";
+    for (int i = 0; i < n; i++) {
+      tmp += "0";
+    }
+
+    return tmp;
+  }
+
+  public void dataTypeTest(DATATYPES datatype) {
+    int exceptionCount = 0;
+
+    List testdata = tdfs.getTestdata(datatype);
+
+    for (Iterator<MSSQLTestData> itr = testdata.iterator(); itr.hasNext();) {
+      MSSQLTestData current = itr.next();
+      System.out.println("Testing with : \n" + current);
+
+      try {
+        if (datatype == DATATYPES.DECIMAL
+           || datatype == DATATYPES.NUMERIC) {
+
+          verifyType(current.getDatatype() + "("
+            + current.getData(KEY_STRINGS.SCALE) + ","
+            + current.getData(KEY_STRINGS.PREC) + ")", current
+          .getData(KEY_STRINGS.TO_INSERT), current
+          .getData(KEY_STRINGS.HDFS_READBACK));
+
+        } else if (datatype == DATATYPES.TIME
+           || datatype == DATATYPES.SMALLDATETIME
+           || datatype == DATATYPES.DATETIME2
+           || datatype == DATATYPES.DATETIME
+           || datatype == DATATYPES.DATETIMEOFFSET
+           || datatype == DATATYPES.TEXT
+           || datatype == DATATYPES.NTEXT
+           || datatype == DATATYPES.DATE) {
+          verifyType(current.getDatatype(), "'"
+            + current.getData(KEY_STRINGS.TO_INSERT) + "'", current
+          .getData(KEY_STRINGS.HDFS_READBACK));
+        } else if (datatype == DATATYPES.VARBINARY) {
+          verifyType(
+          current.getDatatype() + "("
+          + current.getData(KEY_STRINGS.SCALE) + ")",
+          "cast('" + current.getData(KEY_STRINGS.TO_INSERT)
+          + "' as varbinary("
+          + current.getData(KEY_STRINGS.SCALE) + "))",
+          current.getData(KEY_STRINGS.HDFS_READBACK));
+        } else if (datatype == DATATYPES.BINARY) {
+          verifyType(
+          current.getDatatype() + "("
+          + current.getData(KEY_STRINGS.SCALE) + ")",
+          "cast('" + current.getData(KEY_STRINGS.TO_INSERT)
+          + "' as binary("
+          + current.getData(KEY_STRINGS.SCALE) + "))",
+          current.getData(KEY_STRINGS.HDFS_READBACK));
+        } else if (datatype == DATATYPES.NCHAR
+        || datatype == DATATYPES.VARCHAR
+        || datatype == DATATYPES.CHAR
+        || datatype == DATATYPES.NVARCHAR) {
+        System.out.println("------>"
+        + current.getData(KEY_STRINGS.DB_READBACK)
+        + "<----");
+        verifyType(current.getDatatype() + "("
+        + current.getData(KEY_STRINGS.SCALE) + ")", "'"
+        + current.getData(KEY_STRINGS.TO_INSERT) + "'",
+        current.getData(KEY_STRINGS.HDFS_READBACK));
+        } else if (datatype == DATATYPES.IMAGE) {
+        verifyType(current.getDatatype(), "cast('"
+        + current.getData(KEY_STRINGS.TO_INSERT)
+        + "' as image )",
+        current.getData(KEY_STRINGS.HDFS_READBACK));
+        } else {
+          verifyType(current.getDatatype(), current
+          .getData(KEY_STRINGS.TO_INSERT), current
+          .getData(KEY_STRINGS.HDFS_READBACK));
+        }
+
+        addToReport(current, null);
+
+      } catch (AssertionError ae) {
+        if (current.getData(KEY_STRINGS.NEG_POS_FLAG).equals("NEG")) {
+          System.out.println("failure was expected, PASS");
+          addToReport(current, null);
+        } else {
+          System.out
+          .println("----------------------------------------------------------"
+            + "-");
+          System.out.println("Failure for following Test Data :\n"
+          + current.toString());
+          System.out
+          .println("----------------------------------------------------------"
+            + "-");
+          System.out.println("Exception details : \n");
+          System.out.println(ae.getMessage());
+          System.out
+          .println("----------------------------------------------------------"
+            + "-");
+          addToReport(current, ae);
+          exceptionCount++;
+        }
+      } catch (Exception e) {
+        addToReport(current, e);
+        exceptionCount++;
+      }
+    }
+
+    if (exceptionCount > 0) {
+      System.out.println("There were failures for :"
+      + datatype.toString());
+      System.out.println("Failed for " + exceptionCount
+      + " test data samples\n");
+      System.out.println("Sroll up for detailed errors");
+      System.out
+      .println("-----------------------------------------------------------");
+      throw new AssertionError("Failed for " + exceptionCount
+      + " test data sample");
+    }
+  }
+
+  public  synchronized void addToReport(MSSQLTestData td, Object result) {
+    System.out.println("called");
+    try {
+      FileWriter fr = new FileWriter(getResportFileName(), true);
+      String offset = td.getData(KEY_STRINGS.OFFSET);
+      String res = "_";
+      if (result == null) {
+      res = "Success";
+    } else {
+      try {
+      res = "FAILED "
+      + removeNewLines(((AssertionError) result)
+      .getMessage());
+      } catch (Exception ae) {
+        if (result instanceof Exception
+          && ((Exception) result) != null) {
+          res = "FAILED "
+          + removeNewLines(((Exception) result)
+          .getMessage());
+        } else {
+          res = "FAILED " + result.toString();
+        }
+      }
+    }
+
+    fr.append(offset + "\t" + res + "\n");
+    fr.close();
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+  }
+
+  public static String removeNewLines(String str) {
+    String[] tmp = str.split("\n");
+    String result = "";
+    for (String a : tmp) {
+      result += " " + a;
+    }
+    return result;
+  }
+
+  public String getResportFileName(){
+    return this.getClass().toString()+".txt";
+  }
+}


Mime
View raw message