sqoop-commits mailing list archives

From: jar...@apache.org
Subject: [1/3] SQOOP-931: Integrate HCatalog with Sqoop
Date: Fri, 07 Jun 2013 14:34:34 GMT
Updated Branches:
  refs/heads/trunk b07906a2a -> 5e88d43b5
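
For orientation, the tests in this patch drive the new HCatalog options
(--hcatalog-table, --create-hcatalog-table, --hcatalog-storage-stanza, and
friends) end to end. A minimal sketch of an equivalent command-line
invocation (the connect string and table names here are illustrative, not
taken from the patch):

    sqoop import \
      --connect jdbc:hsqldb:hsql://db.example.com/sqoop \
      --table EMPLOYEES \
      --hcatalog-table employees \
      --create-hcatalog-table \
      -m 1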


http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
new file mode 100644
index 0000000..77bafcc
--- /dev/null
+++ b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
@@ -0,0 +1,377 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hcat;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.sqoop.hcat.HCatalogTestUtils.ColumnGenerator;
+import org.apache.sqoop.hcat.HCatalogTestUtils.CreateMode;
+import org.apache.sqoop.hcat.HCatalogTestUtils.KeyType;
+import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
+import org.junit.Before;
+
+import com.cloudera.sqoop.testutil.ExportJobTestCase;
+
+/**
+ * Test that we can export HCatalog tables into databases.
+ */
+public class HCatalogExportTest extends ExportJobTestCase {
+  private static final Log LOG =
+    LogFactory.getLog(HCatalogExportTest.class);
+  private final HCatalogTestUtils utils = HCatalogTestUtils.instance();
+
+  @Before
+  @Override
+  public void setUp() {
+    super.setUp();
+    try {
+      utils.initUtils();
+    } catch (Exception e) {
+      throw new RuntimeException("Error initializing HCatalogTestUtils", e);
+    }
+  }
+
+  /**
+   * @return an argv for the CodeGenTool to use when creating tables to export.
+   */
+  protected String[] getCodeGenArgv(String... extraArgs) {
+    List<String> codeGenArgv = new ArrayList<String>();
+
+    if (null != extraArgs) {
+      for (String arg : extraArgs) {
+        codeGenArgv.add(arg);
+      }
+    }
+
+    codeGenArgv.add("--table");
+    codeGenArgv.add(getTableName());
+    codeGenArgv.add("--connect");
+    codeGenArgv.add(getConnectString());
+    codeGenArgv.add("--hcatalog-table");
+    codeGenArgv.add(getTableName());
+
+    return codeGenArgv.toArray(new String[0]);
+  }
+
+  /**
+   * Verify that, for the rows holding the minimum and maximum values of the
+   * 'id' column, the given column contains the expected database values.
+   */
+  protected void assertColMinAndMax(String colName, ColumnGenerator generator)
+    throws SQLException {
+    Connection conn = getConnection();
+    int minId = getMinRowId(conn);
+    int maxId = getMaxRowId(conn);
+    String table = getTableName();
+    LOG.info("Checking min/max for column " + colName + " with type "
+      + SqoopHCatUtilities.sqlTypeString(generator.getSqlType()));
+
+    Object expectedMin = generator.getDBValue(minId);
+    Object expectedMax = generator.getDBValue(maxId);
+
+    utils.assertSqlColValForRowId(conn, table, minId, colName, expectedMin);
+    utils.assertSqlColValForRowId(conn, table, maxId, colName, expectedMax);
+  }
+
+  private void runHCatExport(List<String> addlArgsArray,
+    final int totalRecords, String table,
+    ColumnGenerator[] cols) throws Exception {
+    utils.createHCatTable(CreateMode.CREATE_AND_LOAD,
+      totalRecords, table, cols);
+    utils.createSqlTable(getConnection(), true, totalRecords, table, cols);
+    Map<String, String> addlArgsMap = utils.getAddlTestArgs();
+    addlArgsArray.add("--verbose");
+    addlArgsArray.add("-m");
+    addlArgsArray.add("1");
+    addlArgsArray.add("--hcatalog-table");
+    addlArgsArray.add(table);
+    String[] argv = {};
+
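+    // The -libjars option (a Hadoop generic option) is kept out of
+    // addlArgsArray and passed through separately via newStrArray() below.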
+    if (addlArgsMap.containsKey("-libjars")) {
+      argv = new String[2];
+      argv[0] = "-libjars";
+      argv[1] = addlArgsMap.get("-libjars");
+    }
+    for (String k : addlArgsMap.keySet()) {
+      if (!k.equals("-libjars")) {
+        addlArgsArray.add(k);
+        addlArgsArray.add(addlArgsMap.get(k));
+      }
+    }
+    String[] exportArgs = getArgv(true, 10, 10, newStrArray(argv,
+      addlArgsArray.toArray(new String[0])));
+    LOG.debug("Export args = " + Arrays.toString(exportArgs));
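+    // Reset the shared SqoopHCatUtilities instance so this run performs its
+    // HCatalog configuration from scratch rather than reusing earlier state.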
+    SqoopHCatUtilities.instance().setConfigured(false);
+    runExport(exportArgs);
+    verifyExport(totalRecords);
+    for (int i = 0; i < cols.length; i++) {
+      assertColMinAndMax(HCatalogTestUtils.forIdx(i), cols[i]);
+    }
+  }
+
+  public void testIntTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN,
+        Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "tinyint", Types.INTEGER, HCatFieldSchema.Type.INT, 10,
+        10, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 100,
+        100, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
+        "int", Types.INTEGER, HCatFieldSchema.Type.INT, 1000,
+        1000, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4),
+        "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 10000L,
+        10000L, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testFloatTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "float", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 10.0F,
+        10.F, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "real", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 20.0F,
+        20.0F, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "double", Types.DOUBLE, HCatFieldSchema.Type.DOUBLE, 30.0D,
+        30.0D, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testNumberTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, "1000",
+        new BigDecimal("1000"), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, "2000",
+        new BigDecimal("2000"), KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testDateTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "date", Types.DATE, HCatFieldSchema.Type.STRING, "2013-12-31",
+        new Date(113, 11, 31), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "time", Types.TIME, HCatFieldSchema.Type.STRING, "10:11:12",
+        new Time(10, 11, 12), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.STRING,
+        "2013-12-31 10:11:12", new Timestamp(113, 11, 31, 10, 11, 12, 0),
+        KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testDateTypesToBigInt() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    long offset = TimeZone.getDefault().getRawOffset();
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "date", Types.DATE, HCatFieldSchema.Type.BIGINT, 0 - offset,
+        new Date(70, 0, 1), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "time", Types.TIME, HCatFieldSchema.Type.BIGINT, 36672000L - offset,
+        new Time(10, 11, 12), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.BIGINT,
+        36672000L - offset, new Timestamp(70, 0, 1, 10, 11, 12, 0),
+        KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--map-column-hive");
+    addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testStringTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "char(10)", Types.CHAR, HCatFieldSchema.Type.STRING, "string to test",
+        "string to test", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING,
+        "string to test", "string to test", KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+
+  public void testBinaryTypes() throws Exception {
+    ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 1, 2 });
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "binary(10)", Types.BINARY, HCatFieldSchema.Type.BINARY,
+        bb.array(), bb.array(), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varbinary(10)", Types.BINARY, HCatFieldSchema.Type.BINARY,
+        bb.array(), bb.array(), KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testColumnProjection() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", null, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--columns");
+    addlArgsArray.add("ID,MSG");
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testStaticPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testDynamicPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.DYNAMIC_KEY),
+    };
+
+    List<String> addlArgsArray = new ArrayList<String>();
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testStaticAndDynamicPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.DYNAMIC_KEY),
+    };
+
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  /**
+   * Test other file formats.
+   */
+  public void testSequenceFile() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "1",
+            "1", KeyType.STATIC_KEY),
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "2",
+            "2", KeyType.DYNAMIC_KEY), };
+
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+
+  public void testTextFile() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "1",
+            "1", KeyType.STATIC_KEY),
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "2",
+            "2", KeyType.DYNAMIC_KEY), };
+
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    utils.setStorageInfo(HCatalogTestUtils.STORED_AS_TEXT);
+    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
new file mode 100644
index 0000000..293015e
--- /dev/null
+++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
@@ -0,0 +1,712 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hcat;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.sqoop.hcat.HCatalogTestUtils.ColumnGenerator;
+import org.apache.sqoop.hcat.HCatalogTestUtils.CreateMode;
+import org.apache.sqoop.hcat.HCatalogTestUtils.KeyType;
+import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
+import org.junit.Before;
+
+import com.cloudera.sqoop.Sqoop;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.tool.SqoopTool;
+
+/**
+ * Test that we can import database tables into HCatalog.
+ */
+public class HCatalogImportTest extends ImportJobTestCase {
+  private static final Log LOG =
+    LogFactory.getLog(HCatalogImportTest.class);
+  private final HCatalogTestUtils utils = HCatalogTestUtils.instance();
+  private List<String> extraTestArgs = null;
+  private List<String> configParams = null;
+
+  @Override
+  @Before
+  public void setUp() {
+    super.setUp();
+    try {
+      utils.initUtils();
+      extraTestArgs = new ArrayList<String>();
+      configParams = new ArrayList<String>();
+    } catch (Exception e) {
+      throw new RuntimeException("Error initializing HCatalogTestUtils", e);
+    }
+  }
+
+  /**
+   * @return an argv for the CodeGenTool to use when creating tables to import.
+   */
+  protected String[] getCodeGenArgv(String... extraArgs) {
+    List<String> codeGenArgv = new ArrayList<String>();
+
+    if (null != extraArgs) {
+      for (String arg : extraArgs) {
+        codeGenArgv.add(arg);
+      }
+    }
+
+    codeGenArgv.add("--table");
+    codeGenArgv.add(getTableName());
+    codeGenArgv.add("--connect");
+    codeGenArgv.add(getConnectString());
+    codeGenArgv.add("--hcatalog-table");
+    codeGenArgv.add(getTableName());
+
+    return codeGenArgv.toArray(new String[0]);
+  }
+
+  private void setExtraArgs(List<String> args) {
+    extraTestArgs.clear();
+    if (args != null && args.size() > 0) {
+      extraTestArgs.addAll(args);
+    }
+  }
+
+  private List<String> getConfigParams() {
+    return configParams;
+  }
+
+  private void setConfigParams(List<String> params) {
+    configParams.clear();
+    if (params != null && params.size() > 0) {
+      configParams.addAll(params);
+    }
+  }
+
+  @Override
+  protected List<String> getExtraArgs(Configuration conf) {
+    List<String> addlArgsArray = new ArrayList<String>();
+    if (extraTestArgs != null && extraTestArgs.size() > 0) {
+      addlArgsArray.addAll(extraTestArgs);
+    }
+    Map<String, String> addlArgsMap = utils.getAddlTestArgs();
+    String[] argv = {};
+
+    if (addlArgsMap.containsKey("-libjars")) {
+      argv = new String[2];
+      argv[0] = "-libjars";
+      argv[1] = addlArgsMap.get("-libjars");
+    }
+    addlArgsArray.add("-m");
+    addlArgsArray.add("1");
+    addlArgsArray.add("--hcatalog-table");
+    addlArgsArray.add(getTableName());
+    for (String k : addlArgsMap.keySet()) {
+      if (!k.equals("-libjars")) {
+        addlArgsArray.add(k);
+        addlArgsArray.add(addlArgsMap.get(k));
+      }
+    }
+    return addlArgsArray;
+  }
+
+  @Override
+  protected String[] getArgv(boolean includeHadoopFlags, String[] colNames,
+    Configuration conf) {
+    if (null == colNames) {
+      colNames = getColNames();
+    }
+    String columnsString = "";
+    String splitByCol = null;
+    if (colNames != null) {
+      splitByCol = colNames[0];
+      for (String col : colNames) {
+        columnsString += col + ",";
+      }
+    }
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+    args.addAll(getConfigParams());
+    args.add("--table");
+    args.add(getTableName());
+    if (colNames != null) {
+      args.add("--columns");
+      args.add(columnsString);
+      args.add("--split-by");
+      args.add(splitByCol);
+    }
+    args.add("--hcatalog-table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.addAll(getExtraArgs(conf));
+
+    return args.toArray(new String[0]);
+  }
+
+  private void validateHCatRecords(final List<HCatRecord> recs,
+    final HCatSchema schema, int expectedCount,
+    ColumnGenerator... cols) throws IOException {
+    if (recs.size() != expectedCount) {
+      fail("Expected records = " + expectedCount
+        + ", actual = " + recs.size());
+      return;
+    }
+    schema.getFieldNames();
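+    // Sort by the generated "id" column so records can be compared
+    // positionally, independent of the order in which they were read.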
+    Collections.sort(recs, new Comparator<HCatRecord>()
+    {
+      @Override
+      public int compare(HCatRecord hr1, HCatRecord hr2) {
+        try {
+          return hr1.getInteger("id", schema)
+            - hr2.getInteger("id", schema);
+        } catch (Exception e) {
+          LOG.warn("Exception caught while sorting hcat records " + e);
+        }
+        return 0;
+      }
+    });
+
+    Object expectedVal = null;
+    Object actualVal = null;
+    for (int i = 0; i < recs.size(); ++i) {
+      HCatRecord rec = recs.get(i);
+      expectedVal = i;
+      actualVal = rec.get("id", schema);
+      LOG.info("Validating field: id (expected = "
+        + expectedVal + ", actual = " + actualVal + ")");
+      HCatalogTestUtils.assertEquals(expectedVal, actualVal);
+      expectedVal = "textfield" + i;
+      actualVal = rec.get("msg", schema);
+      LOG.info("Validating field: msg (expected = "
+        + expectedVal + ", actual = " + actualVal + ")");
+      HCatalogTestUtils.assertEquals(expectedVal, actualVal);
+      for (ColumnGenerator col : cols) {
+        String name = col.getName().toLowerCase();
+        expectedVal = col.getHCatValue(i);
+        actualVal = rec.get(name, schema);
+        LOG.info("Validating field: " + name + " (expected = "
+          + expectedVal + ", actual = " + actualVal + ")");
+        HCatalogTestUtils.assertEquals(expectedVal, actualVal);
+      }
+    }
+  }
+
+  protected void runImport(SqoopTool tool, String[] argv) throws IOException {
+    // run the tool through the normal entry-point.
+    int ret;
+    try {
+      Configuration conf = getConf();
+      SqoopOptions opts = getSqoopOptions(conf);
+      Sqoop sqoop = new Sqoop(tool, conf, opts);
+      ret = Sqoop.runSqoop(sqoop, argv);
+    } catch (Exception e) {
+      LOG.error("Got exception running import: " + e.toString());
+      e.printStackTrace();
+      ret = 1;
+    }
+    if (0 != ret) {
+      throw new IOException("Import failure; return status " + ret);
+    }
+  }
+
+  private void runHCatImport(List<String> addlArgsArray,
+    int totalRecords, String table, ColumnGenerator[] cols,
+    String[] cNames) throws Exception {
+    runHCatImport(addlArgsArray, totalRecords, table, cols, cNames, false);
+  }
+
+  private void runHCatImport(List<String> addlArgsArray,
+    int totalRecords, String table, ColumnGenerator[] cols,
+    String[] cNames, boolean dontCreate) throws Exception {
+    CreateMode mode = CreateMode.CREATE;
+    if (dontCreate) {
+      mode = CreateMode.NO_CREATION;
+    }
+    HCatSchema tblSchema =
+      utils.createHCatTable(mode, totalRecords, table, cols);
+    utils.createSqlTable(getConnection(), false, totalRecords, table, cols);
+    Map<String, String> addlArgsMap = utils.getAddlTestArgs();
+    String[] argv = {};
+    addlArgsArray.add("-m");
+    addlArgsArray.add("1");
+    addlArgsArray.add("--hcatalog-table");
+    addlArgsArray.add(table);
+    if (addlArgsMap.containsKey("-libjars")) {
+      argv = new String[2];
+      argv[0] = "-libjars";
+      argv[1] = addlArgsMap.get("-libjars");
+    }
+    for (String k : addlArgsMap.keySet()) {
+      if (!k.equals("-libjars")) {
+        addlArgsArray.add(k);
+        addlArgsArray.add(addlArgsMap.get(k));
+      }
+    }
+    String[] colNames = null;
+    if (cNames != null) {
+      colNames = cNames;
+    } else {
+      colNames = new String[2 + cols.length];
+      colNames[0] = "ID";
+      colNames[1] = "MSG";
+      for (int i = 0; i < cols.length; ++i) {
+        colNames[2 + i] = cols[i].getName().toUpperCase();
+      }
+    }
+    String[] importArgs = getArgv(true, colNames, new Configuration());
+    LOG.debug("Import args = " + Arrays.toString(importArgs));
+    SqoopHCatUtilities.instance().setConfigured(false);
+    runImport(new ImportTool(), importArgs);
+    List<HCatRecord> recs = utils.readHCatRecords(null, table, null);
+    LOG.debug("HCat records ");
+    LOG.debug(utils.hCatRecordDump(recs, tblSchema));
+    validateHCatRecords(recs, tblSchema, 10, cols);
+  }
+
+  public void testIntTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN,
+        Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "tinyint", Types.INTEGER, HCatFieldSchema.Type.INT, 10,
+        10, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 100,
+        100, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
+        "int", Types.INTEGER, HCatFieldSchema.Type.INT, 1000,
+        1000, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4),
+        "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 10000L,
+        10000L, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testFloatTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "float", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 10.0F,
+        10.F, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "real", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 20.0F,
+        20.0F, KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "double", Types.DOUBLE, HCatFieldSchema.Type.DOUBLE, 30.0D,
+        30.0D, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testNumberTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, "1000",
+        new BigDecimal("1000"), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, "2000",
+        new BigDecimal("2000"), KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testDateTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "date", Types.DATE, HCatFieldSchema.Type.STRING, "2013-12-31",
+        new Date(113, 11, 31), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "time", Types.TIME, HCatFieldSchema.Type.STRING, "10:11:12",
+        new Time(10, 11, 12), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.STRING,
+        "2013-12-31 10:11:12.0", new Timestamp(113, 11, 31, 10, 11, 12, 0),
+        KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testDateTypesToBigInt() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    long offset = TimeZone.getDefault().getRawOffset();
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "date", Types.DATE, HCatFieldSchema.Type.BIGINT, 0 - offset,
+        new Date(70, 0, 1), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "time", Types.TIME, HCatFieldSchema.Type.BIGINT, 36672000L - offset,
+        new Time(10, 11, 12), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
+        "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.BIGINT,
+        36672000L - offset, new Timestamp(70, 0, 1, 10, 11, 12, 0),
+        KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--map-column-hive");
+    addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testStringTypes() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "char(10)", Types.CHAR, HCatFieldSchema.Type.STRING, "string to test",
+        "string to test", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING,
+        "string to test", "string to test", KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testBinaryTypes() throws Exception {
+    ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 1, 2 });
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "binary(10)", Types.BINARY, HCatFieldSchema.Type.BINARY,
+        bb.array(), bb.array(), KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "longvarbinary", Types.BINARY, HCatFieldSchema.Type.BINARY,
+        bb.array(), bb.array(), KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testColumnProjection() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        null, null, KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    List<String> cfgParams = new ArrayList<String>();
+    cfgParams.add("-D");
+    cfgParams.add(SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP
+      + "=true");
+    setConfigParams(cfgParams);
+    String[] colNames = new String[] { "ID", "MSG" };
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, colNames);
+  }
+
+  public void testColumnProjectionMissingPartKeys() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        null, null, KeyType.DYNAMIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    List<String> cfgParams = new ArrayList<String>();
+    cfgParams.add("-D");
+    cfgParams.add(SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP
+      + "=true");
+    setConfigParams(cfgParams);
+    String[] colNames = new String[] { "ID", "MSG" };
+    try {
+      runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, colNames);
+      fail("Column projection with missing dynamic partition keys must fail");
+    } catch (Throwable t) {
+      LOG.info("Job fails as expected : " + t);
+      StringWriter sw = new StringWriter();
+      t.printStackTrace(new PrintWriter(sw));
+      LOG.info("Exception stack trace = " + sw);
+    }
+  }
+
+  public void testStaticPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testDynamicPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.DYNAMIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testStaticAndDynamicPartitioning() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.DYNAMIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  /**
+   * Test other file formats.
+   */
+  public void testSequenceFile() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "1",
+            "1", KeyType.STATIC_KEY),
+        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+            "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "2",
+            "2", KeyType.DYNAMIC_KEY), };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    setExtraArgs(addlArgsArray);
+    utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testTextFile() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.DYNAMIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col0");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("1");
+    setExtraArgs(addlArgsArray);
+    utils.setStorageInfo(HCatalogTestUtils.STORED_AS_TEXT);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testTableCreation() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.STATIC_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.DYNAMIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--create-hcatalog-table");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true);
+  }
+
+  public void testTableCreationWithPartition() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.STATIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col1");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("2");
+    addlArgsArray.add("--create-hcatalog-table");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true);
+  }
+
+  public void testTableCreationWithStorageStanza() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "1", "1", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "2", "2", KeyType.STATIC_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-partition-key");
+    addlArgsArray.add("col1");
+    addlArgsArray.add("--hive-partition-value");
+    addlArgsArray.add("2");
+    addlArgsArray.add("--create-hcatalog-table");
+    addlArgsArray.add("--hcatalog-storage-stanza");
+    addlArgsArray.add(HCatalogTestUtils.STORED_AS_TEXT);
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true);
+  }
+
+  public void testHiveDropDelims() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-drop-import-delims");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testHiveDelimsReplacement() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "^^^Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING,
+        "^^^Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
+    };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--hive-delims-replacement");
+    addlArgsArray.add("^");
+    setExtraArgs(addlArgsArray);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testDynamicKeyInMiddle() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "1",
+        "1", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "2",
+        "2", KeyType.DYNAMIC_KEY), };
+    List<String> addlArgsArray = new ArrayList<String>();
+    setExtraArgs(addlArgsArray);
+    utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
+    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
+  }
+
+  public void testCreateTableWithPreExistingTable() throws Exception {
+    final int TOTAL_RECORDS = 1 * 10;
+    String table = getTableName().toUpperCase();
+    ColumnGenerator[] cols = new ColumnGenerator[] {
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "1",
+        "1", KeyType.NOT_A_KEY),
+      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
+        "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, "2",
+        "2", KeyType.DYNAMIC_KEY), };
+    List<String> addlArgsArray = new ArrayList<String>();
+    addlArgsArray.add("--create-hcatalog-table");
+    setExtraArgs(addlArgsArray);
+    try {
+      // Precreate table
+      utils.createHCatTable(CreateMode.CREATE, TOTAL_RECORDS, table, cols);
+      runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true);
+      fail("HCatalog job with --create-hcatalog-table and pre-existing"
+        + " table should fail");
+    } catch (Exception e) {
+      LOG.debug("Caught expected exception while running "
+        + " create-hcatalog-table with pre-existing table test", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
new file mode 100644
index 0000000..ddae5f5
--- /dev/null
+++ b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
@@ -0,0 +1,855 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hcat;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.sqoop.config.ConfigurationConstants;
+import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
+import org.junit.Assert;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
+import com.cloudera.sqoop.testutil.CommonArgs;
+
+/**
+ * HCatalog common test utilities.
+ *
+ */
+public final class HCatalogTestUtils {
+  protected Configuration conf;
+  private static List<HCatRecord> recsToLoad = new ArrayList<HCatRecord>();
+  private static List<HCatRecord> recsRead = new ArrayList<HCatRecord>();
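+  // These lists are shared between the test driver and the mapper classes
+  // below; this assumes the tests run with the local job runner (see the note
+  // on readRecordCount), so map tasks execute in the same JVM as the test.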
+  private static final Log LOG = LogFactory.getLog(HCatalogTestUtils.class);
+  private FileSystem fs;
+  private final SqoopHCatUtilities utils = SqoopHCatUtilities.instance();
+  private static final double DELTAVAL = 1e-10;
+  public static final String SQOOP_HCATALOG_TEST_ARGS =
+    "sqoop.hcatalog.test.args";
+  private final boolean initialized = false;
+  private static String storageInfo = null;
+  public static final String STORED_AS_RCFILE = "stored as\n\trcfile\n";
+  public static final String STORED_AS_SEQFILE = "stored as\n\tsequencefile\n";
+  public static final String STORED_AS_TEXT = "stored as\n\ttextfile\n";
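+  // The storage stanza above is appended verbatim to the generated
+  // create-table DDL (see getHCatCreateTableCmd); RCFile is used when no
+  // stanza has been set explicitly.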
+
+  private HCatalogTestUtils() {
+  }
+
+  private static final class Holder {
+    @SuppressWarnings("synthetic-access")
+    private static final HCatalogTestUtils INSTANCE = new HCatalogTestUtils();
+
+    private Holder() {
+    }
+  }
+
+  @SuppressWarnings("synthetic-access")
+  public static HCatalogTestUtils instance() {
+    return Holder.INSTANCE;
+  }
+
+  public void initUtils() throws IOException, MetaException {
+    if (initialized) {
+      return;
+    }
+    conf = new Configuration();
+    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
+      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
+    }
+    fs = FileSystem.get(conf);
+    fs.initialize(fs.getWorkingDirectory().toUri(), conf);
+    storageInfo = null;
+    SqoopHCatUtilities.setTestMode(true);
+  }
+
+  public static String getStorageInfo() {
+    if (null != storageInfo && storageInfo.length() > 0) {
+      return storageInfo;
+    } else {
+      return STORED_AS_RCFILE;
+    }
+  }
+
+  public void setStorageInfo(String info) {
+    storageInfo = info;
+  }
+
+  private static String getDropTableCmd(final String dbName,
+    final String tableName) {
+    return "DROP TABLE IF EXISTS " + dbName.toLowerCase() + "."
+      + tableName.toLowerCase();
+  }
+
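+  /**
+   * Builds the HCatalog CLI create-table command for the given columns and
+   * partition keys. Illustrative output for a two-column table with one
+   * partition key and the default storage stanza:
+   *
+   * <pre>
+   * create table default.foo (
+   *     id int,
+   *     msg string)
+   * partitioned by (
+   *     col0 string)
+   * stored as
+   *     rcfile
+   * </pre>
+   */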
+  private static String getHCatCreateTableCmd(String dbName,
+    String tableName, List<HCatFieldSchema> tableCols,
+    List<HCatFieldSchema> partKeys) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("create table ").append(dbName.toLowerCase()).append('.');
+    sb.append(tableName.toLowerCase()).append(" (\n\t");
+    for (int i = 0; i < tableCols.size(); ++i) {
+      HCatFieldSchema hfs = tableCols.get(i);
+      if (i > 0) {
+        sb.append(",\n\t");
+      }
+      sb.append(hfs.getName().toLowerCase());
+      sb.append(' ').append(hfs.getTypeString());
+    }
+    sb.append(")\n");
+    if (partKeys != null && partKeys.size() > 0) {
+      sb.append("partitioned by (\n\t");
+      for (int i = 0; i < partKeys.size(); ++i) {
+        HCatFieldSchema hfs = partKeys.get(i);
+        if (i > 0) {
+          sb.append("\n\t,");
+        }
+        sb.append(hfs.getName().toLowerCase());
+        sb.append(' ').append(hfs.getTypeString());
+      }
+      sb.append(")\n");
+    }
+    sb.append(getStorageInfo());
+    LOG.info("Create table command : " + sb);
+    return sb.toString();
+  }
+
+  /**
+   * Creates an HCatalog table with the given columns and partition keys,
+   * dropping any pre-existing table of the same name.
+   */
+  public void createHCatTableUsingSchema(String dbName,
+    String tableName, List<HCatFieldSchema> tableCols,
+    List<HCatFieldSchema> partKeys)
+    throws Exception {
+
+    String databaseName = dbName == null
+      ? SqoopHCatUtilities.DEFHCATDB : dbName;
+    LOG.info("Dropping HCatalog table if it exists " + databaseName
+      + '.' + tableName);
+    String dropCmd = getDropTableCmd(databaseName, tableName);
+
+    try {
+      utils.launchHCatCli(dropCmd);
+    } catch (Exception e) {
+      LOG.debug("Drop hcatalog table exception : " + e);
+      LOG.info("Unable to drop table." + dbName + "."
+        + tableName + ".   Assuming it did not exist");
+    }
+    LOG.info("Creating HCatalog table if it exists " + databaseName
+      + '.' + tableName);
+    String createCmd = getHCatCreateTableCmd(databaseName, tableName,
+      tableCols, partKeys);
+    utils.launchHCatCli(createCmd);
+    LOG.info("Created HCatalog table " + dbName + "." + tableName);
+  }
+
+  /**
+   * The record writer mapper for HCatalog tables that writes records from an
+   * in-memory list.
+   */
+  public static class HCatWriterMapper extends
+    Mapper<LongWritable, Text, BytesWritable, HCatRecord> {
+
+    private static int writtenRecordCount = 0;
+
+    public static int getWrittenRecordCount() {
+      return writtenRecordCount;
+    }
+
+    public static void setWrittenRecordCount(int count) {
+      HCatWriterMapper.writtenRecordCount = count;
+    }
+
+    @Override
+    public void map(LongWritable key, Text value,
+      Context context)
+      throws IOException, InterruptedException {
+      try {
+        HCatRecord rec = recsToLoad.get(writtenRecordCount);
+        context.write(null, rec);
+        writtenRecordCount++;
+      } catch (Exception e) {
+        if (LOG.isDebugEnabled()) {
+          e.printStackTrace(System.err);
+        }
+        throw new IOException(e);
+      }
+    }
+  }
+
+  /**
+   * The record reader mapper for HCatalog tables that reads records into an
+   * in-memory list.
+   */
+  public static class HCatReaderMapper extends
+    Mapper<WritableComparable, HCatRecord, BytesWritable, Text> {
+
+    // A static counter is safe because the tests run with the local job runner.
+    private static int readRecordCount = 0;
+
+    public static int getReadRecordCount() {
+      return readRecordCount;
+    }
+
+    public static void setReadRecordCount(int count) {
+      HCatReaderMapper.readRecordCount = count;
+    }
+
+    @Override
+    public void map(WritableComparable key, HCatRecord value,
+      Context context) throws IOException, InterruptedException {
+      try {
+        recsRead.add(value);
+        readRecordCount++;
+      } catch (Exception e) {
+        if (LOG.isDebugEnabled()) {
+          e.printStackTrace(System.err);
+        }
+        throw new IOException(e);
+      }
+    }
+  }
+
+  private void createInputFile(Path path, int rowCount)
+    throws IOException {
+    if (fs.exists(path)) {
+      fs.delete(path, true);
+    }
+    FSDataOutputStream os = fs.create(path);
+    for (int i = 0; i < rowCount; i++) {
+      String s = i + "\n";
+      os.writeChars(s);
+    }
+    os.close();
+  }
+
+  public List<HCatRecord> loadHCatTable(String dbName,
+    String tableName, Map<String, String> partKeyMap,
+    HCatSchema tblSchema, List<HCatRecord> records)
+    throws Exception {
+
+    Job job = new Job(conf, "HCat load job");
+
+    job.setJarByClass(this.getClass());
+    job.setMapperClass(HCatWriterMapper.class);
+
+
+    // Write one input line per record so the mapper emits each record once.
+    Path path = new Path(fs.getWorkingDirectory(),
+      "mapreduce/HCatTableIndexInput");
+
+    job.getConfiguration()
+      .setInt(ConfigurationConstants.PROP_MAPRED_MAP_TASKS, 1);
+    int writeCount = records.size();
+    recsToLoad.clear();
+    recsToLoad.addAll(records);
+    createInputFile(path, writeCount);
+    // input/output settings
+    HCatWriterMapper.setWrittenRecordCount(0);
+
+    FileInputFormat.setInputPaths(job, path);
+    job.setInputFormatClass(TextInputFormat.class);
+    job.setOutputFormatClass(HCatOutputFormat.class);
+    OutputJobInfo outputJobInfo = OutputJobInfo.create(dbName, tableName,
+      partKeyMap);
+
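+    // Static partition key values (if any) are supplied via OutputJobInfo;
+    // dynamic partition values are carried in the records themselves.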
+    HCatOutputFormat.setOutput(job, outputJobInfo);
+    HCatOutputFormat.setSchema(job, tblSchema);
+    job.setMapOutputKeyClass(BytesWritable.class);
+    job.setMapOutputValueClass(DefaultHCatRecord.class);
+
+    job.setNumReduceTasks(0);
+    SqoopHCatUtilities.addJars(job, new SqoopOptions());
+    boolean success = job.waitForCompletion(true);
+
+    if (!success) {
+      throw new IOException("Loading HCatalog table with test records failed");
+    }
+    utils.invokeOutputCommitterForLocalMode(job);
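+    // Explicitly drive the output committer (the local-mode runner may not),
+    // so that the records loaded above are published to the HCatalog table.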
+    LOG.info("Loaded " + HCatWriterMapper.writtenRecordCount + " records");
+    return recsToLoad;
+  }
+
+  /**
+   * Run a local map reduce job to read records from an HCatalog table.
+   * @param dbName the HCatalog database name
+   * @param tableName the HCatalog table to read
+   * @param filter an optional HCatalog partition filter expression
+   * @return the list of records read
+   * @throws Exception
+   */
+  public List<HCatRecord> readHCatRecords(String dbName,
+    String tableName, String filter) throws Exception {
+
+    HCatReaderMapper.setReadRecordCount(0);
+    recsRead.clear();
+
+    // Configuration conf = new Configuration();
+    Job job = new Job(conf, "HCatalog reader job");
+    job.setJarByClass(this.getClass());
+    job.setMapperClass(HCatReaderMapper.class);
+    job.getConfiguration()
+      .setInt(ConfigurationConstants.PROP_MAPRED_MAP_TASKS, 1);
+    // input/output settings
+    job.setInputFormatClass(HCatInputFormat.class);
+    job.setOutputFormatClass(TextOutputFormat.class);
+
+    HCatInputFormat.setInput(job, dbName, tableName).setFilter(filter);
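+    // A non-null filter is an HCatalog partition filter expression; it
+    // selects partitions to read rather than individual rows.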
+
+    job.setMapOutputKeyClass(BytesWritable.class);
+    job.setMapOutputValueClass(Text.class);
+
+    job.setNumReduceTasks(0);
+
+    Path path = new Path(fs.getWorkingDirectory(),
+      "mapreduce/HCatTableIndexOutput");
+    if (fs.exists(path)) {
+      fs.delete(path, true);
+    }
+
+    FileOutputFormat.setOutputPath(job, path);
+
+    job.waitForCompletion(true);
+    LOG.info("Read " + HCatReaderMapper.readRecordCount + " records");
+
+    return recsRead;
+  }
+
+  /**
+   * An enumeration type to hold the partition key type of the ColumnGenerator
+   * defined columns.
+   */
+  public enum KeyType {
+    NOT_A_KEY,
+    STATIC_KEY,
+    DYNAMIC_KEY
+  };
+
+  /**
+   * An enumeration type to hold the creation mode of the HCatalog table.
+   */
+  public enum CreateMode {
+    NO_CREATION,
+    CREATE,
+    CREATE_AND_LOAD,
+  };
+
+  /**
+   * When generating data for export tests, each column is generated according
+   * to a ColumnGenerator.
+   */
+  public interface ColumnGenerator {
+    /** The column name. */
+    String getName();
+
+    /**
+     * For a row with id rowNum, what should we write into that HCatalog column
+     * to export?
+     */
+    Object getHCatValue(int rowNum);
+
+    /**
+     * For a row with id rowNum, what should the database return for the given
+     * column's value?
+     */
+    Object getDBValue(int rowNum);
+
+    /** Return the column type to put in the CREATE TABLE statement. */
+    String getDBTypeString();
+
+    /** Return the SqlType for this column. */
+    int getSqlType();
+
+    /** Return the HCat type for this column. */
+    HCatFieldSchema.Type getHCatType();
+
+
+    /**
+     * If the field is a partition key, indicates whether it is part of the
+     * static partitioning specification for imports or exports. Only one key
+     * can be a static partitioning key; after the first column marked as
+     * static, the remaining keys are treated as dynamic even if they are
+     * marked static.
+     */
+    KeyType getKeyType();
+  }
+
+  /**
+   * Return the column name for a column index. Each table contains two columns
+   * named 'id' and 'msg', and then an arbitrary number of additional columns
+   * defined by ColumnGenerators. These columns are referenced by index 0, 1,
+   * 2, and so on.
+   * @param idx
+   *          the index of the ColumnGenerator in the array passed to
+   *          createTable().
+   * @return the name of the column
+   */
+  public static String forIdx(int idx) {
+    return "col" + idx;
+  }
+
+  public static ColumnGenerator colGenerator(final String name,
+    final String dbType, final int sqlType,
+    final HCatFieldSchema.Type hCatType, final Object hCatValue,
+    final Object dbValue, final KeyType keyType) {
+    return new ColumnGenerator() {
+
+      @Override
+      public String getName() {
+        return name;
+      }
+
+      @Override
+      public Object getDBValue(int rowNum) {
+        return dbValue;
+      }
+
+      @Override
+      public Object getHCatValue(int rowNum) {
+        return hCatValue;
+      }
+
+      @Override
+      public String getDBTypeString() {
+        return dbType;
+      }
+
+      @Override
+      public int getSqlType() {
+        return sqlType;
+      }
+
+      @Override
+      public HCatFieldSchema.Type getHCatType() {
+        return hCatType;
+      }
+
+      @Override
+      public KeyType getKeyType() {
+        return keyType;
+      }
+
+    };
+  }
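+
+  // Illustrative (hypothetical) use of the colGenerator factory above for a
+  // non-key BOOLEAN column; real tests pass values suited to the test table:
+  //   ColumnGenerator gen = colGenerator(forIdx(0), "BOOLEAN", Types.BOOLEAN,
+  //     HCatFieldSchema.Type.BOOLEAN, Boolean.TRUE, Boolean.TRUE,
+  //     KeyType.NOT_A_KEY);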
+
+  public static void assertEquals(Object expectedVal,
+    Object actualVal) {
+
+    if (expectedVal != null && expectedVal instanceof byte[]) {
+      Assert
+        .assertArrayEquals((byte[]) expectedVal, (byte[]) actualVal);
+    } else {
+      if (expectedVal instanceof Float) {
+        if (actualVal instanceof Double) {
+          Assert.assertEquals(((Float) expectedVal).floatValue(),
+            ((Double) actualVal).doubleValue(), DELTAVAL);
+        } else {
+          Assert
+            .assertEquals("Got unexpected column value", expectedVal,
+              actualVal);
+        }
+      } else if (expectedVal instanceof Double) {
+        if (actualVal instanceof Float) {
+          Assert.assertEquals(((Double) expectedVal).doubleValue(),
+            ((Float) actualVal).doubleValue(), DELTAVAL);
+        } else {
+          Assert
+            .assertEquals("Got unexpected column value", expectedVal,
+              actualVal);
+        }
+      } else {
+        Assert
+          .assertEquals("Got unexpected column value", expectedVal,
+            actualVal);
+      }
+    }
+  }
+
+  /**
+   * Verify that on a given row, a column has a given value.
+   *
+   * @param id
+   *          the id column specifying the row to test.
+   */
+  public void assertSqlColValForRowId(Connection conn,
+    String table, int id, String colName,
+    Object expectedVal) throws SQLException {
+    LOG.info("Verifying column " + colName + " has value " + expectedVal);
+
+    PreparedStatement statement = conn.prepareStatement(
+      "SELECT " + colName + " FROM " + table + " WHERE id = " + id,
+      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    Object actualVal = null;
+    try {
+      ResultSet rs = statement.executeQuery();
+      try {
+        rs.next();
+        actualVal = rs.getObject(1);
+      } finally {
+        rs.close();
+      }
+    } finally {
+      statement.close();
+    }
+
+    assertEquals(expectedVal, actualVal);
+  }
+
+  /**
+   * Verify that on a given row, a column has a given value.
+   *
+   * @param id
+   *          the id column specifying the row to test.
+   */
+  public static void assertHCatColValForRowId(List<HCatRecord> recs,
+    HCatSchema schema, int id, String fieldName,
+    Object expectedVal) throws IOException {
+    LOG.info("Verifying field " + fieldName + " has value " + expectedVal);
+
+    Object actualVal = null;
+    for (HCatRecord rec : recs) {
+      if (rec.getInteger("id", schema).equals(id)) {
+        actualVal = rec.get(fieldName, schema);
+        break;
+      }
+    }
+    if (actualVal == null) {
+      throw new IOException("No record found with id = " + id);
+    }
+    if (expectedVal != null && expectedVal instanceof byte[]) {
+      Assert
+        .assertArrayEquals((byte[]) expectedVal, (byte[]) actualVal);
+    } else {
+      if (expectedVal instanceof Float) {
+        if (actualVal instanceof Double) {
+          Assert.assertEquals(((Float) expectedVal).floatValue(),
+            ((Double) actualVal).doubleValue(), DELTAVAL);
+        } else {
+          Assert
+            .assertEquals("Got unexpected column value", expectedVal,
+              actualVal);
+        }
+      } else if (expectedVal instanceof Double) {
+        if (actualVal instanceof Float) {
+          Assert.assertEquals(((Double) expectedVal).doubleValue(),
+            ((Float) actualVal).doubleValue(), DELTAVAL);
+        } else {
+          Assert
+            .assertEquals("Got unexpected column value", expectedVal,
+              actualVal);
+        }
+      } else {
+        Assert
+          .assertEquals("Got unexpected column value", expectedVal,
+            actualVal);
+      }
+    }
+  }
+
+  /**
+   * Return a SQL statement that drops a table, if it exists.
+   *
+   * @param tableName
+   *          the table to drop.
+   * @return the SQL statement to drop that table.
+   */
+  public static String getSqlDropTableStatement(String tableName) {
+    return "DROP TABLE " + tableName + " IF EXISTS";
+  }
+
+  public static String getSqlCreateTableStatement(String tableName,
+    ColumnGenerator... extraCols) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("CREATE TABLE ");
+    sb.append(tableName);
+    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
+    int colNum = 0;
+    for (ColumnGenerator gen : extraCols) {
+      sb.append(", " + forIdx(colNum++) + " " + gen.getDBTypeString());
+    }
+    sb.append(")");
+    String cmd = sb.toString();
+    LOG.debug("Generated SQL create table command : " + cmd);
+    return cmd;
+  }
+
+  public static String getSqlInsertTableStatement(String tableName,
+    ColumnGenerator... extraCols) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("INSERT INTO ");
+    sb.append(tableName);
+    sb.append(" (id, msg");
+    int colNum = 0;
+    for (ColumnGenerator gen : extraCols) {
+      sb.append(", " + forIdx(colNum++));
+    }
+    sb.append(") VALUES ( ?, ?");
+    for (int i = 0; i < extraCols.length; ++i) {
+      sb.append(",?");
+    }
+    sb.append(")");
+    String s = sb.toString();
+    LOG.debug("Generated SQL insert table command : " + s);
+    return s;
+  }
+
+  public void createSqlTable(Connection conn, boolean generateOnly,
+    int count, String table, ColumnGenerator... extraCols)
+    throws Exception {
+    PreparedStatement statement = conn.prepareStatement(
+      getSqlDropTableStatement(table),
+      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+    statement = conn.prepareStatement(
+      getSqlCreateTableStatement(table, extraCols),
+      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+    if (!generateOnly) {
+      loadSqlTable(conn, table, count, extraCols);
+    }
+  }
+
+  public HCatSchema createHCatTable(CreateMode mode, int count,
+    String table, ColumnGenerator... extraCols)
+    throws Exception {
+    HCatSchema hCatTblSchema = generateHCatTableSchema(extraCols);
+    HCatSchema hCatPartSchema = generateHCatPartitionSchema(extraCols);
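+    // The full schema is the table columns followed by the partition columns,
+    // mirroring how HCatalog exposes partition keys as trailing record fields.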
+    HCatSchema hCatFullSchema = new HCatSchema(hCatTblSchema.getFields());
+    for (HCatFieldSchema hfs : hCatPartSchema.getFields()) {
+      hCatFullSchema.append(hfs);
+    }
+    if (mode != CreateMode.NO_CREATION) {
+
+      createHCatTableUsingSchema(null, table,
+        hCatTblSchema.getFields(), hCatPartSchema.getFields());
+      if (mode == CreateMode.CREATE_AND_LOAD) {
+        HCatSchema hCatLoadSchema = new HCatSchema(hCatTblSchema.getFields());
+        HCatSchema dynPartSchema =
+          generateHCatDynamicPartitionSchema(extraCols);
+        for (HCatFieldSchema hfs : dynPartSchema.getFields()) {
+          hCatLoadSchema.append(hfs);
+        }
+        loadHCatTable(hCatLoadSchema, table, count, extraCols);
+      }
+    }
+    return hCatFullSchema;
+  }
+
+  private void loadHCatTable(HCatSchema hCatSchema, String table,
+    int count, ColumnGenerator... extraCols)
+    throws Exception {
+    Map<String, String> staticKeyMap = new HashMap<String, String>();
+    for (ColumnGenerator col : extraCols) {
+      if (col.getKeyType() == KeyType.STATIC_KEY) {
+        staticKeyMap.put(col.getName(), (String) col.getHCatValue(0));
+      }
+    }
+    loadHCatTable(null, table, staticKeyMap,
+      hCatSchema, generateHCatRecords(count, hCatSchema, extraCols));
+  }
+
+  private void loadSqlTable(Connection conn, String table, int count,
+    ColumnGenerator... extraCols) throws Exception {
+    PreparedStatement statement = conn.prepareStatement(
+      getSqlInsertTableStatement(table, extraCols),
+      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      for (int i = 0; i < count; ++i) {
+        statement.setObject(1, i, Types.INTEGER);
+        statement.setObject(2, "textfield" + i, Types.VARCHAR);
+        for (int j = 0; j < extraCols.length; ++j) {
+          statement.setObject(j + 3, extraCols[j].getDBValue(i),
+            extraCols[j].getSqlType());
+        }
+        statement.executeUpdate();
+      }
+      if (!conn.getAutoCommit()) {
+        conn.commit();
+      }
+    } finally {
+      statement.close();
+    }
+  }
+
+  private HCatSchema generateHCatTableSchema(ColumnGenerator... extraCols)
+    throws Exception {
+    List<HCatFieldSchema> hCatTblCols = new ArrayList<HCatFieldSchema>();
+    hCatTblCols.clear();
+    hCatTblCols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, ""));
+    hCatTblCols
+      .add(new HCatFieldSchema("msg", HCatFieldSchema.Type.STRING, ""));
+    for (ColumnGenerator gen : extraCols) {
+      if (gen.getKeyType() == KeyType.NOT_A_KEY) {
+        hCatTblCols
+          .add(new HCatFieldSchema(gen.getName(), gen.getHCatType(), ""));
+      }
+    }
+    HCatSchema hCatTblSchema = new HCatSchema(hCatTblCols);
+    return hCatTblSchema;
+  }
+
+  private HCatSchema generateHCatPartitionSchema(ColumnGenerator... extraCols)
+    throws Exception {
+    List<HCatFieldSchema> hCatPartCols = new ArrayList<HCatFieldSchema>();
+
+    for (ColumnGenerator gen : extraCols) {
+      if (gen.getKeyType() != KeyType.NOT_A_KEY) {
+        hCatPartCols
+          .add(new HCatFieldSchema(gen.getName(), gen.getHCatType(), ""));
+      }
+    }
+    HCatSchema hCatPartSchema = new HCatSchema(hCatPartCols);
+    return hCatPartSchema;
+  }
+
+  private HCatSchema generateHCatDynamicPartitionSchema(
+    ColumnGenerator... extraCols) throws Exception {
+    List<HCatFieldSchema> hCatPartCols = new ArrayList<HCatFieldSchema>();
+    hCatPartCols.clear();
+    boolean staticFound = false;
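+    // The first column marked STATIC_KEY becomes the static partition and is
+    // excluded here; any further STATIC_KEY columns are treated as dynamic,
+    // matching the contract documented on ColumnGenerator.getKeyType().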
+    for (ColumnGenerator gen : extraCols) {
+      if (gen.getKeyType() != KeyType.NOT_A_KEY) {
+        if (gen.getKeyType() == KeyType.STATIC_KEY && !staticFound) {
+          staticFound = true;
+          continue;
+        }
+        hCatPartCols
+          .add(new HCatFieldSchema(gen.getName(), gen.getHCatType(), ""));
+      }
+    }
+    HCatSchema hCatPartSchema = new HCatSchema(hCatPartCols);
+    return hCatPartSchema;
+
+  }
+
+  private HCatSchema generateHCatStaticPartitionSchema(
+    ColumnGenerator... extraCols) throws Exception {
+    List<HCatFieldSchema> hCatPartCols = new ArrayList<HCatFieldSchema>();
+    hCatPartCols.clear();
+    for (ColumnGenerator gen : extraCols) {
+      if (gen.getKeyType() == KeyType.STATIC_KEY) {
+        hCatPartCols
+          .add(new HCatFieldSchema(gen.getName(), gen.getHCatType(), ""));
+        break;
+      }
+    }
+    HCatSchema hCatPartSchema = new HCatSchema(hCatPartCols);
+    return hCatPartSchema;
+  }
+
+  private List<HCatRecord> generateHCatRecords(int numRecords,
+    HCatSchema hCatTblSchema, ColumnGenerator... extraCols) throws Exception {
+    List<HCatRecord> records = new ArrayList<HCatRecord>();
+    List<HCatFieldSchema> hCatTblCols = hCatTblSchema.getFields();
+    int size = hCatTblCols.size();
+    for (int i = 0; i < numRecords; ++i) {
+      DefaultHCatRecord record = new DefaultHCatRecord(size);
+      record.set(hCatTblCols.get(0).getName(), hCatTblSchema, i);
+      record.set(hCatTblCols.get(1).getName(), hCatTblSchema, "textfield" + i);
+      boolean staticFound = false;
+      int idx = 0;
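+      // The first static partition key's value is not stored in the record;
+      // it is supplied through the static partition specification instead.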
+      for (int j = 0; j < extraCols.length; ++j) {
+        if (extraCols[j].getKeyType() == KeyType.STATIC_KEY
+          && !staticFound) {
+          staticFound = true;
+          continue;
+        }
+        record.set(hCatTblCols.get(idx + 2).getName(), hCatTblSchema,
+          extraCols[j].getHCatValue(i));
+        ++idx;
+      }
+
+      records.add(record);
+    }
+    return records;
+  }
+
+  public String hCatRecordDump(List<HCatRecord> recs,
+    HCatSchema schema) throws Exception {
+    List<String> fields = schema.getFieldNames();
+    int count = 0;
+    StringBuilder sb = new StringBuilder(1024);
+    for (HCatRecord rec : recs) {
+      sb.append("HCat Record : " + ++count).append('\n');
+      for (String field : fields) {
+        sb.append('\t').append(field).append('=');
+        sb.append(rec.get(field, schema)).append('\n');
+        sb.append("\n\n");
+      }
+    }
+    return sb.toString();
+  }
+
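+  /**
+   * Additional test arguments can be supplied as a comma-separated list of
+   * key=value pairs through the system property named by
+   * SQOOP_HCATALOG_TEST_ARGS; malformed entries are ignored.
+   */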
+  public Map<String, String> getAddlTestArgs() {
+    String addlArgs = System.getProperty(SQOOP_HCATALOG_TEST_ARGS);
+    Map<String, String> addlArgsMap = new HashMap<String, String>();
+    if (addlArgs != null) {
+      String[] argsArray = addlArgs.split(",");
+      for (String s : argsArray) {
+        String[] keyVal = s.split("=");
+        if (keyVal.length == 2) {
+          addlArgsMap.put(keyVal[0], keyVal[1]);
+        } else {
+          LOG.info("Ignoring malformed addl arg " + s);
+        }
+      }
+    }
+    return addlArgsMap;
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
new file mode 100644
index 0000000..da803d0
--- /dev/null
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -0,0 +1,251 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hcat;
+
+import junit.framework.TestCase;
+
+import org.junit.Before;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.tool.ExportTool;
+import com.cloudera.sqoop.tool.ImportTool;
+
+/**
+ * Test basic HCatalog related features.
+ */
+public class TestHCatalogBasic extends TestCase {
+  private static ImportTool importTool;
+  private static ExportTool exportTool;
+
+  @Before
+  @Override
+  public void setUp() {
+    importTool = new ImportTool();
+    exportTool = new ExportTool();
+  }
+  private SqoopOptions parseImportArgs(String[] argv) throws Exception {
+    SqoopOptions opts = importTool.parseArguments(argv, null, null, false);
+    return opts;
+  }
+
+  private SqoopOptions parseExportArgs(String[] argv) throws Exception {
+    SqoopOptions opts = exportTool.parseArguments(argv, null, null, false);
+    return opts;
+  }
+
+  public void testHCatalogHomeWithImport() throws Exception {
+    String[] args = {
+      "--hcatalog-home",
+      "/usr/lib/hcatalog",
+    };
+
+    SqoopOptions opts = parseImportArgs(args);
+  }
+
+  public void testHCatalogHomeWithExport() throws Exception {
+    String[] args = {
+      "--hcatalog-home",
+      "/usr/lib/hcatalog",
+    };
+
+    SqoopOptions opts = parseExportArgs(args);
+  }
+
+  public void testHCatalogImport() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+    };
+
+    SqoopOptions opts = parseImportArgs(args);
+  }
+
+  public void testHCatalogExport() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+    };
+
+    SqoopOptions opts = parseExportArgs(args);
+  }
+
+  public void testHCatImportWithTargetDir() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--target-dir",
+      "/target/dir",
+    };
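+    // --target-dir conflicts with HCatalog imports, since HCatalog manages
+    // the storage location of the table itself; validation should reject it.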
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithWarehouseDir() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--warehouse-dir",
+      "/target/dir",
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithHiveImport() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--hive-import",
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatExportWithExportDir() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--export-dir",
+      "/export/dir",
+    };
+    try {
+      SqoopOptions opts = parseExportArgs(args);
+      exportTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithDirect() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--direct",
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithSequenceFile() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-sequencefile"
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithAvroFile() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-avrofile"
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testHCatImportWithCreateTable() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+      "--create-hcatalog-table",
+    };
+    SqoopOptions opts = parseImportArgs(args);
+  }
+
+  public void testHCatImportWithStorageStanza() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+      "--hcatalog-storage-stanza",
+      "stored as textfile",
+    };
+    SqoopOptions opts = parseImportArgs(args);
+  }
+
+  public void testHCatImportWithDatabase() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+      "--hcatalog-database",
+      "default",
+    };
+    SqoopOptions opts = parseImportArgs(args);
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/testdata/hcatalog/conf/hive-log4j.properties
----------------------------------------------------------------------
diff --git a/testdata/hcatalog/conf/hive-log4j.properties b/testdata/hcatalog/conf/hive-log4j.properties
new file mode 100644
index 0000000..7fa0546
--- /dev/null
+++ b/testdata/hcatalog/conf/hive-log4j.properties
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+
+sqoop.root.logger=DEBUG,console,DRFA
+hive.root.logger=DEBUG,console,DRFA
+hcatalog.root.logger=DEBUG,console,DRFA
+sqoop.log.dir=${user.dir}/sqoop/logs
+hive.log.dir=${user.dir}/sqoop/logs/
+sqoop.log.file=sqoop.log
+hive.log.file=hive.log
+org.apache.sqoop=DEBUG, console
+org.apache.hadoop=DEBUG, console
+org.apache.hive=DEBUG, console
+org.apache.hcatalog=DEBUG, console
+
+# Define the root logger to the system property "sqoop.root.logger".
+log4j.rootLogger=${sqoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=WARN
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Roll over at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=INFO,DRFA
+log4j.category.Datastore=INFO,DRFA
+log4j.category.Datastore.Schema=INFO,DRFA
+log4j.category.JPOX.Datastore=INFO,DRFA
+log4j.category.JPOX.Plugin=INFO,DRFA
+log4j.category.JPOX.MetaData=INFO,DRFA
+log4j.category.JPOX.Query=INFO,DRFA
+log4j.category.JPOX.General=INFO,DRFA
+log4j.category.JPOX.Enhancer=INFO,DRFA
+log4j.logger.org.apache.hadoop.conf.Configuration=INFO,DRFA
+

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/testdata/hcatalog/conf/hive-site.xml
----------------------------------------------------------------------
diff --git a/testdata/hcatalog/conf/hive-site.xml b/testdata/hcatalog/conf/hive-site.xml
new file mode 100644
index 0000000..c84af28
--- /dev/null
+++ b/testdata/hcatalog/conf/hive-site.xml
@@ -0,0 +1,26 @@
+<configuration>
+  <property>
+    <name>hive.metastore.local</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${test.build.data}/sqoop/warehouse</value>
+  </property>
+  <property>
+    <name>hive.metastore.uris</name>
+    <value></value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=${test.build.data}/sqoop/metastore_db;create=true</value>
+  </property>
+  <property>
+    <name>javax.jdo.option.ConnectionDriverName</name>
+    <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+  </property>
+  <property>
+    <name>hive.querylog.location</name>
+    <value>${test.build.data}/sqoop/logs</value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/sqoop/blob/5e88d43b/testdata/hcatalog/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/testdata/hcatalog/conf/log4j.properties b/testdata/hcatalog/conf/log4j.properties
new file mode 100644
index 0000000..370fbfa
--- /dev/null
+++ b/testdata/hcatalog/conf/log4j.properties
@@ -0,0 +1,55 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.sqoop=DEBUG, console
+org.apache.hadoop=DEBUG, console
+org.apache.hive=DEBUG, console
+org.apache.hcatalog=DEBUG, console
+
+
+sqoop.root.logger=DEBUG,console,DRFA
+hive.root.logger=DEBUG,console,DRFA
+hcatalog.root.logger=DEBUG,console,DRFA
+sqoop.log.dir=${user.dir}/sqoop/logs
+sqoop.log.file=sqoop.log
+
+
+
+# Define the root logger to the system property "sqoop.root.logger".
+log4j.rootLogger=${sqoop.root.logger}
+
+#
+# DRFA
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${sqoop.log.dir}/${sqoop.log.file}
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFA.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n

