sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a..@apache.org
Subject sqoop git commit: SQOOP-2201: Sqoop2: Add possibility to read Hadoop configuration files to HDFS connector
Date Tue, 17 Mar 2015 05:44:37 GMT
Repository: sqoop
Updated Branches:
  refs/heads/sqoop2 639fdbe0a -> 6ca31c505


SQOOP-2201: Sqoop2: Add possibility to read Hadoop configuration files to HDFS connector

(Jarek Jarcec Cecho via Abraham Elmahrek)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/6ca31c50
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/6ca31c50
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/6ca31c50

Branch: refs/heads/sqoop2
Commit: 6ca31c5054331d74ba9b099c87a33bf8c081afb1
Parents: 639fdbe
Author: Abraham Elmahrek <abe@apache.org>
Authored: Mon Mar 16 22:43:23 2015 -0700
Committer: Abraham Elmahrek <abe@apache.org>
Committed: Mon Mar 16 22:43:23 2015 -0700

----------------------------------------------------------------------
 .../sqoop/connector/hdfs/HdfsExtractor.java     |  9 +--
 .../connector/hdfs/HdfsFromInitializer.java     |  7 +-
 .../apache/sqoop/connector/hdfs/HdfsLoader.java |  4 +-
 .../sqoop/connector/hdfs/HdfsPartitioner.java   |  5 +-
 .../sqoop/connector/hdfs/HdfsToInitializer.java |  7 +-
 .../apache/sqoop/connector/hdfs/HdfsUtils.java  | 61 ++++++++++++++++
 .../sqoop/connector/hdfs/TestHdfsConnector.java | 77 ++++++++++++++++++++
 .../hdfs/configuration/LinkConfig.java          |  4 +
 .../resources/hdfs-connector-config.properties  |  3 +
 .../sqoop/test/testcases/ConnectorTestCase.java |  5 ++
 .../sqoop/test/testcases/TomcatTestCase.java    |  5 ++
 .../connector/jdbc/generic/AllTypesTest.java    |  2 +
 .../jdbc/generic/FromHDFSToRDBMSTest.java       |  1 +
 .../jdbc/generic/FromRDBMSToHDFSTest.java       |  5 ++
 .../jdbc/generic/IncrementalReadTest.java       |  2 +
 .../connector/jdbc/generic/PartitionerTest.java |  1 +
 .../jdbc/generic/TableStagedRDBMSTest.java      |  1 +
 .../connector/kafka/FromHDFSToKafkaTest.java    |  1 +
 .../SubmissionWithDisabledModelObjectsTest.java |  1 +
 19 files changed, 184 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsExtractor.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsExtractor.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsExtractor.java
index 8237e51..b35c957 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsExtractor.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsExtractor.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.log4j.Logger;
-import org.apache.sqoop.common.PrefixContext;
 import org.apache.sqoop.common.SqoopException;
 import org.apache.sqoop.connector.common.SqoopIDFUtils;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
@@ -49,16 +48,14 @@ public class HdfsExtractor extends Extractor<LinkConfiguration, FromJobConfigura
 
   public static final Logger LOG = Logger.getLogger(HdfsExtractor.class);
 
-  private Configuration conf;
+  private Configuration conf = new Configuration();
   private DataWriter dataWriter;
   private Schema schema;
   private long rowsRead = 0;
 
   @Override
-  public void extract(ExtractorContext context, LinkConfiguration linkConfiguration,
-      FromJobConfiguration jobConfiguration, HdfsPartition partition) {
-
-    conf = HdfsUtils.configureURI(((PrefixContext) context.getContext()).getConfiguration(),
linkConfiguration);
+  public void extract(ExtractorContext context, LinkConfiguration linkConfiguration, FromJobConfiguration
jobConfiguration, HdfsPartition partition) {
+    HdfsUtils.contextToConfiguration(context.getContext(), conf);
     dataWriter = context.getDataWriter();
     schema = context.getSchema();
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsFromInitializer.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsFromInitializer.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsFromInitializer.java
index 0a95e07..902549d 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsFromInitializer.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsFromInitializer.java
@@ -17,6 +17,7 @@
  */
 package org.apache.sqoop.connector.hdfs;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.job.etl.Initializer;
@@ -34,8 +35,8 @@ public class HdfsFromInitializer extends Initializer<LinkConfiguration,
FromJobC
    * @param jobConfig FROM job configuration object
    */
   @Override
-  public void initialize(InitializerContext context, LinkConfiguration linkConfig,
-      FromJobConfiguration jobConfig) {
-    // do nothing at this point
+  public void initialize(InitializerContext context, LinkConfiguration linkConfig, FromJobConfiguration
jobConfig) {
+    Configuration configuration = HdfsUtils.createConfiguration(linkConfig);
+    HdfsUtils.configurationToContext(configuration, context.getContext());
   }
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsLoader.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsLoader.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsLoader.java
index cee0a91..0ced6d0 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsLoader.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsLoader.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.sqoop.common.PrefixContext;
 import org.apache.sqoop.common.SqoopException;
 import org.apache.sqoop.connector.common.SqoopIDFUtils;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
@@ -54,9 +53,10 @@ public class HdfsLoader extends Loader<LinkConfiguration, ToJobConfiguration>
{
   @Override
   public void load(LoaderContext context, LinkConfiguration linkConfiguration,
                    ToJobConfiguration toJobConfig) throws Exception {
+    Configuration conf = new Configuration();
+    HdfsUtils.contextToConfiguration(context.getContext(), conf);
 
     DataReader reader = context.getDataReader();
-    Configuration conf = HdfsUtils.configureURI(((PrefixContext) context.getContext()).getConfiguration(),
linkConfiguration);
     String directoryName = toJobConfig.toJobConfig.outputDirectory;
     String codecname = getCompressionCodecName(toJobConfig);
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsPartitioner.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsPartitioner.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsPartitioner.java
index 78fd60a..dcc1157 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsPartitioner.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsPartitioner.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.NodeBase;
-import org.apache.sqoop.common.PrefixContext;
 import org.apache.sqoop.common.SqoopException;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
@@ -71,8 +70,8 @@ public class HdfsPartitioner extends Partitioner<LinkConfiguration, FromJobConfi
   public List<Partition> getPartitions(PartitionerContext context,
                                        LinkConfiguration linkConfiguration,
                                        FromJobConfiguration fromJobConfig) {
-
-    Configuration conf = HdfsUtils.configureURI(((PrefixContext) context.getContext()).getConfiguration(),
linkConfiguration);
+    Configuration conf = new Configuration();
+    HdfsUtils.contextToConfiguration(context.getContext(), conf);
 
     try {
       long numInputBytes = getInputSize(conf, fromJobConfig.fromJobConfig.inputDirectory);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsToInitializer.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsToInitializer.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsToInitializer.java
index 991e6c9..ad500c2 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsToInitializer.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsToInitializer.java
@@ -17,6 +17,7 @@
  */
 package org.apache.sqoop.connector.hdfs;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.ToJobConfiguration;
 import org.apache.sqoop.job.etl.Initializer;
@@ -33,8 +34,8 @@ public class HdfsToInitializer extends Initializer<LinkConfiguration,
ToJobConfi
    * @param jobConfig TO job configuration object
    */
   @Override
-  public void initialize(InitializerContext context, LinkConfiguration linkConfig,
-      ToJobConfiguration jobConfig) {
-    // do nothing at this point
+  public void initialize(InitializerContext context, LinkConfiguration linkConfig, ToJobConfiguration
jobConfig) {
+    Configuration configuration = HdfsUtils.createConfiguration(linkConfig);
+    HdfsUtils.configurationToContext(configuration, context.getContext());
   }
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsUtils.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsUtils.java
index fce7728..46e09ab 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsUtils.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/HdfsUtils.java
@@ -17,16 +17,77 @@
  */
 package org.apache.sqoop.connector.hdfs;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.ImmutableContext;
+import org.apache.sqoop.common.MutableContext;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.ToJobConfiguration;
 
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.MalformedURLException;
+import java.util.Map;
+
 /**
  * Utilities for HDFS.
  */
 public class HdfsUtils {
 
+  public static final String DEFAULT_HADOOP_CONF_DIR = "/etc/hadoop/conf";
+
+  private static final Logger LOG = Logger.getLogger(HdfsUtils.class);
+  /**
+   * Create Hadoop configuration object
+   */
+  public static Configuration createConfiguration(LinkConfiguration linkConfig) {
+    Configuration configuration = new Configuration();
+    String confDir = linkConfig.linkConfig.confDir;
+
+    // If the configuration directory wasn't specified, we will use the default
+    if (StringUtils.isBlank(confDir)) {
+      confDir = DEFAULT_HADOOP_CONF_DIR;
+    }
+
+    // In case that the configuration directory is valid, load all config files
+    File dir = new File(confDir);
+    if (dir.exists() && dir.isDirectory()) {
+      String[] files = dir.list(new FilenameFilter() {
+        @Override
+        public boolean accept(File dir, String name) {
+          return name.endsWith("-site.xml");
+        }
+      });
+
+      if (files != null) {
+        for (String file : files) {
+          LOG.info("Found Hadoop configuration file " + file);
+          try {
+            configuration.addResource(new File(confDir, file).toURI().toURL());
+          } catch (MalformedURLException e) {
+            LOG.warn("Can't load configuration file: " + file, e);
+          }
+        }
+      }
+    }
+
+    return configureURI(configuration, linkConfig);
+  }
+
+  public static void configurationToContext(Configuration configuration, MutableContext context)
{
+    for (Map.Entry<String, String> entry : configuration) {
+      context.setString(entry.getKey(), entry.getValue());
+    }
+  }
+
+  public static void contextToConfiguration(ImmutableContext context, Configuration configuration)
{
+    for (Map.Entry<String, String> entry : context) {
+      configuration.set(entry.getKey(), entry.getValue());
+    }
+  }
+
   /**
    * Configures the URI to connect to.
    * @param conf Configuration object to be configured.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/TestHdfsConnector.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/TestHdfsConnector.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/TestHdfsConnector.java
new file mode 100644
index 0000000..b41bd5a
--- /dev/null
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/TestHdfsConnector.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.connector.hdfs;
+
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.model.ConfigUtils;
+import org.apache.sqoop.model.MConfig;
+import org.apache.sqoop.model.MInput;
+import org.testng.annotations.Test;
+
+import java.util.List;
+import java.util.Locale;
+import java.util.ResourceBundle;
+
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+/**
+ */
+public class TestHdfsConnector {
+
+  @Test
+  public void testBundleForLink() {
+    HdfsConnector connector = new HdfsConnector();
+    verifyBundleForConfigClass(connector.getBundle(Locale.getDefault()), connector.getLinkConfigurationClass());
+  }
+
+  @Test
+  void testBundleForJobToDirection() {
+    HdfsConnector connector = new HdfsConnector();
+    verifyBundleForConfigClass(connector.getBundle(Locale.getDefault()), connector.getJobConfigurationClass(Direction.TO));
+  }
+
+  @Test
+  void testBundleForJobFromDirection() {
+    HdfsConnector connector = new HdfsConnector();
+    verifyBundleForConfigClass(connector.getBundle(Locale.getDefault()), connector.getJobConfigurationClass(Direction.FROM));
+  }
+
+  void verifyBundleForConfigClass(ResourceBundle bundle, Class klass) {
+    assertNotNull(bundle);
+    assertNotNull(klass);
+
+    List<MConfig> configs = ConfigUtils.toConfigs(klass);
+
+    for(MConfig config : configs) {
+      assertNotNull(config.getHelpKey());
+      assertNotNull(config.getLabelKey());
+
+      assertTrue(bundle.containsKey(config.getHelpKey()), "Can't find help for " + config.getName());
+      assertTrue(bundle.containsKey(config.getLabelKey()), "Can't find label for " + config.getName());
+
+      for(MInput input : config.getInputs()) {
+        assertNotNull(input.getHelpKey());
+        assertNotNull(input.getLabelKey());
+
+        assertTrue(bundle.containsKey(input.getHelpKey()), "Can't find help for " + input.getName());
+        assertTrue(bundle.containsKey(input.getLabelKey()), "Can't find label for " + input.getName());
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/configuration/LinkConfig.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/configuration/LinkConfig.java
b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/configuration/LinkConfig.java
index 146c3b1..b54ad15 100644
--- a/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/configuration/LinkConfig.java
+++ b/connector/connector-hdfs/src/main/java/org/apache/sqoop/connector/hdfs/configuration/LinkConfig.java
@@ -22,6 +22,7 @@ import org.apache.sqoop.model.Input;
 import org.apache.sqoop.model.Validator;
 import org.apache.sqoop.validation.Status;
 import org.apache.sqoop.validation.validators.AbstractValidator;
+import org.apache.sqoop.validation.validators.DirectoryExistsValidator;
 
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -30,6 +31,9 @@ import java.util.regex.Pattern;
 public class LinkConfig {
   @Input(size = 255) public String uri;
 
+  @Input(size = 255, validators = { @Validator(DirectoryExistsValidator.class)})
+  public String confDir;
+
   public static class ConfigValidator extends AbstractValidator<LinkConfig> {
     private static final Pattern URI_PATTERN = Pattern.compile("((?<=\\()[A-Za-z][A-Za-z0-9\\+\\.\\-]*:([A-Za-z0-9\\.\\-_~:/\\?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=]|%[A-Fa-f0-9]{2})+(?=\\)))|([A-Za-z][A-Za-z0-9\\+\\.\\-]*:([A-Za-z0-9\\.\\-_~:/\\?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=]|%[A-Fa-f0-9]{2})+)");
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/connector/connector-hdfs/src/main/resources/hdfs-connector-config.properties
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/main/resources/hdfs-connector-config.properties
b/connector/connector-hdfs/src/main/resources/hdfs-connector-config.properties
index 3904856..8d5a562 100644
--- a/connector/connector-hdfs/src/main/resources/hdfs-connector-config.properties
+++ b/connector/connector-hdfs/src/main/resources/hdfs-connector-config.properties
@@ -24,6 +24,9 @@ linkConfig.help = Here you supply information necessary to connect to HDFS
 linkConfig.uri.label = HDFS URI
 linkConfig.uri.help = HDFS URI used to connect to HDFS
 
+linkConfig.confDir.label = Hadoop conf directory:
+linkConfig.confDir.help = Directory with Hadoop configuration files. The connector will load
all -site.xml files.
+
 # To Job Config
 #
 toJobConfig.label = ToJob configuration

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
index ce6af6e..80f21cc 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
@@ -144,6 +144,11 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
     configs.getStringInput("linkConfig.password").setValue(provider.getConnectionPassword());
   }
 
+  protected void fillHdfsLink(MLink link) {
+    MConfigList configs = link.getConnectorLinkConfig();
+    configs.getStringInput("linkConfig.confDir").setValue(getCluster().getConfigurationPath());
+  }
+
   /**
    * Fill TO config with specific storage and output type.
    *

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
index 2ef971d..9416473 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
@@ -30,6 +30,7 @@ import org.apache.sqoop.test.asserts.HdfsAsserts;
 import org.apache.sqoop.test.hadoop.HadoopRunner;
 import org.apache.sqoop.test.hadoop.HadoopRunnerFactory;
 import org.apache.sqoop.test.hadoop.HadoopLocalRunner;
+import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
 import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
 import org.apache.sqoop.test.utils.HdfsUtils;
 import org.testng.ITest;
@@ -159,6 +160,10 @@ abstract public class TomcatTestCase {
     return client;
   }
 
+  public SqoopMiniCluster getCluster() {
+    return cluster;
+  }
+
   public String getTemporaryPath() {
     return tmpPath;
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
index ac90eac..6823ed2 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
@@ -77,6 +77,7 @@ public class AllTypesTest extends ConnectorTestCase implements ITest {
 
     // HDFS link
     MLink hdfsConnection = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsConnection);
     saveLink(hdfsConnection);
 
     // Job creation
@@ -120,6 +121,7 @@ public class AllTypesTest extends ConnectorTestCase implements ITest {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
index a21e4a1..034ae43 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
@@ -60,6 +60,7 @@ public class FromHDFSToRDBMSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
index 5552e04..6e1e031 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
@@ -45,6 +45,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsConnection = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsConnection);
     saveLink(hdfsConnection);
 
     // Job creation
@@ -88,6 +89,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation
@@ -133,6 +135,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation
@@ -179,6 +182,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation
@@ -225,6 +229,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
index 716de30..b37cdb4 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
@@ -71,6 +71,7 @@ public class IncrementalReadTest extends ConnectorTestCase implements ITest
{
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation
@@ -121,6 +122,7 @@ public class IncrementalReadTest extends ConnectorTestCase implements
ITest {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
index f69f08c..ef9720a 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
@@ -88,6 +88,7 @@ public class PartitionerTest extends ConnectorTestCase implements ITest
{
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
index f850777..5ef7c8f 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
@@ -51,6 +51,7 @@ public class TableStagedRDBMSTest extends ConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
index 83273f1..88db2f2 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
@@ -43,6 +43,7 @@ public class FromHDFSToKafkaTest extends KafkaConnectorTestCase {
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6ca31c50/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
index 3823583..5f9f41d 100644
--- a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
@@ -70,6 +70,7 @@ public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase
{
 
     // HDFS link
     MLink hdfsLink = getClient().createLink("hdfs-connector");
+    fillHdfsLink(hdfsLink);
     saveLink(hdfsLink);
 
     // Job creation


Mime
View raw message