sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a..@apache.org
Subject sqoop git commit: SQOOP-2235: Sqoop2: Move PrefixContext back to mapreduce execution engine
Date Wed, 25 Mar 2015 22:09:28 GMT
Repository: sqoop
Updated Branches:
  refs/heads/sqoop2 45d1c32d7 -> 029e8ff56


SQOOP-2235: Sqoop2: Move PrefixContext back to mapreduce execution engine

(Jarek Jarcec Cecho via Abraham Elmahrek)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/029e8ff5
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/029e8ff5
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/029e8ff5

Branch: refs/heads/sqoop2
Commit: 029e8ff56a8c6630a844ee84209ea7052f3b6fd3
Parents: 45d1c32
Author: Abraham Elmahrek <abe@apache.org>
Authored: Wed Mar 25 15:09:01 2015 -0700
Committer: Abraham Elmahrek <abe@apache.org>
Committed: Wed Mar 25 15:09:01 2015 -0700

----------------------------------------------------------------------
 .../org/apache/sqoop/common/PrefixContext.java  |  97 ----------------
 .../apache/sqoop/common/TestPrefixContext.java  | 113 -------------------
 .../sqoop/connector/hdfs/TestExtractor.java     |  15 +--
 .../apache/sqoop/connector/hdfs/TestLoader.java |  17 ++-
 .../sqoop/connector/hdfs/TestPartitioner.java   |   8 +-
 .../org/apache/sqoop/job/PrefixContext.java     |  97 ++++++++++++++++
 .../sqoop/job/mr/SqoopDestroyerExecutor.java    |   2 +-
 .../apache/sqoop/job/mr/SqoopInputFormat.java   |   2 +-
 .../org/apache/sqoop/job/mr/SqoopMapper.java    |   2 +-
 .../job/mr/SqoopOutputFormatLoadExecutor.java   |   2 +-
 .../org/apache/sqoop/job/TestPrefixContext.java | 112 ++++++++++++++++++
 11 files changed, 230 insertions(+), 237 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/common/src/main/java/org/apache/sqoop/common/PrefixContext.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/sqoop/common/PrefixContext.java b/common/src/main/java/org/apache/sqoop/common/PrefixContext.java
deleted file mode 100644
index c78616d..0000000
--- a/common/src/main/java/org/apache/sqoop/common/PrefixContext.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.common;
-
-import org.apache.sqoop.classification.InterfaceAudience;
-import org.apache.sqoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.common.ImmutableContext;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- * Implementation of immutable context that is based on Hadoop configuration
- * object. Each context property is prefixed with special prefix and loaded
- * directly.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class PrefixContext implements ImmutableContext {
-
-  Configuration configuration;
-  String prefix;
-
-  public PrefixContext(Configuration configuration, String prefix) {
-    this.configuration = configuration;
-    this.prefix = prefix;
-  }
-
-  @Override
-  public String getString(String key) {
-    return configuration.get(prefix + key);
-  }
-
-  @Override
-  public String getString(String key, String defaultValue) {
-    return configuration.get(prefix + key, defaultValue);
-  }
-
-  @Override
-  public long getLong(String key, long defaultValue) {
-    return configuration.getLong(prefix + key, defaultValue);
-  }
-
-  @Override
-  public int getInt(String key, int defaultValue) {
-    return  configuration.getInt(prefix + key, defaultValue);
-  }
-
-  @Override
-  public boolean getBoolean(String key, boolean defaultValue) {
-    return configuration.getBoolean(prefix + key, defaultValue);
-  }
-
-  /*
-   * TODO: Use getter methods for retrieval instead of
-   * exposing configuration directly.
-   */
-  public Configuration getConfiguration() {
-    return configuration;
-  }
-
-  /*
-   * There is no good way to get iterator from the underlying Configuration object that would
-   * filter only the prefixed properties, so we create new Context/Map that contains only the
-   * relevant properties.
-   */
-  @Override
-  public Iterator<Map.Entry<String, String>> iterator() {
-    Map<String, String> intermediateMap = new HashMap<String, String>();
-    for(Map.Entry<String, String> entry : configuration) {
-      String key = entry.getKey();
-
-      if(key.startsWith(prefix)) {
-        intermediateMap.put(key.replaceFirst(prefix, ""), entry.getValue());
-      }
-    }
-
-    return intermediateMap.entrySet().iterator();
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/common/src/test/java/org/apache/sqoop/common/TestPrefixContext.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/sqoop/common/TestPrefixContext.java b/common/src/test/java/org/apache/sqoop/common/TestPrefixContext.java
deleted file mode 100644
index b4a4b00..0000000
--- a/common/src/test/java/org/apache/sqoop/common/TestPrefixContext.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.common;
-
-import org.apache.hadoop.conf.Configuration;
-import org.testng.annotations.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-public class TestPrefixContext {
-
-  @Test
-  public void testBlankPrefix() {
-    Configuration configuration = new Configuration();
-    configuration.set("testkey", "testvalue");
-
-    PrefixContext context = new PrefixContext(configuration, "");
-    assertEquals("testvalue", context.getString("testkey"));
-  }
-
-  @Test
-  public void testNonBlankPrefix() {
-    Configuration configuration = new Configuration();
-    configuration.set("prefix.testkey", "testvalue");
-
-    PrefixContext context = new PrefixContext(configuration, "prefix.");
-    assertEquals("testvalue", context.getString("testkey"));
-  }
-
-  @Test
-  public void testGetString() {
-    Configuration configuration = new Configuration();
-    configuration.set("p.testkey", "testvalue");
-
-    PrefixContext context = new PrefixContext(configuration, "p.");
-    assertEquals("testvalue", context.getString("testkey"));
-    assertEquals("testvalue", context.getString("testkey", "defaultValue"));
-    assertEquals("defaultValue", context.getString("wrongKey", "defaultValue"));
-  }
-
-  @Test
-  public void testGetBoolean() {
-    Configuration configuration = new Configuration();
-    configuration.set("p.testkey", "true");
-
-    PrefixContext context = new PrefixContext(configuration, "p.");
-    assertEquals(true, context.getBoolean("testkey", false));
-    assertEquals(false, context.getBoolean("wrongKey", false));
-  }
-
-  @Test
-  public void testGetInt() {
-    Configuration configuration = new Configuration();
-    configuration.set("p.testkey", "123");
-
-    PrefixContext context = new PrefixContext(configuration, "p.");
-    assertEquals(123, context.getInt("testkey", 456));
-    assertEquals(456, context.getInt("wrongKey", 456));
-  }
-
-  @Test
-  public void testGetLong() {
-    Configuration configuration = new Configuration();
-    configuration.set("p.testkey", "123");
-
-    PrefixContext context = new PrefixContext(configuration, "p.");
-    assertEquals(123l, context.getLong("testkey", 456l));
-    assertEquals(456l, context.getLong("wrongKey", 456l));
-  }
-
-  @Test
-  public void testIterator() {
-    Configuration configuration = new Configuration();
-    configuration.set("p.sqooptest1", "value");
-    configuration.set("p.sqooptest2", "value");
-
-    PrefixContext context = new PrefixContext(configuration, "p.");
-    boolean seenSqooptest1 = false;
-    boolean seenSqooptest2 = false;
-    for(Map.Entry<String, String> entry : context) {
-      if("sqooptest1".equals(entry.getKey()) && "value".equals(entry.getValue())) {
-        seenSqooptest1 = true;
-      } else if("sqooptest2".equals(entry.getKey()) && "value".equals(entry.getValue())) {
-        seenSqooptest2 = true;
-      } else {
-        fail("Found unexpected property: " + entry.getKey() + " with value " + entry.getValue());
-      }
-    }
-
-    assertTrue(seenSqooptest1);
-    assertTrue(seenSqooptest2);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestExtractor.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestExtractor.java b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestExtractor.java
index 9fcd2a8..03b13bd 100644
--- a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestExtractor.java
+++ b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestExtractor.java
@@ -23,13 +23,13 @@ import static org.testng.AssertJUnit.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.common.MutableMapContext;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
@@ -40,7 +40,6 @@ import org.apache.sqoop.schema.Schema;
 import org.apache.sqoop.schema.type.FixedPoint;
 import org.apache.sqoop.schema.type.FloatingPoint;
 import org.apache.sqoop.schema.type.Text;
-import org.testng.ITest;
 import org.testng.annotations.AfterMethod;
 import org.testng.Assert;
 import org.testng.annotations.BeforeMethod;
@@ -100,15 +99,14 @@ public class TestExtractor extends TestHdfsBase {
 
   @Test
   public void testExtractor() throws Exception {
-    Configuration conf = new Configuration();
-    PrefixContext prefixContext = new PrefixContext(conf, "org.apache.sqoop.job.connector.from.context.");
+    MutableMapContext mutableContext = new MutableMapContext(new HashMap<String, String>());
     final boolean[] visited = new boolean[NUMBER_OF_FILES * NUMBER_OF_ROWS_PER_FILE];
     Schema schema = new Schema("schema").addColumn(new FixedPoint("col1", 4L, true))
         .addColumn(new FloatingPoint("col2", 4L))
         .addColumn(new Text("col3"))
         .addColumn(new Text("col4"))
         .addColumn(new Text("col5"));
-    ExtractorContext context = new ExtractorContext(prefixContext, new DataWriter() {
+    ExtractorContext context = new ExtractorContext(mutableContext, new DataWriter() {
       @Override
       public void writeArrayRecord(Object[] array) {
         throw new AssertionError("Should not be writing array.");
@@ -156,15 +154,14 @@ public class TestExtractor extends TestHdfsBase {
 
   @Test
   public void testOverrideNull() throws Exception {
-    Configuration conf = new Configuration();
-    PrefixContext prefixContext = new PrefixContext(conf, "org.apache.sqoop.job.connector.from.context.");
+    MutableMapContext mutableContext = new MutableMapContext(new HashMap<String, String>());
     final boolean[] visited = new boolean[NUMBER_OF_FILES * NUMBER_OF_ROWS_PER_FILE];
     Schema schema = new Schema("schema").addColumn(new FixedPoint("col1", 4L, true))
         .addColumn(new FloatingPoint("col2", 4L))
         .addColumn(new Text("col3"))
         .addColumn(new Text("col4"))
         .addColumn(new Text("col5"));
-    ExtractorContext context = new ExtractorContext(prefixContext, new DataWriter() {
+    ExtractorContext context = new ExtractorContext(mutableContext, new DataWriter() {
       @Override
       public void writeArrayRecord(Object[] array) {
         int index;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestLoader.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestLoader.java b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestLoader.java
index 3b81715..688067b 100644
--- a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestLoader.java
+++ b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestLoader.java
@@ -24,6 +24,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -34,7 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.common.MutableMapContext;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.ToCompression;
 import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
@@ -102,10 +103,9 @@ public class TestLoader extends TestHdfsBase {
         .addColumn(new FloatingPoint("col2", 4L))
         .addColumn(new Text("col3"));
 
-    Configuration conf = new Configuration();
-    conf.set("org.apache.sqoop.job.connector.from.context." + HdfsConstants.WORK_DIRECTORY, outputDirectory);
-    PrefixContext prefixContext = new PrefixContext(conf, "org.apache.sqoop.job.connector.from.context.");
-    LoaderContext context = new LoaderContext(prefixContext, new DataReader() {
+    MutableMapContext mutableContext = new MutableMapContext(new HashMap<String, String>());
+    mutableContext.setString(HdfsConstants.WORK_DIRECTORY, outputDirectory);
+    LoaderContext context = new LoaderContext(mutableContext, new DataReader() {
       private long index = 0L;
 
       @Override
@@ -156,10 +156,9 @@ public class TestLoader extends TestHdfsBase {
         .addColumn(new Text("col3"))
         .addColumn(new Text("col4"));
 
-    Configuration conf = new Configuration();
-    conf.set("org.apache.sqoop.job.connector.from.context." + HdfsConstants.WORK_DIRECTORY, outputDirectory);
-    PrefixContext prefixContext = new PrefixContext(conf, "org.apache.sqoop.job.connector.from.context.");
-    LoaderContext context = new LoaderContext(prefixContext, new DataReader() {
+    MutableMapContext mutableContext = new MutableMapContext(new HashMap<String, String>());
+    mutableContext.setString(HdfsConstants.WORK_DIRECTORY, outputDirectory);
+    LoaderContext context = new LoaderContext(mutableContext, new DataReader() {
       private long index = 0L;
 
       @Override

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestPartitioner.java
----------------------------------------------------------------------
diff --git a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestPartitioner.java b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestPartitioner.java
index 4c5b0a0..ff71128 100644
--- a/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestPartitioner.java
+++ b/connector/connector-hdfs/src/test/java/org/apache/sqoop/connector/hdfs/TestPartitioner.java
@@ -23,13 +23,13 @@ import static org.testng.AssertJUnit.assertEquals;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.common.MapContext;
 import org.apache.sqoop.connector.hdfs.configuration.FromJobConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.LinkConfiguration;
 import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
@@ -94,9 +94,7 @@ public class TestPartitioner extends TestHdfsBase {
 
   @Test
   public void testPartitioner() {
-    Configuration conf = new Configuration();
-    PrefixContext prefixContext = new PrefixContext(conf, "org.apache.sqoop.job.connector.from.context.");
-    PartitionerContext context = new PartitionerContext(prefixContext, 5, null);
+    PartitionerContext context = new PartitionerContext(new MapContext(new HashMap<String, String>()), 5, null);
     LinkConfiguration linkConf = new LinkConfiguration();
     FromJobConfiguration jobConf = new FromJobConfiguration();
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/main/java/org/apache/sqoop/job/PrefixContext.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/PrefixContext.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/PrefixContext.java
new file mode 100644
index 0000000..f0588f2
--- /dev/null
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/PrefixContext.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.job;
+
+import org.apache.sqoop.classification.InterfaceAudience;
+import org.apache.sqoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sqoop.common.ImmutableContext;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Implementation of immutable context that is based on Hadoop configuration
+ * object. Each context property is prefixed with special prefix and loaded
+ * directly.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public class PrefixContext implements ImmutableContext {
+
+  Configuration configuration;
+  String prefix;
+
+  public PrefixContext(Configuration configuration, String prefix) {
+    this.configuration = configuration;
+    this.prefix = prefix;
+  }
+
+  @Override
+  public String getString(String key) {
+    return configuration.get(prefix + key);
+  }
+
+  @Override
+  public String getString(String key, String defaultValue) {
+    return configuration.get(prefix + key, defaultValue);
+  }
+
+  @Override
+  public long getLong(String key, long defaultValue) {
+    return configuration.getLong(prefix + key, defaultValue);
+  }
+
+  @Override
+  public int getInt(String key, int defaultValue) {
+    return  configuration.getInt(prefix + key, defaultValue);
+  }
+
+  @Override
+  public boolean getBoolean(String key, boolean defaultValue) {
+    return configuration.getBoolean(prefix + key, defaultValue);
+  }
+
+  /*
+   * TODO: Use getter methods for retrieval instead of
+   * exposing configuration directly.
+   */
+  public Configuration getConfiguration() {
+    return configuration;
+  }
+
+  /*
+   * There is no good way to get iterator from the underlying Configuration object that would
+   * filter only the prefixed properties, so we create new Context/Map that contains only the
+   * relevant properties.
+   */
+  @Override
+  public Iterator<Map.Entry<String, String>> iterator() {
+    Map<String, String> intermediateMap = new HashMap<String, String>();
+    for(Map.Entry<String, String> entry : configuration) {
+      String key = entry.getKey();
+
+      if(key.startsWith(prefix)) {
+        intermediateMap.put(key.replaceFirst(prefix, ""), entry.getValue());
+      }
+    }
+
+    return intermediateMap.entrySet().iterator();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopDestroyerExecutor.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopDestroyerExecutor.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopDestroyerExecutor.java
index c6ba749..b3c1ce8 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopDestroyerExecutor.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopDestroyerExecutor.java
@@ -20,7 +20,7 @@ package org.apache.sqoop.job.mr;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.job.PrefixContext;
 import org.apache.sqoop.connector.matcher.Matcher;
 import org.apache.sqoop.connector.matcher.MatcherFactory;
 import org.apache.sqoop.job.MRJobConstants;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopInputFormat.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopInputFormat.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopInputFormat.java
index db31ca4..732ee0a 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopInputFormat.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopInputFormat.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.log4j.Logger;
 import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.job.PrefixContext;
 import org.apache.sqoop.common.SqoopException;
 import org.apache.sqoop.error.code.MRExecutionError;
 import org.apache.sqoop.job.MRJobConstants;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
index d6fe6af..14fdfdc 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
@@ -33,7 +33,7 @@ import org.apache.sqoop.connector.matcher.Matcher;
 import org.apache.sqoop.connector.matcher.MatcherFactory;
 import org.apache.sqoop.job.MRJobConstants;
 import org.apache.sqoop.error.code.MRExecutionError;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.job.PrefixContext;
 import org.apache.sqoop.job.etl.Extractor;
 import org.apache.sqoop.job.etl.ExtractorContext;
 import org.apache.sqoop.etl.io.DataWriter;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopOutputFormatLoadExecutor.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopOutputFormatLoadExecutor.java b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopOutputFormatLoadExecutor.java
index fc18586..3c091a2 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopOutputFormatLoadExecutor.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopOutputFormatLoadExecutor.java
@@ -38,7 +38,7 @@ import org.apache.sqoop.connector.matcher.Matcher;
 import org.apache.sqoop.connector.matcher.MatcherFactory;
 import org.apache.sqoop.job.MRJobConstants;
 import org.apache.sqoop.error.code.MRExecutionError;
-import org.apache.sqoop.common.PrefixContext;
+import org.apache.sqoop.job.PrefixContext;
 import org.apache.sqoop.job.etl.Loader;
 import org.apache.sqoop.job.etl.LoaderContext;
 import org.apache.sqoop.etl.io.DataReader;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/029e8ff5/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestPrefixContext.java
----------------------------------------------------------------------
diff --git a/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestPrefixContext.java b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestPrefixContext.java
new file mode 100644
index 0000000..911a53f
--- /dev/null
+++ b/execution/mapreduce/src/test/java/org/apache/sqoop/job/TestPrefixContext.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.job;
+
+import org.apache.hadoop.conf.Configuration;
+import org.testng.annotations.Test;
+
+import java.util.Map;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
+
+public class TestPrefixContext {
+
+  @Test
+  public void testBlankPrefix() {
+    Configuration configuration = new Configuration();
+    configuration.set("testkey", "testvalue");
+
+    PrefixContext context = new PrefixContext(configuration, "");
+    assertEquals("testvalue", context.getString("testkey"));
+  }
+
+  @Test
+  public void testNonBlankPrefix() {
+    Configuration configuration = new Configuration();
+    configuration.set("prefix.testkey", "testvalue");
+
+    PrefixContext context = new PrefixContext(configuration, "prefix.");
+    assertEquals("testvalue", context.getString("testkey"));
+  }
+
+  @Test
+  public void testGetString() {
+    Configuration configuration = new Configuration();
+    configuration.set("p.testkey", "testvalue");
+
+    PrefixContext context = new PrefixContext(configuration, "p.");
+    assertEquals("testvalue", context.getString("testkey"));
+    assertEquals("testvalue", context.getString("testkey", "defaultValue"));
+    assertEquals("defaultValue", context.getString("wrongKey", "defaultValue"));
+  }
+
+  @Test
+  public void testGetBoolean() {
+    Configuration configuration = new Configuration();
+    configuration.set("p.testkey", "true");
+
+    PrefixContext context = new PrefixContext(configuration, "p.");
+    assertEquals(true, context.getBoolean("testkey", false));
+    assertEquals(false, context.getBoolean("wrongKey", false));
+  }
+
+  @Test
+  public void testGetInt() {
+    Configuration configuration = new Configuration();
+    configuration.set("p.testkey", "123");
+
+    PrefixContext context = new PrefixContext(configuration, "p.");
+    assertEquals(123, context.getInt("testkey", 456));
+    assertEquals(456, context.getInt("wrongKey", 456));
+  }
+
+  @Test
+  public void testGetLong() {
+    Configuration configuration = new Configuration();
+    configuration.set("p.testkey", "123");
+
+    PrefixContext context = new PrefixContext(configuration, "p.");
+    assertEquals(123l, context.getLong("testkey", 456l));
+    assertEquals(456l, context.getLong("wrongKey", 456l));
+  }
+
+  @Test
+  public void testIterator() {
+    Configuration configuration = new Configuration();
+    configuration.set("p.sqooptest1", "value");
+    configuration.set("p.sqooptest2", "value");
+
+    PrefixContext context = new PrefixContext(configuration, "p.");
+    boolean seenSqooptest1 = false;
+    boolean seenSqooptest2 = false;
+    for(Map.Entry<String, String> entry : context) {
+      if("sqooptest1".equals(entry.getKey()) && "value".equals(entry.getValue())) {
+        seenSqooptest1 = true;
+      } else if("sqooptest2".equals(entry.getKey()) && "value".equals(entry.getValue())) {
+        seenSqooptest2 = true;
+      } else {
+        fail("Found unexpected property: " + entry.getKey() + " with value " + entry.getValue());
+      }
+    }
+
+    assertTrue(seenSqooptest1);
+    assertTrue(seenSqooptest2);
+  }
+}


Mime
View raw message