sqoop-commits mailing list archives

From: b...@apache.org
Subject: [1/2] sqoop git commit: SQOOP-3309: Implement HiveServer2 client
Date: Wed, 18 Apr 2018 14:01:31 GMT
Repository: sqoop
Updated Branches:
  refs/heads/trunk 44ac3012f -> 72c5cd717


http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/hive/minicluster/KerberosAuthenticationConfiguration.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/minicluster/KerberosAuthenticationConfiguration.java b/src/test/org/apache/sqoop/hive/minicluster/KerberosAuthenticationConfiguration.java
new file mode 100644
index 0000000..549a8c6
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/minicluster/KerberosAuthenticationConfiguration.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hive.minicluster;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.sqoop.authentication.KerberosAuthenticator;
+import org.apache.sqoop.db.JdbcConnectionFactory;
+import org.apache.sqoop.db.decorator.KerberizedConnectionFactoryDecorator;
+import org.apache.sqoop.infrastructure.kerberos.KerberosConfigurationProvider;
+
+import java.security.PrivilegedAction;
+import java.util.HashMap;
+import java.util.Map;
+
+public class KerberosAuthenticationConfiguration implements AuthenticationConfiguration {
+
+  private final KerberosConfigurationProvider kerberosConfig;
+
+  private KerberosAuthenticator authenticator;
+
+  public KerberosAuthenticationConfiguration(KerberosConfigurationProvider kerberosConfig) {
+    this.kerberosConfig = kerberosConfig;
+  }
+
+  @Override
+  public Map<String, String> getAuthenticationConfig() {
+    Map<String, String> result = new HashMap<>();
+
+    result.put(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, kerberosConfig.getTestPrincipal());
+    result.put(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB.varname, kerberosConfig.getKeytabFilePath());
+    result.put(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HiveAuthFactory.AuthTypes.KERBEROS.toString());
+    result.put(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.toString());
+    result.put(YarnConfiguration.RM_PRINCIPAL, kerberosConfig.getTestPrincipal());
+
+    return result;
+  }
+
+  @Override
+  public String getUrlParams() {
+    return ";principal=" + kerberosConfig.getTestPrincipal();
+  }
+
+  @Override
+  public <T> T doAsAuthenticated(PrivilegedAction<T> action) {
+    return authenticator.authenticate().doAs(action);
+  }
+
+  @Override
+  public void init() {
+    authenticator = createKerberosAuthenticator();
+  }
+
+  @Override
+  public JdbcConnectionFactory decorateConnectionFactory(JdbcConnectionFactory connectionFactory) {
+    return new KerberizedConnectionFactoryDecorator(connectionFactory, authenticator);
+  }
+
+  private KerberosAuthenticator createKerberosAuthenticator() {
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    KerberosAuthenticator result = new KerberosAuthenticator(conf, kerberosConfig.getTestPrincipal(), kerberosConfig.getKeytabFilePath());
+    return result;
+  }
+
+}

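For reference, here is a minimal usage sketch of the new AuthenticationConfiguration contract with its Kerberos implementation. The sketch class below is not part of this commit; the KerberosConfigurationProvider argument is assumed to come from the existing MiniKdc-based test infrastructure.

package org.apache.sqoop.hive.minicluster;

import java.util.Map;

import org.apache.sqoop.infrastructure.kerberos.KerberosConfigurationProvider;

// Illustrative sketch only, not part of this commit.
public final class KerberosAuthenticationConfigurationSketch {

  // Returns the HiveConf overrides a secured HiveServer2 minicluster would start with.
  public static Map<String, String> hiveServer2Overrides(KerberosConfigurationProvider kdc) {
    AuthenticationConfiguration auth = new KerberosAuthenticationConfiguration(kdc);
    auth.init(); // logs in the test principal from the keytab via KerberosAuthenticator

    // getUrlParams() (";principal=...") would be appended to the JDBC URL, and
    // decorateConnectionFactory(...) wraps the connection factory so connections
    // are opened inside the authenticated UGI context.
    return auth.getAuthenticationConfig();
  }
}
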
http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/hive/minicluster/NoAuthenticationConfiguration.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/minicluster/NoAuthenticationConfiguration.java b/src/test/org/apache/sqoop/hive/minicluster/NoAuthenticationConfiguration.java
new file mode 100644
index 0000000..20502c9
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/minicluster/NoAuthenticationConfiguration.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hive.minicluster;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.sqoop.db.JdbcConnectionFactory;
+
+import java.security.PrivilegedAction;
+import java.util.Collections;
+import java.util.Map;
+
+public class NoAuthenticationConfiguration implements AuthenticationConfiguration {
+  @Override
+  public Map<String, String> getAuthenticationConfig() {
+    return Collections.emptyMap();
+  }
+
+  @Override
+  public String getUrlParams() {
+    return StringUtils.EMPTY;
+  }
+
+  @Override
+  public <T> T doAsAuthenticated(PrivilegedAction<T> action) {
+    return action.run();
+  }
+
+  @Override
+  public void init() {
+    // do nothing
+  }
+
+  @Override
+  public JdbcConnectionFactory decorateConnectionFactory(JdbcConnectionFactory connectionFactory) {
+    return connectionFactory;
+  }
+}

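As a rough illustration (again not part of the commit), NoAuthenticationConfiguration is the null-object variant of the same contract, letting unsecured minicluster tests share the code path used by the Kerberos and password-based configurations.

package org.apache.sqoop.hive.minicluster;

import java.security.PrivilegedAction;

// Illustrative sketch only, not part of this commit.
public final class NoAuthenticationConfigurationSketch {

  public static <T> T runWithoutAuthentication(PrivilegedAction<T> action) {
    AuthenticationConfiguration auth = new NoAuthenticationConfiguration();
    auth.init();                           // no-op
    return auth.doAsAuthenticated(action); // runs the action in the current security context
  }
}
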
http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/hive/minicluster/PasswordAuthenticationConfiguration.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/minicluster/PasswordAuthenticationConfiguration.java b/src/test/org/apache/sqoop/hive/minicluster/PasswordAuthenticationConfiguration.java
new file mode 100644
index 0000000..79881f7
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/minicluster/PasswordAuthenticationConfiguration.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hive.minicluster;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.PasswdAuthenticationProvider;
+import org.apache.sqoop.db.JdbcConnectionFactory;
+
+import javax.security.sasl.AuthenticationException;
+import java.security.PrivilegedAction;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION;
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS;
+
+public class PasswordAuthenticationConfiguration implements AuthenticationConfiguration {
+
+  private static String TEST_USERNAME;
+
+  private static String TEST_PASSWORD;
+
+  private static final class TestPasswordAuthenticationProvider implements PasswdAuthenticationProvider {
+
+    @Override
+    public void Authenticate(String user, String password) throws AuthenticationException {
+      if (!(TEST_USERNAME.equals(user) && TEST_PASSWORD.equals(password))) {
+        throw new AuthenticationException("Authentication failed!");
+      }
+    }
+  }
+
+  public PasswordAuthenticationConfiguration(String testUsername, String testPassword) {
+    TEST_USERNAME = testUsername;
+    TEST_PASSWORD = testPassword;
+  }
+
+  @Override
+  public Map<String, String> getAuthenticationConfig() {
+    Map<String, String> result = new HashMap<>();
+    result.put(HIVE_SERVER2_AUTHENTICATION.varname, HiveAuthFactory.AuthTypes.CUSTOM.getAuthName());
+    result.put(HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname, TestPasswordAuthenticationProvider.class.getName());
+
+    return result;
+  }
+
+  @Override
+  public String getUrlParams() {
+    return StringUtils.EMPTY;
+  }
+
+  @Override
+  public <T> T doAsAuthenticated(PrivilegedAction<T> action) {
+    return action.run();
+  }
+
+  @Override
+  public void init() {
+    //do nothing
+  }
+
+  @Override
+  public JdbcConnectionFactory decorateConnectionFactory(JdbcConnectionFactory connectionFactory) {
+    return connectionFactory;
+  }
+}

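A short sketch of the password-based variant follows (not part of the commit; the username and password are placeholders). With CUSTOM authentication the credentials are supplied on the JDBC connection itself, so getUrlParams() stays empty.

package org.apache.sqoop.hive.minicluster;

import java.util.Map;

// Illustrative sketch only, not part of this commit.
public final class PasswordAuthenticationConfigurationSketch {

  public static Map<String, String> hiveServer2Overrides() {
    AuthenticationConfiguration auth =
        new PasswordAuthenticationConfiguration("sqoop", "secret"); // placeholder credentials
    auth.init(); // no-op for password authentication

    // Selects the CUSTOM auth type and registers TestPasswordAuthenticationProvider,
    // which accepts exactly the username/password pair passed to the constructor above.
    return auth.getAuthenticationConfig();
  }
}
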
http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/testutil/HiveServer2TestUtil.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/testutil/HiveServer2TestUtil.java b/src/test/org/apache/sqoop/testutil/HiveServer2TestUtil.java
new file mode 100644
index 0000000..7993708
--- /dev/null
+++ b/src/test/org/apache/sqoop/testutil/HiveServer2TestUtil.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.testutil;
+
+import org.apache.sqoop.hive.HiveServer2ConnectionFactory;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+public class HiveServer2TestUtil {
+
+  private static final String SELECT_TABLE_QUERY = "SELECT * FROM %s";
+
+  private HiveServer2ConnectionFactory hs2ConnectionFactory;
+
+  public HiveServer2TestUtil(String url) {
+    this(url, null, null);
+  }
+
+  public HiveServer2TestUtil(String url, String username, String password) {
+    hs2ConnectionFactory = new HiveServer2ConnectionFactory(url, username, password);
+  }
+
+  public List<LinkedHashMap<String, Object>> loadRowsFromTable(String tableName) {
+    List<LinkedHashMap<String, Object>> result = new ArrayList<>();
+    try(Connection connection = hs2ConnectionFactory.createConnection();
+        PreparedStatement query = connection.prepareStatement(String.format(SELECT_TABLE_QUERY, tableName))) {
+
+      ResultSet resultSet = query.executeQuery();
+      ResultSetMetaData metaData = resultSet.getMetaData();
+
+      while (resultSet.next()) {
+        LinkedHashMap<String, Object> row = new LinkedHashMap<>();
+        for (int i = 1; i <= metaData.getColumnCount(); i++) {
+          row.put(metaData.getColumnName(i), resultSet.getObject(i));
+        }
+        result.add(row);
+      }
+    } catch (SQLException e) {
+      throw new RuntimeException(e);
+    }
+    return result;
+  }
+
+  public List<List<Object>> loadRawRowsFromTable(String tableName) {
+    List<List<Object>> result = new ArrayList<>();
+    List<LinkedHashMap<String, Object>> rowsWithColumnNames = loadRowsFromTable(tableName);
+
+    for (LinkedHashMap<String, Object> rowWithColumnNames : rowsWithColumnNames) {
+      result.add(new ArrayList<>(rowWithColumnNames.values()));
+    }
+
+    return result;
+  }
+
+}

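A hedged usage sketch of the new test utility (not part of the commit): after a Hive import, a test can read the target table back through HiveServer2 and assert on the returned rows. The URL, credentials, and table name are placeholders.

package org.apache.sqoop.testutil;

import java.util.List;

// Illustrative sketch only, not part of this commit.
public final class HiveServer2TestUtilSketch {

  public static List<List<Object>> importedRows(String hs2Url, String user, String password, String table) {
    // The three-argument constructor matches the password-authenticated minicluster;
    // the single-argument constructor covers the unauthenticated case.
    HiveServer2TestUtil hs2TestUtil = new HiveServer2TestUtil(hs2Url, user, password);
    return hs2TestUtil.loadRawRowsFromTable(table); // each inner list holds one row's column values in order
  }
}
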
http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/tool/TestHiveServer2OptionValidations.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/tool/TestHiveServer2OptionValidations.java b/src/test/org/apache/sqoop/tool/TestHiveServer2OptionValidations.java
new file mode 100644
index 0000000..4d3f938
--- /dev/null
+++ b/src/test/org/apache/sqoop/tool/TestHiveServer2OptionValidations.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.tool;
+
+import org.apache.sqoop.SqoopOptions;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.util.Arrays;
+import java.util.Properties;
+
+import static org.apache.sqoop.SqoopOptions.FileLayout.ParquetFile;
+import static org.apache.sqoop.SqoopOptions.IncrementalMode.None;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+@RunWith(Parameterized.class)
+public class TestHiveServer2OptionValidations {
+
+  @Parameters(name = "sqoopTool = {0}")
+  public static Iterable<? extends Object> parameters() {
+    return Arrays.asList(
+        new ImportTool(),
+        new ImportAllTablesTool(),
+        new CreateHiveTableTool());
+  }
+
+  private static final String TEST_HS2_URL = "test-hs2-url";
+  private static final String TEST_HS2_USER = "test-hs2-user";
+  private static final String TEST_HS2_KEYTAB = "test-hs2-keytab";
+  private static final String TEST_TABLE = "testtable";
+  private static final String TEST_CONNECTION_STRING = "testconnectstring";
+
+  @Rule
+  public ExpectedException expectedException = ExpectedException.none();
+
+  private final BaseSqoopTool sqoopTool;
+
+  private SqoopOptions sqoopOptions;
+
+  public TestHiveServer2OptionValidations(BaseSqoopTool sqoopTool) {
+    this.sqoopTool = spy(sqoopTool);
+  }
+
+  @Before
+  public void before() {
+    sqoopOptions = mock(SqoopOptions.class);
+    when(sqoopOptions.getTableName()).thenReturn(TEST_TABLE);
+    when(sqoopOptions.getIncrementalMode()).thenReturn(None);
+    when(sqoopOptions.getConnectString()).thenReturn(TEST_CONNECTION_STRING);
+    when(sqoopOptions.getMapColumnHive()).thenReturn(new Properties());
+
+
+    doReturn(0).when(sqoopTool).getDashPosition(any(String[].class));
+  }
+
+  @Test
+  public void testValidateOptionsThrowsWhenHs2UrlIsUsedWithoutHiveImport() throws Exception {
+    expectedException.expect(SqoopOptions.InvalidOptionsException.class);
+    expectedException.expectMessage("The hs2-url option cannot be used without the hive-import option.");
+
+    when(sqoopOptions.doHiveImport()).thenReturn(false);
+    when(sqoopOptions.getHs2Url()).thenReturn(TEST_HS2_URL);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsThrowsWhenHs2UrlIsUsedWithHCatalogImport() throws Exception {
+    expectedException.expect(SqoopOptions.InvalidOptionsException.class);
+    expectedException.expectMessage("The hs2-url option cannot be used without the hive-import option.");
+
+    when(sqoopOptions.getHCatTableName()).thenReturn(TEST_TABLE);
+    when(sqoopOptions.getHs2Url()).thenReturn(TEST_HS2_URL);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsThrowsWhenHs2UserIsUsedWithoutHs2Url() throws Exception {
+    expectedException.expect(SqoopOptions.InvalidOptionsException.class);
+    expectedException.expectMessage("The hs2-user option cannot be used without the hs2-url option.");
+
+    when(sqoopOptions.getHs2User()).thenReturn(TEST_HS2_USER);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsThrowsWhenHs2KeytabIsUsedWithoutHs2User() throws Exception {
+    expectedException.expect(SqoopOptions.InvalidOptionsException.class);
+    expectedException.expectMessage("The hs2-keytab option cannot be used without the hs2-user option.");
+
+    when(sqoopOptions.getHs2Keytab()).thenReturn(TEST_HS2_KEYTAB);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsSucceedsWhenHs2UrlIsUsedWithHiveImport() throws Exception {
+    when(sqoopOptions.doHiveImport()).thenReturn(true);
+    when(sqoopOptions.getHs2Url()).thenReturn(TEST_HS2_URL);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsSucceedsWhenHs2UrlIsUsedWithHiveImportAndHs2UserButWithoutHs2Keytab() throws Exception {
+    when(sqoopOptions.doHiveImport()).thenReturn(true);
+    when(sqoopOptions.getHs2Url()).thenReturn(TEST_HS2_URL);
+    when(sqoopOptions.getHs2User()).thenReturn(TEST_HS2_USER);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+  @Test
+  public void testValidateOptionsFailsWhenHs2UrlIsUsedWithParquetFormat() throws Exception {
+    expectedException.expect(SqoopOptions.InvalidOptionsException.class);
+    expectedException.expectMessage("The hs2-url option cannot be used with the as-parquetfile option.");
+
+    when(sqoopOptions.doHiveImport()).thenReturn(true);
+    when(sqoopOptions.getHs2Url()).thenReturn(TEST_HS2_URL);
+    when(sqoopOptions.getFileLayout()).thenReturn(ParquetFile);
+
+    sqoopTool.validateOptions(sqoopOptions);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/72c5cd71/src/test/org/apache/sqoop/tool/TestImportTool.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/tool/TestImportTool.java b/src/test/org/apache/sqoop/tool/TestImportTool.java
index 1c0cf4d..3bdc5c6 100644
--- a/src/test/org/apache/sqoop/tool/TestImportTool.java
+++ b/src/test/org/apache/sqoop/tool/TestImportTool.java
@@ -23,7 +23,6 @@ import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
@@ -33,10 +32,10 @@ import static org.mockito.Mockito.when;
 import java.sql.Connection;
 
 import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
-import org.apache.sqoop.hive.HiveImport;
 import org.apache.avro.Schema;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.avro.AvroSchemaMismatchException;
+import org.apache.sqoop.hive.HiveClientFactory;
 import org.apache.sqoop.util.ExpectedLogMessage;
 import org.junit.Assert;
 import org.junit.Rule;
@@ -75,7 +74,7 @@ public class TestImportTool {
     final String actualSchemaString = "actualSchema";
     final String errorMessage = "Import failed";
 
-    ImportTool importTool = spy(new ImportTool("import", mock(CodeGenTool.class), false));
+    ImportTool importTool = spy(new ImportTool("import", mock(CodeGenTool.class), false, mock(HiveClientFactory.class)));
 
     doReturn(true).when(importTool).init(any(SqoopOptions.class));
 
@@ -85,7 +84,7 @@ public class TestImportTool {
     when(actualSchema.toString()).thenReturn(actualSchemaString);
 
     AvroSchemaMismatchException expectedException = new AvroSchemaMismatchException(errorMessage, writtenWithSchema, actualSchema);
-    doThrow(expectedException).when(importTool).importTable(any(SqoopOptions.class), anyString(), any(HiveImport.class));
+    doThrow(expectedException).when(importTool).importTable(any(SqoopOptions.class));
 
     SqoopOptions sqoopOptions = mock(SqoopOptions.class);
     when(sqoopOptions.doHiveImport()).thenReturn(true);

