Skip to content

Commit 9a24fac

Browse files
[Feature][E2E] Add hive3 e2e test case (#8003)
1 parent 699d165 commit 9a24fac

File tree

10 files changed

+289
-47
lines changed

10 files changed

+289
-47
lines changed

.github/workflows/backend.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -818,7 +818,7 @@ jobs:
818818
matrix:
819819
java: [ '8', '11' ]
820820
os: [ 'ubuntu-latest' ]
821-
timeout-minutes: 150
821+
timeout-minutes: 180
822822
steps:
823823
- uses: actions/checkout@v2
824824
- name: Set up JDK ${{ matrix.java }}

seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/storage/StorageFactory.java

+5
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,18 @@
1818
package org.apache.seatunnel.connectors.seatunnel.hive.storage;
1919

2020
public class StorageFactory {
21+
2122
public static Storage getStorageType(String hiveSdLocation) {
2223
if (hiveSdLocation.startsWith(StorageType.S3.name().toLowerCase())) {
2324
return new S3Storage();
2425
} else if (hiveSdLocation.startsWith(StorageType.OSS.name().toLowerCase())) {
2526
return new OSSStorage();
2627
} else if (hiveSdLocation.startsWith(StorageType.COS.name().toLowerCase())) {
2728
return new COSStorage();
29+
} else if (hiveSdLocation.startsWith(StorageType.FILE.name().toLowerCase())) {
30+
// Currently used in e2e. When Hive uses local files as storage, "file:" needs to be
31+
// replaced with "file:/" to avoid being recognized as HDFS storage.
32+
return new HDFSStorage(hiveSdLocation.replace("file:", "file:/"));
2833
} else {
2934
return new HDFSStorage(hiveSdLocation);
3035
}

seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/storage/StorageType.java

+1
Original file line numberDiff line numberDiff line change
@@ -21,5 +21,6 @@ public enum StorageType {
2121
S3,
2222
OSS,
2323
COS,
24+
FILE,
2425
HDFS
2526
}

seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/pom.xml

+26
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,10 @@
2525
<artifactId>connector-hive-e2e</artifactId>
2626
<name>SeaTunnel : E2E : Connector V2 : Hive</name>
2727

28+
<properties>
29+
<hive.version>3.1.3</hive.version>
30+
</properties>
31+
2832
<dependencies>
2933
<dependency>
3034
<groupId>org.apache.seatunnel</groupId>
@@ -44,6 +48,28 @@
4448
<version>${project.version}</version>
4549
<classifier>optional</classifier>
4650
<scope>test</scope>
51+
<exclusions>
52+
<exclusion>
53+
<groupId>log4j</groupId>
54+
<artifactId>log4j</artifactId>
55+
</exclusion>
56+
</exclusions>
57+
</dependency>
58+
<dependency>
59+
<groupId>org.apache.hive</groupId>
60+
<artifactId>hive-jdbc</artifactId>
61+
<version>${hive.version}</version>
62+
<scope>test</scope>
63+
<exclusions>
64+
<exclusion>
65+
<groupId>log4j</groupId>
66+
<artifactId>log4j</artifactId>
67+
</exclusion>
68+
<exclusion>
69+
<groupId>org.apache.logging.log4j</groupId>
70+
<artifactId>log4j-web</artifactId>
71+
</exclusion>
72+
</exclusions>
4773
</dependency>
4874
</dependencies>
4975

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.seatunnel.e2e.connector.hive;
19+
20+
import org.apache.hadoop.hive.conf.HiveConf;
21+
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
22+
import org.apache.hadoop.hive.metastore.api.MetaException;
23+
24+
import org.testcontainers.containers.GenericContainer;
25+
import org.testcontainers.containers.output.Slf4jLogConsumer;
26+
import org.testcontainers.containers.wait.strategy.Wait;
27+
import org.testcontainers.containers.wait.strategy.WaitStrategy;
28+
import org.testcontainers.utility.DockerImageName;
29+
import org.testcontainers.utility.DockerLoggerFactory;
30+
31+
import java.sql.Connection;
32+
import java.sql.Driver;
33+
import java.sql.SQLException;
34+
import java.util.Properties;
35+
36+
public class HiveContainer extends GenericContainer<HiveContainer> {
37+
public static final String IMAGE = "apache/hive";
38+
public static final String DEFAULT_TAG = "3.1.3";
39+
40+
private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse(IMAGE);
41+
42+
public static final int HIVE_SERVER_PORT = 10000;
43+
44+
public static final int HMS_PORT = 9083;
45+
46+
private static final String SERVICE_NAME_ENV = "SERVICE_NAME";
47+
48+
private static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";
49+
50+
public HiveContainer(Role role) {
51+
super(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG));
52+
this.addExposedPorts(role.exposePort);
53+
this.addEnv(SERVICE_NAME_ENV, role.serviceName);
54+
this.setWaitStrategy(role.waitStrategy);
55+
this.withLogConsumer(
56+
new Slf4jLogConsumer(
57+
DockerLoggerFactory.getLogger(
58+
DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG).toString())));
59+
}
60+
61+
public static HiveContainer hmsStandalone() {
62+
return new HiveContainer(Role.HMS_STANDALONE);
63+
}
64+
65+
public static HiveContainer hiveServer() {
66+
return new HiveContainer(Role.HIVE_SERVER_WITH_EMBEDDING_HMS);
67+
}
68+
69+
public String getMetastoreUri() {
70+
return String.format("thrift://%s:%s", getHost(), getMappedPort(HMS_PORT));
71+
}
72+
73+
public String getHiveJdbcUri() {
74+
return String.format(
75+
"jdbc:hive2://%s:%s/default", getHost(), getMappedPort(HIVE_SERVER_PORT));
76+
}
77+
78+
public HiveMetaStoreClient createMetaStoreClient() throws MetaException {
79+
HiveConf conf = new HiveConf();
80+
conf.set("hive.metastore.uris", getMetastoreUri());
81+
82+
return new HiveMetaStoreClient(conf);
83+
}
84+
85+
public Connection getConnection()
86+
throws ClassNotFoundException, InstantiationException, IllegalAccessException,
87+
SQLException {
88+
Driver driver = loadHiveJdbcDriver();
89+
90+
return driver.connect(getHiveJdbcUri(), getJdbcConnectionConfig());
91+
}
92+
93+
public Driver loadHiveJdbcDriver()
94+
throws ClassNotFoundException, InstantiationException, IllegalAccessException {
95+
return (Driver) Class.forName(DRIVER_CLASS_NAME).newInstance();
96+
}
97+
98+
public Properties getJdbcConnectionConfig() {
99+
Properties props = new Properties();
100+
101+
return props;
102+
}
103+
104+
public enum Role {
105+
HIVE_SERVER_WITH_EMBEDDING_HMS(
106+
"hiveserver2", HIVE_SERVER_PORT, Wait.forLogMessage(".*Starting HiveServer2.*", 1)),
107+
HMS_STANDALONE(
108+
"metastore", HMS_PORT, Wait.forLogMessage(".*Starting Hive Metastore Server.*", 1));
109+
110+
private final String serviceName;
111+
private final int exposePort;
112+
private final WaitStrategy waitStrategy;
113+
114+
Role(String serviceName, int exposePort, WaitStrategy waitStrategy) {
115+
this.serviceName = serviceName;
116+
this.exposePort = exposePort;
117+
this.waitStrategy = waitStrategy;
118+
}
119+
}
120+
}

0 commit comments

Comments
 (0)