PHOENIX-6940 Add a connector variant for Hive 4 #136

Closed · wants to merge 11 commits · showing changes from 10 commits
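For orientation: this PR adds phoenix5-hive4 and phoenix5-hive4-shaded modules next to the existing Hive 3 connector (phoenix5-hive). A minimal sketch of building and deploying the new connector — the build command mirrors the CI workflow below, while the HIVE_AUX_JARS_PATH wiring is an illustrative assumption, not something this PR configures:

    # Build every module, including the new Hive 4 connector (same invocation as CI)
    mvn -B -fae clean install

    # Illustrative deployment (assumed path): expose the shaded Hive 4 connector
    # jar to HiveServer2 via Hive's auxiliary-jars mechanism
    export HIVE_AUX_JARS_PATH=/opt/phoenix/phoenix5-hive4-shaded.jar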
13 changes: 7 additions & 6 deletions .github/workflows/maven.yml
@@ -22,14 +22,15 @@ on:
 jobs:
   build:
     # upgrade to ubuntu-latest after removing Phoenix 4 support
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
     - uses: actions/checkout@v1
     - name: Set up JDK 1.8
-      uses: actions/setup-java@v1
+      uses: actions/setup-java@v4
       with:
-        java-version: 1.8
-    - name: Rebuild HBase
-      run: /bin/bash dev/rebuild_hbase.sh 2.4.16
+        java-version: 8
+        distribution: 'temurin'
     - name: Test
-      run: mvn -B -fae clean install
+      run: |
+        mvn -v
+        mvn -B -fae clean install
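Run outside CI, the updated Test step amounts to the following, assuming a local JDK 8 install matching the Temurin toolchain the workflow now pins:

    mvn -v                      # log the Maven and JDK versions the build will use
    mvn -B -fae clean install   # -B batch mode; -fae fail-at-end, so every failing module is reported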
68 changes: 66 additions & 2 deletions phoenix5-connectors-assembly/pom.xml
@@ -41,14 +41,34 @@
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix5-hive</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-hive-shaded</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-hive4</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-hive4-shaded</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix5-spark</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-spark-shaded</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix5-spark3</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-spark3-shaded</artifactId>
+    </dependency>
   </dependencies>

   <build>
@@ -58,7 +78,7 @@
         <groupId>org.codehaus.mojo</groupId>
         <executions>
           <execution>
-            <id>hive without version</id>
+            <id>hive3 without version</id>
             <phase>package</phase>
             <goals>
               <goal>exec</goal>
@@ -78,7 +98,7 @@
             </configuration>
           </execution>
           <execution>
-            <id>hive-shaded without version</id>
+            <id>hive3-shaded without version</id>
             <phase>package</phase>
             <goals>
               <goal>exec</goal>
@@ -97,6 +117,46 @@
               </arguments>
             </configuration>
           </execution>
+          <execution>
+            <id>hive4 without version</id>
+            <phase>package</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <executable>ln</executable>
+              <workingDirectory>${project.basedir}/../phoenix5-hive4/target</workingDirectory>
+              <arguments>
+                <argument>-fnsv</argument>
+                <argument>
+                  phoenix5-hive4-${project.version}.jar
+                </argument>
+                <argument>
+                  phoenix5-hive4.jar
+                </argument>
+              </arguments>
+            </configuration>
+          </execution>
+          <execution>
+            <id>hive4-shaded without version</id>
+            <phase>package</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <executable>ln</executable>
+              <workingDirectory>${project.basedir}/../phoenix5-hive4-shaded/target</workingDirectory>
+              <arguments>
+                <argument>-fnsv</argument>
+                <argument>
+                  phoenix5-hive4-shaded-${project.version}.jar
+                </argument>
+                <argument>
+                  phoenix5-hive4-shaded.jar
+                </argument>
+              </arguments>
+            </configuration>
+          </execution>
           <execution>
             <id>spark without version</id>
             <phase>package</phase>
@@ -228,6 +288,10 @@
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix5-hive-shaded</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-hive4-shaded</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix5-spark-shaded</artifactId>
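Each of the exec-maven-plugin executions above shells out to ln at package time; for the two new Hive 4 modules the effect is equivalent to the following, with VERSION standing in for ${project.version}:

    # -f force, -n no-dereference, -s symbolic, -v verbose:
    # create a version-less alias pointing at the versioned jar
    (cd phoenix5-hive4/target && ln -fnsv phoenix5-hive4-$VERSION.jar phoenix5-hive4.jar)
    (cd phoenix5-hive4-shaded/target && ln -fnsv phoenix5-hive4-shaded-$VERSION.jar phoenix5-hive4-shaded.jar)

The version-less names give the assembly descriptors below a stable filename to include, independent of the release version.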
(assembly component descriptor for the unshaded connector jars; file path not captured)
@@ -45,5 +45,13 @@
         <include>phoenix5-hive.jar</include>
       </includes>
     </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/../phoenix5-hive4/target</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>phoenix5-hive4-${project.version}.jar</include>
+        <include>phoenix5-hive4.jar</include>
+      </includes>
+    </fileSet>
   </fileSets>
 </component>
(assembly component descriptor for the shaded connector jars; file path not captured)
@@ -45,5 +45,13 @@
         <include>phoenix5-hive-shaded.jar</include>
       </includes>
     </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/../phoenix5-hive4-shaded/target</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>phoenix5-hive4-shaded-${project.version}.jar</include>
+        <include>phoenix5-hive4-shaded.jar</include>
+      </includes>
+    </fileSet>
   </fileSets>
 </component>
1 change: 1 addition & 0 deletions phoenix5-hive/pom.xml
@@ -39,6 +39,7 @@
     <tez.version>0.9.1</tez.version>
     <commons-lang3.version>3.9</commons-lang3.version>
     <calcite.version>1.16.0</calcite.version>
+    <hive.version>${hive3.version}</hive.version>
   </properties>

   <dependencies>
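Pinning hive.version here keeps phoenix5-hive on the Hive 3 line while the new phoenix5-hive4 module targets Hive 4; the hive3.version property itself is assumed to be defined in the shared parent POM, which this diff does not show. The resolved value can be checked with the Maven help plugin (forceStdout needs maven-help-plugin 3.1.0+):

    # Print the Hive version phoenix5-hive actually resolves
    mvn -q help:evaluate -Dexpression=hive.version -DforceStdout -pl phoenix5-hive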
68 changes: 52 additions & 16 deletions phoenix5-hive/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -90,7 +90,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.io.api.LlapProxy;
-import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -118,8 +119,6 @@
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.common.util.StreamPrinter;
 import org.apache.logging.log4j.util.Strings;
-import org.apache.phoenix.compat.HiveCompatUtil;
-import org.apache.phoenix.compat.MyResult;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.apache.zookeeper.ZooKeeper;
@@ -156,7 +155,9 @@ public class QTestUtil {

   public static final String PATH_HDFS_REGEX = "(hdfs://)([a-zA-Z0-9:/_\\-\\.=])+";
   public static final String PATH_HDFS_WITH_DATE_USER_GROUP_REGEX = "([a-z]+) ([a-z]+)([ ]+)([0-9]+) ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}) " + PATH_HDFS_REGEX;
-  private static String DEFAULT_DATABASE_NAME = HiveCompatUtil.getDefaultDatabaseName();
+  private static String DEFAULT_DATABASE_NAME =
+      org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+

   private String testWarehouse;
   private final String testFiles;
@@ -525,7 +526,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
     this.srcTables=getSrcTables();
     this.srcUDFs = getSrcUDFs();

-    MyResult result = HiveCompatUtil.doSetup(confDir);
+    MyResult result = doSetup(confDir);
     conf = result.getFirst();
     queryState = result.getSecond();

@@ -638,7 +639,7 @@ public void shutdown() throws Exception {
     }

     if (clusterType.getCoreClusterType() == CoreClusterType.TEZ){
-      HiveCompatUtil.destroyTEZSession(SessionState.get());
+      SessionState.get().getTezSession().destroy();
     }

     setup.tearDown();
@@ -948,11 +949,11 @@ public void clearTestSideEffects() throws Exception {
       return;
     }

-    HiveCompatUtil.cleanupQueryResultCache();
+    QueryResultsCache.cleanupInstance();

     // allocate and initialize a new conf since a test can
     // modify conf by using 'set' commands
-    conf = HiveCompatUtil.getHiveConf();
+    conf = new HiveConf(IDriver.class);
     initConf();
     initConfFromSetup();

@@ -1024,7 +1025,7 @@ public void cleanUp(String tname) throws Exception {

   protected void runCreateTableCmd(String createTableCmd) throws Exception {
     int ecode = 0;
-    ecode = HiveCompatUtil.getDriverResponseCode(drv, createTableCmd);
+    ecode = getDriverResponseCode(drv, createTableCmd);
     if (ecode != 0) {
       throw new Exception("create table command: " + createTableCmd
           + " failed with exit code= " + ecode);
@@ -1035,8 +1036,8 @@ protected void runCreateTableCmd(String createTableCmd) throws Exception {

   protected void runCmd(String cmd) throws Exception {
     int ecode = 0;
-    ecode = HiveCompatUtil.getDriverResponseCode(drv, cmd);
-    HiveCompatUtil.closeDriver(drv);
+    ecode = getDriverResponseCode(drv, cmd);
+    ((IDriver)drv).close();
     if (ecode != 0) {
       throw new Exception("command: " + cmd
           + " failed with exit code= " + ecode);
}

// Create views registry
HiveCompatUtil.initHiveMaterializedViewsRegistry();
HiveMaterializedViewsRegistry.get().init();

testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
String execEngine = conf.get("hive.execution.engine");
conf.set("hive.execution.engine", "mr");
SessionState.start(conf);
conf.set("hive.execution.engine", execEngine);
db = Hive.get(conf);
drv = HiveCompatUtil.getDriver(conf);
drv = DriverFactory.newDriver(conf);
pd = new ParseDriver();
sem = new SemanticAnalyzer(queryState);
}
Expand Down Expand Up @@ -1250,7 +1251,7 @@ public int executeOne(String tname) {
}

public int execute(String tname) {
return HiveCompatUtil.getDriverResponseCode(drv, qMap.get(tname));
return getDriverResponseCode(drv, qMap.get(tname));
}

public int executeClient(String tname1, String tname2) {
@@ -1420,7 +1421,7 @@ public void convertSequenceFileToTextFile() throws Exception {
     db = Hive.get(conf);

     // Move all data from dest4_sequencefile to dest4
-    HiveCompatUtil.getDriverResponseCode(drv, "FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");
+    getDriverResponseCode(drv, "FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");


     // Drop dest4_sequencefile
@@ -1881,7 +1882,7 @@ public ASTNode parseQuery(String tname) throws Exception {

   public void resetParser() throws SemanticException {
     pd = new ParseDriver();
-    queryState = HiveCompatUtil.getQueryState(conf);
+    queryState = new QueryState.Builder().withHiveConf(conf).build();
     sem = new SemanticAnalyzer(queryState);
   }

@@ -2480,4 +2481,39 @@ public static String ensurePathEndsInSlash(String path) {
     }
   }

+  public int getDriverResponseCode(Object drv, String createTableCmd){
+    IDriver driver = (IDriver) drv;
+    return driver.run(createTableCmd).getResponseCode();
+  }
+
+  public static MyResult doSetup(String confDir) throws MalformedURLException {
+    // HIVE-14443 move this fall-back logic to CliConfigs
+    if (confDir != null && !confDir.isEmpty()) {
+      HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
+      MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation());
+      System.out.println("Setting hive-site: "+ HiveConf.getHiveSiteLocation());
+    }
+
+    QueryState queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build();
+    HiveConf conf = queryState.getConf();
+    return new MyResult(conf, queryState);
+  }
+
+  public static class MyResult {
+    private final HiveConf conf;
+    private final QueryState queryState;
+
+    public MyResult(HiveConf conf, QueryState queryState) {
+      this.conf = conf;
+      this.queryState = queryState;
+    }
+
+    public HiveConf getFirst() {
+      return conf;
+    }
+
+    public QueryState getSecond() {
+      return queryState;
+    }
+  }
 }
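With the org.apache.phoenix.compat.HiveCompatUtil indirection gone, this QTestUtil copy is written directly against the Hive 3 API, leaving the phoenix5-hive4 module free to carry its own variant. A hedged sketch of exercising this module's integration tests in isolation — the -Dit.test value is illustrative, not taken from this diff:

    # Build upstream modules first (-am), then run one failsafe IT from phoenix5-hive
    mvn -pl phoenix5-hive -am -Dit.test=HivePhoenixStoreIT verify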