diff --git a/phoenix5-hive-shaded/pom.xml b/phoenix5-hive-shaded/pom.xml
index 52fdade9..a72ffe10 100644
--- a/phoenix5-hive-shaded/pom.xml
+++ b/phoenix5-hive-shaded/pom.xml
@@ -48,16 +48,16 @@
phoenix-hbase-compat-${hbase.compat.version}
runtime
-
+
org.apache.phoenix
phoenix-hbase-compat-${hbase.compat.version}
runtime
true
-
+
org.apache.hive
hive-cli
@@ -115,11 +115,12 @@
-
-
+
+
commons-beanutils
commons-beanutils
@@ -131,10 +132,10 @@
org.apache.hadoop
hadoop-common
-
- commons-beanutils
- commons-beanutils
-
+
+ commons-beanutils
+ commons-beanutils
+
provided
@@ -291,8 +292,8 @@
protobuf-java
provided
-
+
com.google.guava
guava
@@ -308,7 +309,7 @@
provided
-
+
@@ -362,26 +363,26 @@
+ implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
csv-bulk-load-config.properties
${project.basedir}/../config/csv-bulk-load-config.properties
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
README.md
${project.basedir}/../README.md
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
LICENSE.txt
${project.basedir}/../LICENSE
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
NOTICE
${project.basedir}/../NOTICE
@@ -404,7 +405,8 @@
org/w3c/dom/**
org/xml/sax/**
-
+
org/apache/hbase/**
org/apache/spark/**
@@ -412,16 +414,15 @@
org/apache/phoenix/**
org/apache/omid/**
-
+
org/apache/commons/configuration2/**
-
com/beust/
@@ -451,8 +452,8 @@
com/google/inject/
${shaded.package}.com.google.inject.
-
+
com/google/protobuf/
${hbase.shaded.package}.com.google.protobuf.
@@ -478,7 +479,8 @@
io/
${shaded.package}.io.
-
+
io/compression/**
io/mapfile/**
io/map/index/*
@@ -522,7 +524,8 @@
net/
${shaded.package}.net.
-
+
net/topology/**
@@ -579,8 +582,8 @@
maven-compiler-plugin
- default-compile
- none
+ default-compile
+ none
diff --git a/phoenix5-hive/pom.xml b/phoenix5-hive/pom.xml
index 19a20cf2..b34722bc 100644
--- a/phoenix5-hive/pom.xml
+++ b/phoenix5-hive/pom.xml
@@ -31,206 +31,148 @@
6.0.0-SNAPSHOT
phoenix5-hive
- Phoenix Hive Connector for Phoenix 5
+ Phoenix Hive 3 Connector for Phoenix 5
${project.basedir}/..
${project.build.directory}/tmp
0.9.1
3.9
- ${hive3.version}
1.16.0
+
org.apache.phoenix
- phoenix-hbase-compat-${hbase.compat.version}
- runtime
- true
+ phoenix-core-client
-
- org.apache.hive
- hive-cli
- ${hive.version}
- provided
-
-
- org.apache.hive
- hive-common
- ${hive.version}
- provided
+ org.apache.phoenix
+ phoenix-core-server
- org.apache.hive
- hive-exec
- ${hive.version}
- provided
+ org.apache.phoenix.thirdparty
+ phoenix-shaded-guava
-
-
- org.apache.hive
- hive-serde
- ${hive.version}
-
-
-
- io.netty
- *
-
-
- provided
-
-
- org.apache.hive
- hive-storage-api
- ${hive-storage.version}
- provided
-
-
- org.apache.hive.shims
- hive-shims-common
- ${hive.version}
- provided
-
-
-
- org.apache.hive
- hive-standalone-metastore
- ${hive.version}
- provided
-
+
org.apache.hbase
- hbase-mapreduce
+ hbase-protocol
provided
org.apache.hbase
- hbase-zookeeper
- provided
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
+ hbase-client
provided
org.apache.hbase
- hbase-protocol-shaded
+ hbase-common
provided
org.apache.hbase
- hbase-endpoint
+ hbase-mapreduce
provided
+
+
org.apache.hadoop
- hadoop-hdfs-client
+ hadoop-common
provided
org.apache.hadoop
- hadoop-minicluster
- test
-
-
- org.apache.zookeeper
- zookeeper
- ${zookeeper.version}
+ hadoop-mapreduce-client-core
provided
+
+
- org.apache.hbase
- hbase-testing-util
- test
-
-
-
- org.apache.phoenix
- phoenix-core
-
-
-
- org.apache.phoenix.thirdparty
- phoenix-shaded-guava
-
-
- org.slf4j
- slf4j-api
-
-
- org.apache.commons
- commons-lang3
- ${commons-lang3.version}
+ org.apache.hive
+ hive-common
+ ${hive.version}
+ provided
+
+
+
+ com.google.guava
+ guava
+
+
+ org.eclipse.jetty
+ *
+
+
- org.apache.hbase
- hbase-protocol
+ org.apache.hive
+ hive-exec
+ ${hive.version}
provided
+
+
+ org.eclipse.jetty
+ *
+
+
-
- org.apache.hbase
- hbase-client
+ org.apache.hive
+ hive-storage-api
+ ${hive-storage.version}
provided
- org.apache.hbase
- hbase-common
+ org.apache.hive.shims
+ hive-shims-common
+ ${hive.version}
provided
+
- org.apache.hadoop
- hadoop-common
+ org.apache.hive
+ hive-standalone-metastore
+ ${hive.version}
provided
+
org.slf4j
- slf4j-log4j12
+ slf4j-api
provided
-
-
- com.google.code.findbugs
- jsr305
- 3.0.0
- test
-
-
- org.apache.commons
- commons-compress
- ${commons-compress.version}
- test
-
-
- commons-io
- commons-io
- ${commons-io.version}
- test
-
+
org.apache.phoenix
phoenix-core
tests
- test
+
- org.apache.omid
- omid-tso-server-hbase2.x
+ org.apache.phoenix
+ phoenix-hbase-compat-${hbase.compat.version}
test
+
+
- org.apache.omid
- omid-tso-server-hbase2.x
+ org.apache.hbase
+ hbase-zookeeper
test
- test-jar
+
+
+ org.apache.hbase
+ hbase-testing-util
+ test
org.apache.hbase
@@ -244,6 +186,8 @@
test-jar
test
+
+
org.apache.hadoop
hadoop-hdfs
@@ -254,179 +198,140 @@
test-jar
+ test
+
+
+
+ org.apache.commons
+ commons-lang3
+ ${commons-lang3.version}
+
+
+
+
+ org.apache.hive
+ hive-cli
+ ${hive.version}
test
+
+
+ org.eclipse.jetty
+ *
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
- junit
- junit
+ org.apache.zookeeper
+ zookeeper
+ ${zookeeper.version}
test
+
- org.mockito
- mockito-all
+ com.google.code.findbugs
+ jsr305
+ 3.0.0
test
- org.apache.tez
- tez-tests
+ org.apache.commons
+ commons-compress
+ ${commons-compress.version}
test
- ${tez.version}
- test-jar
-
-
- org.apache.hadoop
- hadoop-yarn-api
-
-
- org.apache.tez
- tez-dag
+ commons-io
+ commons-io
+ ${commons-io.version}
test
- ${tez.version}
-
-
- org.apache.hadoop
- hadoop-yarn-api
-
-
+
+
org.apache.logging.log4j
log4j-api
- provided
+ test
+ ${log4j2.version}
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
${log4j2.version}
org.apache.logging.log4j
log4j-core
- provided
+ test
${log4j2.version}
org.apache.logging.log4j
log4j-slf4j-impl
- provided
+ test
${log4j2.version}
-
-
- org.apache.hbase
- hbase-annotations
- provided
-
-
- org.apache.hbase
- hbase-metrics-api
- provided
-
-
- org.apache.hbase
- hbase-metrics
- provided
-
-
- org.apache.hbase
- hbase-server
- provided
-
-
- org.apache.hbase
- hbase-hadoop-compat
- provided
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- provided
-
-
- org.apache.hbase.thirdparty
- hbase-shaded-miscellaneous
- provided
-
-
- org.apache.hbase.thirdparty
- hbase-shaded-protobuf
- provided
-
-
- org.apache.hadoop
- hadoop-annotations
- provided
-
-
- org.apache.hadoop
- hadoop-auth
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-api
- provided
+ junit
+ junit
+ test
+
- com.google.protobuf
- protobuf-java
- provided
+ org.mockito
+ mockito-core
+ test
-
- maven-dependency-plugin
-
- true
-
-
- org.slf4j:slf4j-api
-
-
- org.apache.commons:commons-lang3
-
-
-
-
-
- org.apache.calcite.avatica:avatica
-
-
- org.antlr:antlr-runtime
-
-
-
- org.antlr:antlr-runtime
-
-
-
org.codehaus.mojo
build-helper-maven-plugin
-
-
- add-parent-test-source
- generate-sources
-
- add-test-source
-
-
-
-
-
-
-
-
-
org.apache.maven.plugins
maven-failsafe-plugin
-
+
1
false
@@ -436,7 +341,8 @@
1
false
- org.apache.phoenix.end2end.ParallelStatsDisabledTest
+
+ org.apache.phoenix.end2end.ParallelStatsDisabledTest
integration-test
@@ -446,42 +352,83 @@
- org.apache.maven.plugins
- maven-resources-plugin
-
-
- copy-resources
- generate-resources
-
- copy-resources
-
-
- ${project.build.directory}/test-classes
-
- true
-
-
- ${project.parent.basedir}/src/test/resources
-
-
-
-
-
+ maven-dependency-plugin
+
+
+
+ org.apache.hbase:hbase-testing-util
+
+
+ org.apache.hbase:hbase-it:test-jar
+
+
+ org.apache.hadoop:hadoop-hdfs
+
+
+ org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
+
+
+ org.apache.hive.shims:hive-shims-common
+
+
+ org.apache.hive:hive-standalone-metastore
+
+
+ org.apache.hive:hive-storage-api
+
+
+ org.apache.logging.log4j:log4j-core
+
+
+ org.apache.logging.log4j:log4j-slf4j-impl
+
+
+ org.apache.logging.log4j:log4j-slf4j-impl
+
+
+ org.apache.logging.log4j:log4j-1.2-api
+
+
+ org.apache.logging.log4j:log4j-slf4j-impl
+
+
+ org.apache.commons:commons-lang3
+
+
+ org.slf4j:slf4j-api
+
+ ignoredUnusedDeclaredDependency>
+
+
+
+ org.antlr:antlr-runtime
+
+
+
+ org.apache.calcite.avatica:avatica
+
+
+
+
+ org.antlr:antlr-runtime
+
+
+
org.apache.maven.plugins
maven-compiler-plugin
+
8
-
-
+
org.apache.calcite
@@ -489,7 +436,8 @@
${calcite.version}
-
+
org.pentaho
pentaho-aggdesigner-algorithm
diff --git a/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java b/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index 2931f939..aa5a0f9e 100644
--- a/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -19,7 +19,11 @@
package org.apache.phoenix.hive;
import org.junit.BeforeClass;
+import org.junit.Ignore;
+// I was unable to find a combination classpath that works with Phoenix 5.2, and the unrelocated
+// hive-exec uberjar that is added by the tez job runner.
+@Ignore
public class HiveTezIT extends HivePhoenixStoreIT {
@BeforeClass
diff --git a/phoenix5-hive/src/test/resources/hbase-site.xml b/phoenix5-hive/src/it/resources/hbase-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/hbase-site.xml
rename to phoenix5-hive/src/it/resources/hbase-site.xml
diff --git a/phoenix5-hive/src/test/resources/hive-site.xml b/phoenix5-hive/src/it/resources/hive-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/hive-site.xml
rename to phoenix5-hive/src/it/resources/hive-site.xml
diff --git a/phoenix5-hive/src/test/resources/log4j.properties b/phoenix5-hive/src/it/resources/log4j.properties
similarity index 100%
rename from phoenix5-hive/src/test/resources/log4j.properties
rename to phoenix5-hive/src/it/resources/log4j.properties
diff --git a/phoenix5-hive/src/test/resources/tez-site.xml b/phoenix5-hive/src/it/resources/tez-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/tez-site.xml
rename to phoenix5-hive/src/it/resources/tez-site.xml
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index 6cb82cb6..d1d3b597 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -50,7 +50,7 @@
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.phoenix.compile.QueryPlan;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
@@ -177,7 +177,7 @@ private List generateSplits(final JobConf jobConf, final QueryPlan q
for (int i = 0, limit = scans.size(); i < limit; i++) {
LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " + Bytes
.toStringBinary(scans.get(i).getAttribute
- (BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY)));
+ (BaseScannerRegionObserverConstants.EXPECTED_UPPER_REGION_KEY)));
}
}
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
index 66c91995..63c5d31b 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
@@ -33,7 +33,7 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.hive.PhoenixRowKey;
import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
import org.apache.phoenix.iterate.ConcatResultIterator;
@@ -99,8 +99,8 @@ public void initialize(InputSplit split) throws IOException {
for (int i = 0, limit = scans.size(); i < limit; i++) {
LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " +
- Bytes.toStringBinary(scans.get(i).getAttribute(BaseScannerRegionObserver
- .EXPECTED_UPPER_REGION_KEY)));
+ Bytes.toStringBinary(scans.get(i).getAttribute(
+ BaseScannerRegionObserverConstants.EXPECTED_UPPER_REGION_KEY)));
}
}
@@ -113,8 +113,8 @@ public void initialize(InputSplit split) throws IOException {
long renewScannerLeaseThreshold = queryPlan.getContext().getConnection()
.getQueryServices().getRenewLeaseThresholdMilliSeconds();
for (Scan scan : scans) {
- scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes
- .toBytes(true));
+ scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK,
+ Bytes.toBytes(true));
ScanMetricsHolder scanMetricsHolder = ScanMetricsHolder.getInstance(readMetrics, tableName, scan, ctx.getConnection().getLogLevel());
final TableResultIterator tableResultIterator = new TableResultIterator(
queryPlan.getContext().getConnection().getMutationState(), scan, scanMetricsHolder,
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 892aed97..fc1b2c14 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.TableName;
-import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
+import org.apache.phoenix.coprocessorclient.MetaDataProtocol.MetaDataMutationResult;
import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.MetaDataClient;
diff --git a/phoenix5-spark-shaded/pom.xml b/phoenix5-spark-shaded/pom.xml
index f9fb93d9..698edc1a 100644
--- a/phoenix5-spark-shaded/pom.xml
+++ b/phoenix5-spark-shaded/pom.xml
@@ -53,10 +53,10 @@
provided
-
+
commons-beanutils
commons-beanutils
@@ -222,8 +222,8 @@
protobuf-java
provided
-
+
com.google.guava
guava
@@ -242,7 +242,7 @@
-
org.apache.maven.plugins
@@ -338,7 +338,8 @@
org/w3c/dom/**
org/xml/sax/**
-
+
org/apache/hbase/**
org/apache/spark/**
@@ -346,16 +347,15 @@
org/apache/phoenix/**
org/apache/omid/**
-
+
org/apache/commons/configuration2/**
-
com/beust/
@@ -385,8 +385,8 @@
com/google/inject/
${shaded.package}.com.google.inject.
-
+
com/google/protobuf/
${hbase.shaded.package}.com.google.protobuf.
@@ -412,7 +412,8 @@
io/
${shaded.package}.io.
-
+
io/compression/**
io/mapfile/**
io/map/index/*
@@ -456,7 +457,8 @@
net/
${shaded.package}.net.
-
+
net/topology/**
diff --git a/phoenix5-spark/pom.xml b/phoenix5-spark/pom.xml
index 17e02bb9..f5242ca9 100644
--- a/phoenix5-spark/pom.xml
+++ b/phoenix5-spark/pom.xml
@@ -31,7 +31,7 @@
6.0.0-SNAPSHOT
phoenix5-spark
- Phoenix Spark Connector for Phoenix 5
+ Phoenix Spark 2 Connector for Phoenix 5
${project.basedir}/..
@@ -59,7 +59,8 @@
org.apache.spark
spark-core_${scala.binary.version}
${spark.version}
-
+ provided
+
org.apache.hadoop
@@ -70,7 +71,6 @@
hadoop-client-runtime
- provided
org.apache.spark
@@ -99,7 +99,11 @@
org.apache.phoenix
- phoenix-core
+ phoenix-core-client
+
+
+ org.apache.phoenix
+ phoenix-core-server
org.apache.phoenix
@@ -117,12 +121,12 @@
org.apache.omid
- omid-tso-server-hbase2.x
+ omid-tso-server
test
org.apache.omid
- omid-tso-server-hbase2.x
+ omid-tso-server
test
test-jar
@@ -141,10 +145,10 @@
test
-
org.apache.hadoop
hadoop-common
+ provided
log4j
@@ -171,12 +175,12 @@
netty
- provided
org.apache.hadoop
hadoop-mapreduce-client-core
+ provided
log4j
@@ -203,13 +207,13 @@
netty
- provided
org.apache.hbase
hbase-client
+ provided
log4j
@@ -285,6 +289,7 @@
org.apache.hbase
hbase-common
+ provided
log4j
@@ -414,202 +419,159 @@
-
+ org.apache.hbase
+ hbase-testing-util
+ test
+
+
+
org.apache.zookeeper
zookeeper
${zookeeper.version}
test
-
-
- org.apache.hadoop
- hadoop-annotations
- provided
-
-
- org.apache.hadoop
- hadoop-auth
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-api
- provided
-
-
- org.apache.hadoop
- hadoop-hdfs
- provided
-
-
- org.apache.hadoop
- hadoop-distcp
- provided
-
-
- org.apache.hadoop
- hadoop-client
- provided
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- provided
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
- provided
-
org.slf4j
slf4j-log4j12
- provided
-
-
- com.google.protobuf
- protobuf-java
- 2.5.0
- provided
+ test
-
-
- org.codehaus.mojo
- build-helper-maven-plugin
-
-
- add-test-source
- generate-sources
-
- add-test-source
-
-
-
-
-
-
-
-
-
-
-
- maven-dependency-plugin
-
- true
-
-
-
- org.apache.hadoop:hadoop-hdfs
-
-
- org.apache.hbase:hbase-it
-
-
-
-
-
- net.alchim31.maven
- scala-maven-plugin
-
- ${project.build.sourceEncoding}
-
- -Xmx1024m
-
- ${scala.version}
- ${scala.binary.version}
-
-
-
- scala-compile-first
- process-resources
-
- add-source
- compile
-
-
-
- scala-test-compile
- process-test-resources
-
- testCompile
-
-
-
-
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+
+
+ maven-dependency-plugin
+
+
+
+
+ org.apache.zookeeper:zookeeper
+
+
+ org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
+
+
+ org.apache.omid:omid-tso-server
+
+
+ org.apache.hadoop:hadoop-hdfs
+
+
+ org.apache.hbase:hbase-it
+
+
+ org.apache.hbase:hbase-testing-util
+
+
+ org.slf4j:slf4j-log4j12
+
+
+
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+ ${project.build.sourceEncoding}
+
+ -Xmx1024m
+
+ ${scala.version}
+ ${scala.binary.version}
+
+
+
+ scala-compile-first
+ process-resources
+
+ add-source
+ compile
+
+
+
+ scala-test-compile
+ process-test-resources
+
+ testCompile
+
+
+
+
-
- org.scalatest
- scalatest-maven-plugin
- 1.0
-
- ${project.build.directory}/surefire-reports
- .
- WDF TestSuite.txt
- ${skip.scalatest}
-
-
-
- integration-test
- integration-test
-
- test
-
-
-
- false
- Integration-Test
- -XX:ReservedCodeCacheSize=512m ${argLine}
-
-
-
-
-
- org.apache.maven.plugins
- maven-failsafe-plugin
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
-
-
- 1.8
-
-
-
- org.apache.maven.plugins
- maven-javadoc-plugin
+
+ org.scalatest
+ scalatest-maven-plugin
+ 1.0
+
+ ${project.build.directory}/surefire-reports
+ .
+ WDF TestSuite.txt
+ ${skip.scalatest}
+
+
+
+ integration-test
+ integration-test
+
+ test
+
+
+
+ false
+ Integration-Test
+ -XX:ReservedCodeCacheSize=512m ${argLine}
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+
+ 1.8
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+
+ ${skip.spark.javadoc}
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+ empty-javadoc-jar
+ package
+
+ jar
+
- ${skip.spark.javadoc}
+ javadoc
+ ${basedir}/javadoc
-
-
- org.apache.maven.plugins
- maven-jar-plugin
-
-
- empty-javadoc-jar
- package
-
- jar
-
-
- javadoc
- ${basedir}/javadoc
-
-
-
-
-
-
+
+
+
+
+
-
+
exclude-tephra
diff --git a/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java b/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
index a49bc0ae..507fc8e4 100644
--- a/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
+++ b/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
@@ -41,6 +41,7 @@
import static org.apache.phoenix.spark.datasource.v2.PhoenixDataSource.JDBC_URL;
import static org.apache.phoenix.spark.datasource.v2.PhoenixDataSource.ZOOKEEPER_URL;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_ZK;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
import static org.junit.Assert.*;
@@ -104,6 +105,12 @@ public void basicWriteAndReadBackTest() throws SQLException {
.save();
// Use jdbcUrl
+ // In Phoenix 5.2+ getUrl() return a JDBC URL, in earlier versions it returns a ZK
+ // quorum
+ String jdbcUrl = getUrl();
+ if (!jdbcUrl.startsWith(JDBC_PROTOCOL)) {
+ jdbcUrl = JDBC_PROTOCOL_ZK + JDBC_PROTOCOL_SEPARATOR + jdbcUrl;
+ }
Dataset df2 =
spark.createDataFrame(
Arrays.asList(RowFactory.create(2, "x")),
@@ -111,7 +118,7 @@ public void basicWriteAndReadBackTest() throws SQLException {
df2.write().format("phoenix").mode(SaveMode.Overwrite)
.option("table", tableName)
- .option(JDBC_URL, JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+ .option(JDBC_URL, jdbcUrl)
.save();
// Use default from hbase-site.xml
@@ -148,8 +155,7 @@ public void basicWriteAndReadBackTest() throws SQLException {
// Use jdbcUrl
Dataset df2Read = spark.read().format("phoenix")
.option("table", tableName)
- .option(PhoenixDataSource.JDBC_URL,
- JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+ .option(PhoenixDataSource.JDBC_URL, jdbcUrl)
.load();
assertEquals(3l, df2Read.count());
diff --git a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
index a7aca229..84d83f39 100644
--- a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
+++ b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
@@ -29,7 +29,6 @@
import org.apache.phoenix.jdbc.PhoenixStatement;
import org.apache.phoenix.mapreduce.PhoenixInputSplit;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-import org.apache.phoenix.protobuf.ProtobufUtil;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.schema.PTableImpl;
import org.apache.phoenix.spark.FilterExpressionCompiler;
diff --git a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
index a7f6240c..3a4ef656 100644
--- a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
+++ b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
@@ -17,7 +17,6 @@
*/
package org.apache.phoenix.spark.datasource.v2.reader;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
@@ -33,9 +32,9 @@
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
import org.apache.phoenix.coprocessor.generated.PTableProtos;
import org.apache.phoenix.coprocessor.generated.PTableProtos.PTable;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.iterate.ConcatResultIterator;
import org.apache.phoenix.iterate.LookAheadResultIterator;
import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
@@ -98,7 +97,7 @@ private QueryPlan getQueryPlan() throws SQLException {
PTable pTable = null;
try {
pTable = PTable.parseFrom(options.getPTableCacheBytes());
- } catch (InvalidProtocolBufferException e) {
+ } catch (Exception e) {
throw new RuntimeException("Parsing the PTable Cache Bytes is failing ", e);
}
org.apache.phoenix.schema.PTable table = PTableImpl.createFromProto(pTable);
@@ -134,7 +133,7 @@ private void initialize() {
.getQueryServices().getRenewLeaseThresholdMilliSeconds();
for (Scan scan : scans) {
// For MR, skip the region boundary check exception if we encounter a split. ref: PHOENIX-2599
- scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
+ scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
PeekingResultIterator peekingResultIterator;
ScanMetricsHolder scanMetricsHolder =
diff --git a/phoenix5-spark3-it/pom.xml b/phoenix5-spark3-it/pom.xml
deleted file mode 100644
index 5a8aaf1e..00000000
--- a/phoenix5-spark3-it/pom.xml
+++ /dev/null
@@ -1,260 +0,0 @@
-
-
-
-
- phoenix-connectors
- org.apache.phoenix
- 6.0.0-SNAPSHOT
-
- 4.0.0
-
- phoenix5-spark3-it
- Tests for Phoenix Spark 3 Connector for Phoenix 5
-
-
- ${project.basedir}/..
- ${spark3.version}
- ${scala.version.for.spark3}
- ${scala.binary.version.for.spark3}
-
-
-
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
- ${spark.version}
- provided
-
-
-
- org.apache.hadoop
- hadoop-client-api
-
-
- org.apache.hadoop
- hadoop-client-runtime
-
-
-
-
- org.apache.spark
- spark-catalyst_${scala.binary.version}
- ${spark.version}
- provided
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
- ${spark.version}
- provided
-
-
-
- org.scala-lang
- scala-library
- ${scala.version}
- provided
-
-
- org.scala-lang
- scala-reflect
- ${scala.version}
- provided
-
-
-
-
- org.apache.phoenix
- phoenix5-spark3
- test
-
-
- org.apache.phoenix
- phoenix-core
- test
-
-
- org.apache.phoenix
- phoenix-hbase-compat-${hbase.compat.version}
- test
- true
-
-
- org.apache.phoenix
- phoenix-core
- tests
- test
-
-
- org.apache.omid
- omid-tso-server-hbase2.x
- test
-
-
- org.apache.omid
- omid-tso-server-hbase2.x
- test
- test-jar
-
-
-
-
- junit
- junit
- test
-
-
- org.scalactic
- scalactic_${scala.binary.version}
-
- 3.1.4
- test
-
-
- org.scalatest
- scalatest_${scala.binary.version}
- 3.1.4
- test
-
-
-
-
-
- org.apache.zookeeper
- zookeeper
- ${zookeeper.version}
- test
-
-
- org.apache.hbase
- hbase-it
- test-jar
-
-
-
- org.apache.hadoop
- hadoop-minicluster
-
-
-
-
-
-
-
-
- org.codehaus.mojo
- build-helper-maven-plugin
-
-
- add-test-source
- generate-sources
-
- add-test-source
-
-
-
-
-
-
-
-
-
-
-
- net.alchim31.maven
- scala-maven-plugin
-
- ${project.build.sourceEncoding}
-
- -Xmx1024m
-
- ${scala.version}
- ${scala.binary.version}
-
-
-
- scala-compile-first
- process-resources
-
- add-source
- compile
-
-
-
- scala-test-compile
-
- testCompile
-
-
-
-
-
- org.scalatest
- scalatest-maven-plugin
- 1.0
-
- ${project.build.directory}/surefire-reports
- .
- WDF TestSuite.txt
- ${skip.scalatest}
-
-
-
- integration-test
- integration-test
-
- test
-
-
-
- false
- Integration-Test
- -XX:ReservedCodeCacheSize=512m ${argLine}
-
-
-
-
-
- org.apache.maven.plugins
- maven-failsafe-plugin
-
-
- maven-dependency-plugin
-
-
- org.apache.hadoop:hadoop-common
- org.apache.hadoop:hadoop-minicluster
- org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
- org.apache.hbase:hbase-it
- org.apache.omid:*
- org.apache.zookeeper:zookeeper
-
-
-
-
-
-
\ No newline at end of file
diff --git a/phoenix5-spark3-shaded/pom.xml b/phoenix5-spark3-shaded/pom.xml
index f3bc82c5..5c414f23 100644
--- a/phoenix5-spark3-shaded/pom.xml
+++ b/phoenix5-spark3-shaded/pom.xml
@@ -53,10 +53,10 @@
provided
-
+
commons-beanutils
commons-beanutils
@@ -222,8 +222,8 @@
protobuf-java
provided
-
+
com.google.guava
guava
@@ -242,7 +242,7 @@
-
org.apache.maven.plugins
@@ -338,7 +338,8 @@
org/w3c/dom/**
org/xml/sax/**
-
+
org/apache/hbase/**
org/apache/spark/**
@@ -346,16 +347,15 @@
org/apache/phoenix/**
org/apache/omid/**
-
+
org/apache/commons/configuration2/**
-
com/beust/
@@ -385,8 +385,8 @@
com/google/inject/
${shaded.package}.com.google.inject.
-
+
com/google/protobuf/
${hbase.shaded.package}.com.google.protobuf.
@@ -412,7 +412,8 @@
io/
${shaded.package}.io.
-
+
io/compression/**
io/mapfile/**
io/map/index/*
@@ -456,7 +457,8 @@
net/
${shaded.package}.net.
-
+
net/topology/**
diff --git a/phoenix5-spark3/pom.xml b/phoenix5-spark3/pom.xml
index aecb6b62..7123e323 100644
--- a/phoenix5-spark3/pom.xml
+++ b/phoenix5-spark3/pom.xml
@@ -41,36 +41,24 @@
-
- org.apache.phoenix
- phoenix-core
-
-
- org.apache.phoenix
- phoenix-hbase-compat-${hbase.compat.version}
- runtime
- true
-
-
- org.apache.phoenix.thirdparty
- phoenix-shaded-guava
- provided
-
-
-
-
- org.scala-lang
- scala-library
- ${scala.version}
- provided
-
-
+
org.apache.spark
spark-core_${scala.binary.version}
${spark.version}
provided
+
+
+
+ org.apache.hadoop
+ hadoop-client-api
+
+
+ org.apache.hadoop
+ hadoop-client-runtime
+
+
org.apache.spark
@@ -97,6 +85,34 @@
provided
+
+
+ org.apache.phoenix
+ phoenix-core-client
+
+
+ org.apache.phoenix
+ phoenix-core-server
+
+
+ org.apache.phoenix.thirdparty
+ phoenix-shaded-guava
+
+
+ org.apache.phoenix
+ phoenix-hbase-compat-${hbase.compat.version}
+ runtime
+ true
+
+
+
+
+ org.scala-lang
+ scala-library
+ ${scala.version}
+ provided
+
+
org.apache.hbase
@@ -114,25 +130,99 @@
provided
+
+
- com.google.protobuf
- protobuf-java
- 2.5.0
+ org.apache.hadoop
+ hadoop-common
provided
-
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ provided
+
+
joda-time
joda-time
${jodatime.version}
+
+
+
+ org.apache.phoenix
+ phoenix-core
+ tests
+ test
+
+
+
+
+ org.scala-lang
+ scala-reflect
+ ${scala.version}
+ test
+
+
+ org.scalactic
+ scalactic_${scala.binary.version}
+
+ 3.1.4
+ test
+
+
+ org.scalatest
+ scalatest_${scala.binary.version}
+ 3.1.4
+ test
+
+
+
+
+
+ org.apache.zookeeper
+ zookeeper
+ ${zookeeper.version}
+ test
+
+
+ org.apache.hbase
+ hbase-testing-util
+ test
+
+
+ org.apache.hbase
+ hbase-it
+ test-jar
+ test
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ test
+
+
+
+
+ org.apache.omid
+ omid-tso-server
+ test
+
+
+ org.apache.omid
+ omid-tso-server
+ test
+ test-jar
+
+
org.slf4j
slf4j-api
+ provided
-
junit
junit
@@ -142,48 +232,89 @@
-
- net.alchim31.maven
- scala-maven-plugin
-
- ${project.build.sourceEncoding}
-
- -Xmx1024m
-
- ${scala.version}
- ${scala.binary.version}
-
-
-
- scala-compile-first
- process-resources
-
- add-source
- compile
-
-
-
-
- maven-dependency-plugin
-
- true
-
- org.apache.hadoop:hadoop-common
- org.apache.hadoop:hadoop-mapreduce-client-core
-
-
-
-
- org.apache.maven.plugins
- maven-javadoc-plugin
+ org.codehaus.mojo
+ build-helper-maven-plugin
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+ ${project.build.sourceEncoding}
+
+ -Xmx1024m
+
+ ${scala.version}
+ ${scala.binary.version}
+
+
+
+ scala-compile-first
+ process-resources
+
+ add-source
+ compile
+
+
+
+ scala-test-compile
+
+ testCompile
+
+
+
+
+
+ org.scalatest
+ scalatest-maven-plugin
+ 1.0
+
+ ${project.build.directory}/surefire-reports
+ .
+ WDF TestSuite.txt
+ ${skip.scalatest}
+
+
+
+ integration-test
+ integration-test
+
+ test
+
- ${skip.spark.javadoc}
+
+ false
+ Integration-Test
+ -XX:ReservedCodeCacheSize=512m ${argLine}
-
+
+
+
+
+ maven-dependency-plugin
+
+
+
+ org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
+ org.apache.zookeeper:zookeeper
+ org.apache.hbase:hbase-testing-util
+ org.apache.hbase:hbase-it
+ org.apache.hadoop:hadoop-minicluster
+ org.apache.omid:omid-tso-server
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+
+ ${skip.spark.javadoc}
+
+
-
+
exclude-tephra
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/AggregateIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/AggregateIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/AggregateIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/AggregateIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
similarity index 94%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
index f25a693f..efcef710 100644
--- a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
+++ b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
@@ -40,6 +40,7 @@
import static org.apache.phoenix.spark.sql.connector.PhoenixDataSource.JDBC_URL;
import static org.apache.phoenix.spark.sql.connector.PhoenixDataSource.ZOOKEEPER_URL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_ZK;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
import static org.junit.Assert.*;
@@ -103,6 +104,13 @@ public void basicWriteAndReadBackTest() throws SQLException {
.save();
// Use jdbcUrl
+ // In Phoenix 5.2+ getUrl() return a JDBC URL, in earlier versions it returns a ZK
+ // quorum
+ String jdbcUrl = getUrl();
+ if (!jdbcUrl.startsWith(JDBC_PROTOCOL)) {
+ jdbcUrl = JDBC_PROTOCOL_ZK + JDBC_PROTOCOL_SEPARATOR + jdbcUrl;
+ }
+
Dataset df2 =
spark.createDataFrame(
Arrays.asList(RowFactory.create(2, "x")),
@@ -110,7 +118,7 @@ public void basicWriteAndReadBackTest() throws SQLException {
df2.write().format("phoenix").mode(SaveMode.Append)
.option("table", tableName)
- .option(JDBC_URL, JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+ .option(JDBC_URL, jdbcUrl)
.save();
// Use default from hbase-site.xml
@@ -147,8 +155,7 @@ public void basicWriteAndReadBackTest() throws SQLException {
// Use jdbcUrl
Dataset df2Read = spark.read().format("phoenix")
.option("table", tableName)
- .option(PhoenixDataSource.JDBC_URL,
- JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+ .option(PhoenixDataSource.JDBC_URL, jdbcUrl)
.load();
assertEquals(3l, df2Read.count());
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/OrderByIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/OrderByIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/OrderByIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/OrderByIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SparkUtil.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SparkUtil.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SparkUtil.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SparkUtil.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
diff --git a/phoenix5-spark3-it/src/it/resources/globalSetup.sql b/phoenix5-spark3/src/it/resources/globalSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/globalSetup.sql
rename to phoenix5-spark3/src/it/resources/globalSetup.sql
diff --git a/phoenix5-spark3-it/src/it/resources/log4j.xml b/phoenix5-spark3/src/it/resources/log4j.xml
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/log4j.xml
rename to phoenix5-spark3/src/it/resources/log4j.xml
diff --git a/phoenix5-spark3-it/src/it/resources/tenantSetup.sql b/phoenix5-spark3/src/it/resources/tenantSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/tenantSetup.sql
rename to phoenix5-spark3/src/it/resources/tenantSetup.sql
diff --git a/phoenix5-spark3-it/src/it/resources/transactionTableSetup.sql b/phoenix5-spark3/src/it/resources/transactionTableSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/transactionTableSetup.sql
rename to phoenix5-spark3/src/it/resources/transactionTableSetup.sql
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
diff --git a/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java b/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
index 6a7ee4d0..ee8a80ca 100644
--- a/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
+++ b/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
@@ -17,7 +17,6 @@
*/
package org.apache.phoenix.spark.sql.connector.reader;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
@@ -33,8 +32,8 @@
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
import org.apache.phoenix.coprocessor.generated.PTableProtos.PTable;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.iterate.ConcatResultIterator;
import org.apache.phoenix.iterate.LookAheadResultIterator;
import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
@@ -85,7 +84,7 @@ private QueryPlan getQueryPlan() throws SQLException {
PTable pTable = null;
try {
pTable = PTable.parseFrom(options.getPTableCacheBytes());
- } catch (InvalidProtocolBufferException e) {
+ } catch (Exception e) {
throw new RuntimeException("Parsing the PTable Cache Bytes is failing ", e);
}
org.apache.phoenix.schema.PTable table = PTableImpl.createFromProto(pTable);
@@ -121,7 +120,7 @@ private void initialize() {
.getQueryServices().getRenewLeaseThresholdMilliSeconds();
for (Scan scan : scans) {
// For MR, skip the region boundary check exception if we encounter a split. ref: PHOENIX-2599
- scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
+ scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
PeekingResultIterator peekingResultIterator;
ScanMetricsHolder scanMetricsHolder =
diff --git a/pom.xml b/pom.xml
index 42b7056d..d0075c39 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,5 +1,27 @@
-
+
+
+
4.0.0
org.apache.phoenix
phoenix-connectors
@@ -13,7 +35,7 @@
The Apache Software License, Version 2.0
http://www.apache.org/licenses/LICENSE-2.0.txt
repo
-
+
@@ -42,29 +64,29 @@
-
- phoenix5-hive
- phoenix5-hive-shaded
- phoenix5-spark
- phoenix5-spark-shaded
- phoenix5-spark3
- phoenix5-spark3-it
- phoenix5-spark3-shaded
- phoenix5-connectors-assembly
+
+ phoenix5-hive
+ phoenix5-hive-shaded
+ phoenix5-spark
+ phoenix5-spark-shaded
+ phoenix5-spark3
+ phoenix5-spark3-shaded
+ phoenix5-connectors-assembly
- 5.1.3
- 1.0.2
+ 5.2.0
+ 1.1.2
1.9.4
- 2.0.0
-
- 2.4.16
- 2.4.1
- 3.1.2
- 3.5.7
- 4.1.4
+ 2.1.0
+
+ 2.5.8-hadoop3
+ 2.5.0
+ 3.2.4
+ 3.8.4
+ 4.1.5
@@ -75,15 +97,16 @@
${compileSource}
${compileSource}
-
-
+
+
3000m
-
+
-enableassertions -Xmx${surefire.Xmx} -Djava.security.egd=file:/dev/./urandom -Djava.net.preferIPv4Stack=true -Djava.awt.headless=true -Djdk.net.URLClassPath.disableClassPathURLCheck=true -Dorg.apache.hbase.thirdparty.io.netty.leakDetection.level=advanced -Dio.netty.eventLoopThreads=3 -Duser.timezone="America/Los_Angeles" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
-XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68
-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true --add-modules jdk.unsupported --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/jdk.internal.ref=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-exports java.base/jdk.internal.misc=ALL-UNNAMED --add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED
@@ -104,11 +127,11 @@
3.2.4
2.12.18
2.12
-
+
1.2.17
- 2.18.0
+ 2.23.1
3.3.6
- 1.7.30
+ 1.7.36
3.2.2
1.0
1.3.9-1
@@ -184,8 +207,8 @@
maven-compiler-plugin
- org.apache.maven.plugins
- maven-assembly-plugin
+ org.apache.maven.plugins
+ maven-assembly-plugin
@@ -220,8 +243,8 @@
org.apache.rat
apache-rat-plugin
-
+
org.owasp
@@ -249,11 +272,10 @@
2048
-
+
org.codehaus.mojo
build-helper-maven-plugin
@@ -298,7 +320,8 @@
maven-failsafe-plugin
-
+
UTF-8
${numForkedIT}
alphabetical
@@ -340,8 +363,8 @@
-
+
org.apache.felix
maven-bundle-plugin
${maven.bundle.version}
@@ -370,8 +393,8 @@
-
+
com.puppycrawl.tools
@@ -412,7 +435,8 @@
org.apache.maven.plugins
maven-surefire-plugin
-
+
${numForkedUT}
true
${test.output.tofile}
@@ -463,19 +487,19 @@
- maven-dependency-plugin
-
-
- enforce-dependencies
-
- analyze-only
-
-
- true
-
-
-
-
+ maven-dependency-plugin
+
+
+ enforce-dependencies
+
+ analyze-only
+
+
+ true
+
+
+
+
@@ -493,6 +517,28 @@
+
+ org.apache.phoenix
+ phoenix-core-client
+ ${phoenix.version}
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-jersey
+
+
+
+
+ org.apache.phoenix
+ phoenix-core-server
+ ${phoenix.version}
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-jersey
+
+
+
org.apache.phoenix
phoenix-hbase-compat-${hbase.compat.version}
@@ -507,15 +553,15 @@
org.apache.omid
- omid-tso-server-hbase2.x
+ omid-tso-server
test
${omid.version}
org.apache.omid
- omid-tso-server-hbase2.x
+ omid-tso-server
test
- ${omid.version}
+ ${omid.version}
test-jar
@@ -548,6 +594,11 @@
phoenix5-hive
${project.version}
+
+
+
+
+
org.apache.phoenix
phoenix5-hive-shaded
@@ -560,7 +611,8 @@
-
+
org.apache.hbase
hbase-protocol-shaded
@@ -623,7 +675,7 @@
hbase-testing-util
${hbase.version}
test
- true
+
org.jruby
@@ -648,6 +700,17 @@
+
+ org.apache.hbase
+ hbase-it
+ ${hbase.version}
+
+
+ org.jruby
+ jruby-complete
+
+
+
org.apache.hbase
hbase-protocol
@@ -663,7 +726,6 @@
hbase-common
${hbase.version}
test-jar
- test
org.apache.hbase
@@ -675,7 +737,6 @@
hbase-client
${hbase.version}
test-jar
- test
org.apache.hbase
@@ -687,7 +748,6 @@
hbase-server
${hbase.version}
test-jar
- test
org.apache.hbase
@@ -699,7 +759,6 @@
hbase-hadoop-compat
${hbase.version}
test-jar
- test
org.apache.hbase
@@ -711,7 +770,6 @@
hbase-hadoop2-compat
${hbase.version}
test-jar
- test
@@ -754,22 +812,20 @@
org.apache.hadoop
hadoop-mapreduce-client-core
${hadoop.version}
- provided
org.apache.hadoop
hadoop-minicluster
${hadoop.version}
- test
org.apache.hadoop
hadoop-client-minicluster
${hadoop.version}
- test
-
+
org.apache.hadoop
hadoop-auth
@@ -785,11 +841,52 @@
hadoop-mapreduce-client-jobclient
${hadoop.version}
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ test-jar
+ ${hadoop.version}
+
org.apache.hadoop
hadoop-client
${hadoop.version}
+
+ org.apache.hadoop
+ hadoop-yarn-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-registry
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-web-proxy
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-resourcemanager
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-applicationhistoryservice
+ ${hadoop.version}
+
org.apache.hadoop
hadoop-hdfs
@@ -799,7 +896,7 @@
org.apache.hadoop
hadoop-hdfs
${hadoop.version}
- test-jar
+ test-jar
test
@@ -836,13 +933,19 @@
junit
${junit.version}
-
+
org.mockito
mockito-all
${mockito-all.version}
test
-
+
+ org.mockito
+ mockito-core
+ ${mockito-all.version}
+ test
+
+
com.lmax
disruptor
${disruptor.version}
@@ -858,8 +961,8 @@
-
+
build-with-jdk11
@@ -878,7 +981,7 @@
${phoenix-surefire.jdk11.flags} ${phoenix-surefire.jdk17.flags} ${phoenix-surefire.jdk17.tuning.flags} ${phoenix-surefire.argLine}
-
+
java8-doclint-disabled
@@ -890,13 +993,13 @@
- disable-javadoc-for-spark
-
- (8,)
-
-
- true
-
+ disable-javadoc-for-spark
+
+ (8,)
+
+
+ true
+
@@ -930,7 +1033,7 @@
spotbugs-site
- !spotbugs.site
+ !spotbugs.site