Skip to content

Commit

Permalink
fix UT failures and javadoc problems
Browse files Browse the repository at this point in the history
  • Loading branch information
abstractdog committed Jan 16, 2025
1 parent b87e9e0 commit bf75848
Show file tree
Hide file tree
Showing 9 changed files with 30 additions and 5 deletions.
5 changes: 5 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -756,6 +756,11 @@
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
Expand Down
10 changes: 6 additions & 4 deletions ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
Original file line number Diff line number Diff line change
Expand Up @@ -287,13 +287,15 @@ public long getQueryStartTime() {
* 1. general semantic exception
 * 2. transaction manager validation (throwing exception)
* What a user expects here is something like "QUERY", "DDL", "DML", so this magic will do its best to tell.
* any kind of "analyze" ==> STATS
* DML operations (INSERT, UPDATE, DELETE, MERGE) ==> DML
* MAPRED ==> QUERY, DML (depending on QueryProperties achieved in compile time)
* FETCH ==> QUERY: a simple fetch task is a QUERY
* any kind of "analyze": STATS
* DML operations (INSERT, UPDATE, DELETE, MERGE): DML
* MAPRED: QUERY, DML (depending on QueryProperties achieved in compile time)
 * FETCH: QUERY (a simple fetch task is a QUERY)
* UNKNOWN: if we can't determine the type of the query:
* e.g. when ParseException happens, we won't do further magic,
* even if it's obvious by reading the sql statement that user wanted to run e.g. a select query
* @param sem the semantic analyzer which already analyzed the query
* @param tree the root ASTNode of the query
*/
public void setQueryType(BaseSemanticAnalyzer sem, ASTNode tree) {
List<Task<? extends Serializable>> rootTasks = sem.getAllRootTasks();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ public void setMonitor(TezJobMonitor monitor) {
}

public long getCounter(String groupName, String counterName) {
CounterGroup group = getCounters().getGroup(groupName);
CounterGroup group = counters == null ? null : counters.getGroup(groupName);
if (group == null) {
return 0;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ private void updateFromQueryState(QueryState queryState, QueryHistoryRecord reco
}

private void updateFromQueryInfo(QueryInfo queryInfo, QueryHistoryRecord record) {
if (queryInfo == null) {
return;
}
// this state is the same as "state" displayed in the JSON returned by QueriesRESTfulAPIServlet
record.setOperationId(queryInfo.getOperationId());
record.setExecutionEngine(queryInfo.getExecutionEngine());
Expand Down
4 changes: 4 additions & 0 deletions service/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -240,6 +240,10 @@
<groupId>org.jamon</groupId>
<artifactId>jamon-runtime</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<!-- intra-project -->
<dependency>
<groupId>org.apache.hive</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;


import javax.servlet.ServletContext;
import javax.servlet.ServletException;
Expand Down Expand Up @@ -144,6 +146,8 @@ private void sendAsJson(
SimpleModule module = new SimpleModule("CustomSessionModule", new Version(1, 0, 0, null, null, null));
module.addSerializer(HiveSession.class, new HiveSessionSerializer());
mapper.registerModule(module);
// support java.time.Instant from QueryInfo
mapper.registerModule(new JavaTimeModule());

try {
PrintWriter out = response.getWriter();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ public static void beforeTests() throws Exception {
hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
// query history adds no value to this test, it would just bring iceberg handler dependency, which isn't worth it
// this should be handled with HiveConfForTests when it's used here too
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_QUERY_HISTORY_SERVICE_ENABLED, false);

startHiveServer2WithConf(hiveConf);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,9 @@ public static void beforeTests() throws Exception {
hiveConf.set(ConfVars.METASTORE_PWD.varname, METASTORE_PASSWD);
hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
// query history adds no value to this test, it would just bring iceberg handler dependency, which isn't worth it
// this should be handled with HiveConfForTests when it's used here too
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_QUERY_HISTORY_SERVICE_ENABLED, false);
PasswdAuthenticationProvider authenticationProvider = new DummyLdapAuthenticationProviderImpl();
hiveServer2 = new HiveServer2(authenticationProvider);
hiveServer2.init(hiveConf);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,9 @@ public static void beforeTests() throws Exception {
hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true);
hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
// query history adds no value to this test, it would just bring iceberg handler dependency, which isn't worth it
// this should be handled with HiveConfForTests when it's used here too
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_QUERY_HISTORY_SERVICE_ENABLED, false);
hiveServer2 = new HiveServer2(new TestPamAuthenticator(hiveConf));
hiveServer2.init(hiveConf);
hiveServer2.start();
Expand Down

0 comments on commit bf75848

Please sign in to comment.