Commit aa74387

HIVE-25189: Addendum: Cache validWriteIdList after SemanticAnalyzer#genResolvedParseTree (#6153)

1 parent 4f35a45 commit aa74387

7 files changed: +90, -242 lines

ql/src/java/org/apache/hadoop/hive/ql/DriverTxnHandler.java

Lines changed: 1 addition & 1 deletion
@@ -246,7 +246,7 @@ private void acquireLocks() throws CommandProcessorException {
       String errorMessage;
       if (driverState.isDestroyed() || driverState.isAborted() || driverState.isClosed()) {
         errorMessage = String.format("Ignore lock acquisition related exception in terminal state (%s): %s",
-            driverState.toString(), e.getMessage());
+            driverState, e.getMessage());
         CONSOLE.printInfo(errorMessage);
       } else {
         errorMessage = String.format("FAILED: Error in acquiring locks: %s", e.getMessage());

ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java

Lines changed: 1 addition & 1 deletion
@@ -2091,7 +2091,7 @@ public void startAnalysis() {
   public void endAnalysis(ASTNode tree) {
     if (ctx != null){
       queryProperties.setUsedTables(
-          CacheTableHelper.getUniqueNames(ctx.getParsedTables()));
+          TableHelper.getUniqueNames(ctx.getParsedTables()));
     }
     setQueryType(tree); // at this point we know the query type for sure
   }

ql/src/java/org/apache/hadoop/hive/ql/parse/CacheTableHelper.java

Lines changed: 0 additions & 201 deletions
This file was deleted.

ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java

Lines changed: 2 additions & 3 deletions
@@ -2458,8 +2458,7 @@ protected Set<TableName> getTablesUsed(RelNode plan) {
         new RelVisitor() {
           @Override
           public void visit(RelNode node, int ordinal, RelNode parent) {
-            if (node instanceof TableScan) {
-              TableScan ts = (TableScan) node;
+            if (node instanceof TableScan ts) {
               Table table = ((RelOptHiveTable) ts.getTable()).getHiveTableMD();
               if (AcidUtils.isTransactionalTable(table) ||
                   table.isNonNative() && table.getStorageHandler().areSnapshotsSupported()) {
@@ -5112,7 +5111,7 @@ private ImmutableMap<String, Integer> buildHiveColNameToInputPosMap(
     return hiveColNameToInputPosMapBuilder.build();
   }

-  private QBParseInfo getQBParseInfo(QB qb) throws CalciteSemanticException {
+  private QBParseInfo getQBParseInfo(QB qb) {
     return qb.getParseInfo();
   }
 }
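Both hunks are mechanical cleanups: the first adopts pattern matching for instanceof (Java 16+), which declares and binds the cast variable in the condition itself, and the second drops a throws clause for an exception the method never actually throws. A small self-contained sketch of the pattern-matching form, using placeholder types rather than Calcite's:

public class InstanceofPatternDemo {
  interface RelNode { }
  record TableScan(String tableName) implements RelNode { }

  static void visit(RelNode node) {
    // Pre-Java-16 style: test, then cast on a separate line.
    if (node instanceof TableScan) {
      TableScan ts = (TableScan) node;
      System.out.println("scan of " + ts.tableName());
    }
    // Java 16+ pattern matching: 'ts' is declared and bound only when the test succeeds.
    if (node instanceof TableScan ts) {
      System.out.println("scan of " + ts.tableName());
    }
  }

  public static void main(String[] args) {
    visit(new TableScan("orders"));
  }
}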

ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java

Lines changed: 1 addition & 3 deletions
@@ -45,14 +45,12 @@ public static TableName of(Table table) throws SemanticException {
    * Set a @{@link Table} object's table and db names based on the provided string.
    * @param dbTable the dbtable string
    * @param table the table to update
-   * @return the table
    * @throws SemanticException
    */
-  public static Table setFrom(String dbTable, Table table) throws SemanticException{
+  public static void setFrom(String dbTable, Table table) throws SemanticException{
     TableName name = ofNullable(dbTable);
     table.setTableName(name.getTable());
     table.setDbName(name.getDb());
-    return table;
   }

   /**
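setFrom mutates the Table it receives, so returning that same argument added nothing and could suggest a new object was being produced; the void signature makes the in-place update explicit. A toy version of the post-change shape (the Table class and the dot-splitting below are stand-ins for Hive's Table and TableName.ofNullable):

public class SetFromDemo {
  static class Table {
    String dbName;
    String tableName;
  }

  // void return: the signature now says "this mutates its argument in place".
  static void setFrom(String dbTable, Table table) {
    // Toy stand-in for TableName.ofNullable; assumes a "db.table" input.
    String[] parts = dbTable.split("\\.", 2);
    table.dbName = parts[0];
    table.tableName = parts[1];
  }

  public static void main(String[] args) {
    Table t = new Table();
    setFrom("sales.orders", t);
    System.out.println(t.dbName + "." + t.tableName); // prints sales.orders
  }
}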

ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Lines changed: 36 additions & 33 deletions
@@ -87,7 +87,6 @@
 import org.apache.hadoop.hive.common.StatsSetupConst.StatDB;
 import org.apache.hadoop.hive.common.StringInternUtils;
 import org.apache.hadoop.hive.common.TableName;
-import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.conf.Constants;
@@ -363,7 +362,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   private final Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
   private Map<SelectOperator, Table> viewProjectToTableSchema;
   private Operator<? extends OperatorDesc> sinkOp;
-  private final CacheTableHelper cacheTableHelper = new CacheTableHelper();

   /**
    * a map for the split sampling, from alias to an instance of SplitSample
@@ -2883,8 +2881,7 @@ private void replaceViewReferenceWithDefinition(QB qb, Table tab,
     String viewText = tab.getViewExpandedText();
     TableMask viewMask = new TableMask(this, conf, false);
     viewTree = ParseUtils.parse(viewText, ctx, tab.getCompleteName());
-    cacheTableHelper.populateCacheForView(ctx.getParsedTables(), conf,
-        getTxnMgr(), tab.getDbName(), tab.getTableName());
+
     if (viewMask.isEnabled() && analyzeRewrite == null) {
       ParseResult parseResult = rewriteASTWithMaskAndFilter(viewMask, viewTree,
           ctx.getViewTokenRewriteStream(viewFullyQualifiedName),
@@ -13163,16 +13160,13 @@ void analyzeInternal(ASTNode ast, Supplier<PlannerContext> pcf) throws SemanticE
     perfLogger.perfLogBegin(this.getClass().getName(), PerfLogger.GENERATE_RESOLVED_PARSETREE);
     // 1. Generate Resolved Parse tree from syntax tree
     boolean needsTransform = needsTransform();
-    //change the location of position alias process here
+    // change the location of position alias process here
     processPositionAlias(ast);
-    cacheTableHelper.populateCache(ctx.getParsedTables(), conf, getTxnMgr());
     PlannerContext plannerCtx = pcf.get();
     if (!genResolvedParseTree(ast, plannerCtx)) {
       return;
     }
-    if (tablesFromReadEntities(inputs).stream().anyMatch(AcidUtils::isTransactionalTable)) {
-      queryState.getValidTxnList();
-    }
+    openTxnAndSetValidTxnList();

     if (HiveConf.getBoolVar(conf, ConfVars.HIVE_REMOVE_ORDERBY_IN_SUBQUERY)) {
       for (String alias : qb.getSubqAliases()) {
@@ -15164,39 +15158,48 @@ private String getQueryStringForCache(ASTNode ast) {
         .toString(RESULTS_CACHE_KEY_TOKEN_REWRITE_PROGRAM, ast.getTokenStartIndex(), ast.getTokenStopIndex());
   }

-  private ValidTxnWriteIdList getQueryValidTxnWriteIdList() throws SemanticException {
+  private void openTxnAndSetValidTxnList() throws SemanticException {
+    if (tablesFromReadEntities(inputs).stream().noneMatch(AcidUtils::isTransactionalTable)
+        || SessionState.get().isCompaction()) {
+      return;
+    }
+    if (queryState.getHMSCache() != null) {
+      // this step primes the cache containing the validTxnWriteIdList. It will fetch
+      // all the tables into the MetaStore Client cache with one HMS call.
+      getValidTxnWriteIdList();
+    } else {
+      queryState.getValidTxnList();
+    }
+  }
+
+  private ValidTxnWriteIdList getValidTxnWriteIdList() throws SemanticException {
     // TODO: Once HIVE-18948 is in, should be able to retrieve writeIdList from the conf.
-    //cachedWriteIdList = AcidUtils.getValidTxnWriteIdList(conf);
+    // cachedWriteIdList = AcidUtils.getValidTxnWriteIdList(conf);
     //
-    List<String> transactionalTables = tablesFromReadEntities(inputs)
-        .stream()
-        .filter(AcidUtils::isTransactionalTable)
-        .map(Table::getFullyQualifiedName)
-        .collect(Collectors.toList());
-
-    if (transactionalTables.size() > 0) {
-      String txnString = queryState.getValidTxnList();
-      if (txnString == null) {
-        return null;
-      }
-      try {
-        return getTxnMgr().getValidWriteIds(transactionalTables, txnString);
-      } catch (Exception err) {
-        String msg = "Error while getting the txnWriteIdList for tables " + transactionalTables
-            + " and validTxnList " + conf.get(ValidTxnList.VALID_TXNS_KEY);
-        throw new SemanticException(msg, err);
-      }
-    }
+    var transactionalTables = tablesFromReadEntities(inputs)
+        .stream()
+        .filter(AcidUtils::isTransactionalTable)
+        .map(Table::getFullyQualifiedName)
+        .toList();

-    // No transactional tables.
-    return null;
+    if (transactionalTables.isEmpty()) {
+      return null;
+    }
+    String txnString = queryState.getValidTxnList();
+    try {
+      return getTxnMgr().getValidWriteIds(transactionalTables, txnString);
+    } catch (Exception err) {
+      String msg = "Error while getting the txnWriteIdList for tables " + transactionalTables
+          + " and validTxnList " + txnString;
+      throw new SemanticException(msg, err);
+    }
   }

   private QueryResultsCache.LookupInfo createLookupInfoForQuery(ASTNode astNode) throws SemanticException {
     QueryResultsCache.LookupInfo lookupInfo = null;
     String queryString = getQueryStringForCache(astNode);
     if (queryString != null) {
-      ValidTxnWriteIdList writeIdList = getQueryValidTxnWriteIdList();
+      ValidTxnWriteIdList writeIdList = getValidTxnWriteIdList();
       Set<Long> involvedTables = tablesFromReadEntities(inputs).stream()
           .map(Table::getTTable)
           .map(org.apache.hadoop.hive.metastore.api.Table::getId)
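Besides extracting the new openTxnAndSetValidTxnList guard, the rewrite flattens the old nested if into early returns and swaps collect(Collectors.toList()) for Java 16's Stream.toList(). One nuance worth knowing when making that swap: Stream.toList() returns an unmodifiable list, whereas Collectors.toList() makes no such guarantee (in practice it yields a mutable ArrayList). That is safe here because transactionalTables is only read afterwards. A standalone illustration:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToListDemo {
  public static void main(String[] args) {
    List<String> collected = Stream.of("db.t1", "db.t2").collect(Collectors.toList());
    collected.add("db.t3"); // fine: Collectors.toList() currently yields a mutable ArrayList

    List<String> unmodifiable = Stream.of("db.t1", "db.t2").toList();
    try {
      unmodifiable.add("db.t3"); // Stream.toList() results reject mutation
    } catch (UnsupportedOperationException e) {
      System.out.println("Stream.toList() returned an unmodifiable list");
    }
  }
}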
