Commit 21eae09

Cache and reuse ValidTxnWriteIdList

1 parent 94874ff commit 21eae09

7 files changed: +82, -239 lines


ql/src/java/org/apache/hadoop/hive/ql/DriverTxnHandler.java

Lines changed: 1 addition & 1 deletion
@@ -246,7 +246,7 @@ private void acquireLocks() throws CommandProcessorException {
       String errorMessage;
       if (driverState.isDestroyed() || driverState.isAborted() || driverState.isClosed()) {
         errorMessage = String.format("Ignore lock acquisition related exception in terminal state (%s): %s",
-            driverState.toString(), e.getMessage());
+            driverState, e.getMessage());
         CONSOLE.printInfo(errorMessage);
       } else {
         errorMessage = String.format("FAILED: Error in acquiring locks: %s", e.getMessage());

ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java

Lines changed: 1 addition & 1 deletion
@@ -2091,7 +2091,7 @@ public void startAnalysis() {
   public void endAnalysis(ASTNode tree) {
     if (ctx != null){
       queryProperties.setUsedTables(
-          CacheTableHelper.getUniqueNames(ctx.getParsedTables()));
+          TableHelper.getUniqueNames(ctx.getParsedTables()));
     }
     setQueryType(tree); // at this point we know the query type for sure
   }

ql/src/java/org/apache/hadoop/hive/ql/parse/CacheTableHelper.java

Lines changed: 0 additions & 201 deletions
This file was deleted.

ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java

Lines changed: 2 additions & 3 deletions
@@ -2458,8 +2458,7 @@ protected Set<TableName> getTablesUsed(RelNode plan) {
       new RelVisitor() {
         @Override
         public void visit(RelNode node, int ordinal, RelNode parent) {
-          if (node instanceof TableScan) {
-            TableScan ts = (TableScan) node;
+          if (node instanceof TableScan ts) {
             Table table = ((RelOptHiveTable) ts.getTable()).getHiveTableMD();
             if (AcidUtils.isTransactionalTable(table) ||
                 table.isNonNative() && table.getStorageHandler().areSnapshotsSupported()) {
@@ -5112,7 +5111,7 @@ private ImmutableMap<String, Integer> buildHiveColNameToInputPosMap(
     return hiveColNameToInputPosMapBuilder.build();
   }
 
-  private QBParseInfo getQBParseInfo(QB qb) throws CalciteSemanticException {
+  private QBParseInfo getQBParseInfo(QB qb) {
     return qb.getParseInfo();
   }
 }
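An illustrative aside, not part of the commit: the first hunk switches to pattern matching for instanceof (available since Java 16), which binds the narrowed variable in the condition and removes the separate cast. A small self-contained sketch of the idiom:

public class InstanceofPatternDemo {
  sealed interface Node permits Scan, Join {}
  record Scan(String table) implements Node {}
  record Join(Node left, Node right) implements Node {}

  static String describe(Node node) {
    // The pattern variable 'scan' is only in scope where the test succeeded,
    // replacing the old check-then-cast two-step.
    if (node instanceof Scan scan) {
      return "scan of " + scan.table();
    }
    return "other node";
  }

  public static void main(String[] args) {
    System.out.println(describe(new Scan("t1")));
    System.out.println(describe(new Join(new Scan("a"), new Scan("b"))));
  }
}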

ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java

Lines changed: 1 addition & 3 deletions
@@ -45,14 +45,12 @@ public static TableName of(Table table) throws SemanticException {
    * Set a @{@link Table} object's table and db names based on the provided string.
    * @param dbTable the dbtable string
    * @param table the table to update
-   * @return the table
    * @throws SemanticException
    */
-  public static Table setFrom(String dbTable, Table table) throws SemanticException{
+  public static void setFrom(String dbTable, Table table) throws SemanticException{
     TableName name = ofNullable(dbTable);
     table.setTableName(name.getTable());
     table.setDbName(name.getDb());
-    return table;
   }
 
   /**
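An aside on the design choice, not part of the commit: dropping the return turns setFrom into a plain mutator; the fluent-style return was only useful for chaining. A self-contained sketch with a hypothetical TableRef standing in for Hive's Table (the real method delegates the parsing to ofNullable, as shown above; the split here is illustration only):

public class SetFromDemo {
  // Hypothetical stand-in for org.apache.hadoop.hive.ql.metadata.Table.
  static class TableRef {
    String dbName;
    String tableName;
  }

  // Mirrors the reworked signature: mutate in place, return nothing.
  static void setFrom(String dbTable, TableRef table) {
    int dot = dbTable.indexOf('.');
    table.dbName = dot < 0 ? "default" : dbTable.substring(0, dot);
    table.tableName = dot < 0 ? dbTable : dbTable.substring(dot + 1);
  }

  public static void main(String[] args) {
    TableRef t = new TableRef();
    setFrom("sales_db.orders", t);
    System.out.println(t.dbName + " / " + t.tableName);  // sales_db / orders
  }
}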

ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Lines changed: 28 additions & 30 deletions
@@ -87,7 +87,6 @@
 import org.apache.hadoop.hive.common.StatsSetupConst.StatDB;
 import org.apache.hadoop.hive.common.StringInternUtils;
 import org.apache.hadoop.hive.common.TableName;
-import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.conf.Constants;
@@ -363,7 +362,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   private final Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
   private Map<SelectOperator, Table> viewProjectToTableSchema;
   private Operator<? extends OperatorDesc> sinkOp;
-  private final CacheTableHelper cacheTableHelper = new CacheTableHelper();
 
   /**
    * a map for the split sampling, from alias to an instance of SplitSample
@@ -2883,8 +2881,7 @@ private void replaceViewReferenceWithDefinition(QB qb, Table tab,
       String viewText = tab.getViewExpandedText();
       TableMask viewMask = new TableMask(this, conf, false);
       viewTree = ParseUtils.parse(viewText, ctx, tab.getCompleteName());
-      cacheTableHelper.populateCacheForView(ctx.getParsedTables(), conf,
-          getTxnMgr(), tab.getDbName(), tab.getTableName());
+
       if (viewMask.isEnabled() && analyzeRewrite == null) {
         ParseResult parseResult = rewriteASTWithMaskAndFilter(viewMask, viewTree,
             ctx.getViewTokenRewriteStream(viewFullyQualifiedName),
@@ -13163,15 +13160,21 @@ void analyzeInternal(ASTNode ast, Supplier<PlannerContext> pcf) throws SemanticE
     perfLogger.perfLogBegin(this.getClass().getName(), PerfLogger.GENERATE_RESOLVED_PARSETREE);
     // 1. Generate Resolved Parse tree from syntax tree
     boolean needsTransform = needsTransform();
-    //change the location of position alias process here
+    // change the location of position alias process here
     processPositionAlias(ast);
-    cacheTableHelper.populateCache(ctx.getParsedTables(), conf, getTxnMgr());
     PlannerContext plannerCtx = pcf.get();
     if (!genResolvedParseTree(ast, plannerCtx)) {
       return;
     }
-    if (tablesFromReadEntities(inputs).stream().anyMatch(AcidUtils::isTransactionalTable)) {
-      queryState.getValidTxnList();
+
+    if (queryState.getHMSCache() != null) {
+      // this step primes the cache containing the validTxnWriteIdList. It will fetch
+      // all the tables into the MetaStore Client cache with one HMS call.
+      getQueryValidTxnWriteIdList();
+    } else {
+      if (tablesFromReadEntities(inputs).stream().anyMatch(AcidUtils::isTransactionalTable)) {
+        queryState.getValidTxnList();
+      }
     }
 
     if (HiveConf.getBoolVar(conf, ConfVars.HIVE_REMOVE_ORDERBY_IN_SUBQUERY)) {
@@ -15166,30 +15169,25 @@ private String getQueryStringForCache(ASTNode ast) {
 
   private ValidTxnWriteIdList getQueryValidTxnWriteIdList() throws SemanticException {
     // TODO: Once HIVE-18948 is in, should be able to retrieve writeIdList from the conf.
-    //cachedWriteIdList = AcidUtils.getValidTxnWriteIdList(conf);
+    // cachedWriteIdList = AcidUtils.getValidTxnWriteIdList(conf);
     //
-    List<String> transactionalTables = tablesFromReadEntities(inputs)
-        .stream()
-        .filter(AcidUtils::isTransactionalTable)
-        .map(Table::getFullyQualifiedName)
-        .collect(Collectors.toList());
-
-    if (transactionalTables.size() > 0) {
-      String txnString = queryState.getValidTxnList();
-      if (txnString == null) {
-        return null;
-      }
-      try {
-        return getTxnMgr().getValidWriteIds(transactionalTables, txnString);
-      } catch (Exception err) {
-        String msg = "Error while getting the txnWriteIdList for tables " + transactionalTables
-            + " and validTxnList " + conf.get(ValidTxnList.VALID_TXNS_KEY);
-        throw new SemanticException(msg, err);
-      }
-    }
+    var transactionalTables = tablesFromReadEntities(inputs)
+        .stream()
+        .filter(AcidUtils::isTransactionalTable)
+        .map(Table::getFullyQualifiedName)
+        .toList();
 
-    // No transactional tables.
-    return null;
+    if (transactionalTables.isEmpty()) {
+      return null;
+    }
+    String txnString = queryState.getValidTxnList();
+    try {
+      return getTxnMgr().getValidWriteIds(transactionalTables, txnString);
+    } catch (Exception err) {
+      String msg = "Error while getting the txnWriteIdList for tables " + transactionalTables
+          + " and validTxnList " + txnString;
+      throw new SemanticException(msg, err);
+    }
   }
 
   private QueryResultsCache.LookupInfo createLookupInfoForQuery(ASTNode astNode) throws SemanticException {
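An illustrative aside, not part of the commit: the reworked getQueryValidTxnWriteIdList follows a common shape: filter the query inputs down to transactional tables, bail out early when there are none, and otherwise resolve all write IDs in a single call so the result can be cached and reused, matching the commit title. A minimal, self-contained sketch with hypothetical stand-ins (WriteIdResolver, TxnManager, TableRef) for the Hive types:

import java.util.List;
import java.util.Set;

// Hypothetical stand-ins; the real code uses Hive's txn manager and ValidTxnWriteIdList.
public class WriteIdResolver {

  interface TxnManager {
    String getValidWriteIds(List<String> tables, String validTxnList) throws Exception;
  }

  record TableRef(String fullyQualifiedName, boolean transactional) {}

  // Collect the transactional tables once, then resolve their write IDs with a single call,
  // so the (potentially expensive) metastore round trip happens at most once per query.
  static String resolveWriteIds(Set<TableRef> inputs, String validTxnList, TxnManager txnMgr)
      throws Exception {
    var transactionalTables = inputs.stream()
        .filter(TableRef::transactional)
        .map(TableRef::fullyQualifiedName)
        .toList();
    if (transactionalTables.isEmpty()) {
      return null;  // nothing to resolve for a purely non-transactional query
    }
    return txnMgr.getValidWriteIds(transactionalTables, validTxnList);
  }

  public static void main(String[] args) throws Exception {
    TxnManager fake = (tables, txns) -> String.join(",", tables) + "#" + txns;
    Set<TableRef> inputs = Set.of(
        new TableRef("db.acid_orders", true),
        new TableRef("db.external_log", false));
    System.out.println(resolveWriteIds(inputs, "validTxns", fake));
  }
}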
