|
34 | 34 | import java.util.function.Supplier;
|
35 | 35 | import java.util.stream.Collectors;
|
36 | 36 |
|
| 37 | +import scala.Tuple2; |
| 38 | + |
37 | 39 | import com.google.common.annotations.VisibleForTesting;
|
38 | 40 | import com.google.common.collect.Maps;
|
39 | 41 | import com.google.common.collect.Sets;
|
@@ -684,8 +686,6 @@ public boolean reassignOnStageResubmit(
|
684 | 686 | int requiredShuffleServerNumber =
|
685 | 687 | RssSparkShuffleUtils.getRequiredShuffleServerNumber(sparkConf);
|
686 | 688 | int estimateTaskConcurrency = RssSparkShuffleUtils.estimateTaskConcurrency(sparkConf);
|
687 |
| - // Deregister the shuffleId corresponding to the Shuffle Server. |
688 |
| - shuffleWriteClient.unregisterShuffle(appId, shuffleId); |
689 | 689 | Map<Integer, List<ShuffleServerInfo>> partitionToServers =
|
690 | 690 | requestShuffleAssignment(
|
691 | 691 | shuffleId,
|
@@ -1042,7 +1042,7 @@ protected void registerShuffleServers(
|
1042 | 1042 | }
|
1043 | 1043 | LOG.info("Start to register shuffleId {}", shuffleId);
|
1044 | 1044 | long start = System.currentTimeMillis();
|
1045 |
| - Map<String, String> sparkConfMap = RssSparkConfig.sparkConfToMap(getSparkConf()); |
| 1045 | + Map<String, String> sparkConfMap = sparkConfToMap(getSparkConf()); |
1046 | 1046 | serverToPartitionRanges.entrySet().stream()
|
1047 | 1047 | .forEach(
|
1048 | 1048 | entry -> {
|
@@ -1073,7 +1073,7 @@ protected void registerShuffleServers(
|
1073 | 1073 | }
|
1074 | 1074 | LOG.info("Start to register shuffleId[{}]", shuffleId);
|
1075 | 1075 | long start = System.currentTimeMillis();
|
1076 |
| - Map<String, String> sparkConfMap = RssSparkConfig.sparkConfToMap(getSparkConf()); |
| 1076 | + Map<String, String> sparkConfMap = sparkConfToMap(getSparkConf()); |
1077 | 1077 | Set<Map.Entry<ShuffleServerInfo, List<PartitionRange>>> entries =
|
1078 | 1078 | serverToPartitionRanges.entrySet();
|
1079 | 1079 | entries.stream()
|
@@ -1119,4 +1119,19 @@ public boolean isRssStageRetryForFetchFailureEnabled() {
|
  /**
   * Returns the {@link SparkConf} held by this manager.
   *
   * @return the {@code sparkConf} field of this instance
   */
  public SparkConf getSparkConf() {
    return sparkConf;
  }
|
| 1122 | + |
| 1123 | + public Map<String, String> sparkConfToMap(SparkConf sparkConf) { |
| 1124 | + Map<String, String> map = new HashMap<>(); |
| 1125 | + |
| 1126 | + for (Tuple2<String, String> tuple : sparkConf.getAll()) { |
| 1127 | + String key = tuple._1; |
| 1128 | + map.put(key, tuple._2); |
| 1129 | + } |
| 1130 | + |
| 1131 | + return map; |
| 1132 | + } |
| 1133 | + |
  /**
   * Returns the shuffle write client used by this manager.
   *
   * @return the {@code shuffleWriteClient} field of this instance
   */
  public ShuffleWriteClient getShuffleWriteClient() {
    return shuffleWriteClient;
  }
1122 | 1137 | }
|
0 commit comments