
Commit 9216901

holdenk authored and Marcelo Vanzin committed on Aug 8, 2016
[SPARK-16779][TRIVIAL] Avoid using postfix operators where they do not add much and remove whitelisting
## What changes were proposed in this pull request?

Avoid using postfix operation for command execution in SQLQuerySuite, where it wasn't whitelisted, and audit the existing whitelistings, removing postfix operators from most places. Some notable places where postfix operation remains are the XML parsing & time units (seconds, millis, etc.), where it arguably can improve readability.

## How was this patch tested?

Existing tests.

Author: Holden Karau <holden@us.ibm.com>

Closes apache#14407 from holdenk/SPARK-16779.
1 parent 8650239 commit 9216901
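For background, postfix notation lets a parameterless method be called without a dot, which Scala gates behind the scala.language.postfixOps import because the dangling call can interact badly with semicolon inference. A minimal illustrative sketch (hypothetical values, not taken from this diff):

import scala.language.postfixOps

object PostfixSketch {
  val opt: Option[Int] = Some(1)
  val viaPostfix = opt isDefined;  // postfix call: needs the import above; the
                                   // semicolon guards against ambiguous parses
  val viaDot = opt.isDefined       // dot notation: no language import required
}

Dropping the postfix form, as this commit does throughout, removes the need for the language import entirely.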

File tree

21 files changed, +6 -33 lines


core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala (-1)
@@ -25,7 +25,6 @@ import java.util.{Arrays, Comparator, Date}
 
 import scala.collection.JavaConverters._
 import scala.concurrent.duration._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import com.google.common.primitives.Longs

core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala (-1)
@@ -25,7 +25,6 @@ import java.util.concurrent.atomic.AtomicLong
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.apache.spark._

core/src/main/scala/org/apache/spark/util/RpcUtils.scala (-2)
@@ -17,8 +17,6 @@
 
 package org.apache.spark.util
 
-import scala.language.postfixOps
-
 import org.apache.spark.SparkConf
 import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcEnv, RpcTimeout}
 

core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala (-1)
@@ -22,7 +22,6 @@ import java.util.concurrent.{ExecutorService, TimeUnit}
 import scala.collection.Map
 import scala.collection.mutable
 import scala.concurrent.duration._
-import scala.language.postfixOps
 
 import org.mockito.Matchers
 import org.mockito.Matchers._

core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala (-1)
@@ -23,7 +23,6 @@ import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
 
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
-import scala.language.postfixOps
 
 import com.codahale.metrics.Counter
 import com.google.common.cache.LoadingCache

core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala (+2 -2)
@@ -21,7 +21,6 @@ import java.io.File
 
 import scala.collection.Map
 import scala.io.Codec
-import scala.language.postfixOps
 import scala.sys.process._
 import scala.util.Try
 

@@ -215,7 +214,8 @@ class PipedRDDSuite extends SparkFunSuite with SharedSparkContext {
   }
 
   def testCommandAvailable(command: String): Boolean = {
-    Try(Process(command) !!).isSuccess
+    val attempt = Try(Process(command).run().exitValue())
+    attempt.isSuccess && attempt.get == 0
   }
 
   def testExportInputFile(varName: String) {
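In this rewrite (repeated in SQLQuerySuite below), `Process(command) !!` invoked `!!` in postfix position; `!!` throws on a nonzero exit code, so `Try(...).isSuccess` was true only when the command ran and exited 0. The replacement keeps those semantics without postfix syntax: `run()` starts the process (Try still catches the IOException raised when the executable is missing) and `exitValue()` blocks for the exit code, which is compared to 0 explicitly. A standalone sketch of the new helper, with hedged usage:

import scala.sys.process._
import scala.util.Try

def testCommandAvailable(command: String): Boolean = {
  // run() starts the process; Try captures the IOException thrown
  // when the executable cannot be found on the PATH.
  val attempt = Try(Process(command).run().exitValue())
  // exitValue() blocked until termination; require a clean (0) exit.
  attempt.isSuccess && attempt.get == 0
}

// Hypothetical usage: guard a pipe test on the availability of `cat`.
// if (testCommandAvailable("cat")) { /* run the piped-RDD test */ }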

core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala (-1)
@@ -20,7 +20,6 @@ package org.apache.spark.storage
 import java.nio.ByteBuffer
 
 import scala.language.implicitConversions
-import scala.language.postfixOps
 import scala.language.reflectiveCalls
 import scala.reflect.ClassTag
 

external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala (-2)
@@ -25,7 +25,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import kafka.admin.AdminUtils

@@ -279,4 +278,3 @@ private[kafka010] class KafkaTestUtils extends Logging {
     }
   }
 }
-

external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala (-2)
@@ -25,7 +25,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import kafka.admin.AdminUtils

@@ -274,4 +273,3 @@ private[kafka] class KafkaTestUtils extends Logging {
     }
   }
 }
-

graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala (+1 -2)
@@ -17,7 +17,6 @@
 
 package org.apache.spark.graphx.lib
 
-import scala.language.postfixOps
 import scala.reflect.ClassTag
 
 import org.apache.spark.graphx._

@@ -109,7 +108,7 @@ object PageRank extends Logging {
     require(resetProb >= 0 && resetProb <= 1, s"Random reset probability must belong" +
       s" to [0, 1], but got ${resetProb}")
 
-    val personalized = srcId isDefined
+    val personalized = srcId.isDefined
     val src: VertexId = srcId.getOrElse(-1L)
 
     // Initialize the PageRank graph with each edge attribute having

mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala (-1)
@@ -19,7 +19,6 @@ package org.apache.spark.mllib.util
 
 import java.{util => ju}
 
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.apache.spark.SparkContext

repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala (-1)
@@ -27,7 +27,6 @@ import java.util
 import scala.concurrent.duration._
 import scala.io.Source
 import scala.language.implicitConversions
-import scala.language.postfixOps
 
 import com.google.common.io.Files
 import org.mockito.Matchers.anyString

sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala (-2)
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.types
 
-import scala.language.postfixOps
-
 import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.SparkFunSuite

sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala (-1)
@@ -21,7 +21,6 @@ import java.io.File
 import java.nio.charset.StandardCharsets
 import java.util.UUID
 
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.Matchers._

sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala (-2)
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.expressions.Aggregator
 import org.apache.spark.sql.expressions.scalalang.typed

sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala (-2)
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.SharedSQLContext
 

sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala (-2)
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.test.SharedSQLContext
 
 case class IntClass(value: Int)

sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala (-2)
@@ -20,8 +20,6 @@ package org.apache.spark.sql
 import java.io.{Externalizable, ObjectInput, ObjectOutput}
 import java.sql.{Date, Timestamp}
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder}
 import org.apache.spark.sql.catalyst.util.sideBySide
 import org.apache.spark.sql.execution.streaming.MemoryStream

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala (+2 -1)
@@ -1790,6 +1790,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   def testCommandAvailable(command: String): Boolean = {
-    Try(Process(command) !!).isSuccess
+    val attempt = Try(Process(command).run().exitValue())
+    attempt.isSuccess && attempt.get == 0
   }
 }

streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala (-1)
@@ -25,7 +25,6 @@ import java.util.concurrent.atomic.AtomicInteger
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
-import scala.language.postfixOps
 
 import com.google.common.io.Files
 import org.apache.hadoop.fs.Path

yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala (+1 -3)
@@ -19,8 +19,6 @@ package org.apache.spark.deploy.yarn
 import java.security.PrivilegedExceptionAction
 import java.util.concurrent.{Executors, TimeUnit}
 
-import scala.language.postfixOps
-
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.security.UserGroupInformation

@@ -128,7 +126,7 @@ private[yarn] class AMDelegationTokenRenewer(
       try {
         val remoteFs = FileSystem.get(freshHadoopConf)
         val credentialsPath = new Path(credentialsFile)
-        val thresholdTime = System.currentTimeMillis() - (daysToKeepFiles days).toMillis
+        val thresholdTime = System.currentTimeMillis() - (daysToKeepFiles.days).toMillis
         hadoopUtil.listFilesSorted(
           remoteFs, credentialsPath.getParent,
           credentialsPath.getName, SparkHadoopUtil.SPARK_YARN_CREDS_TEMP_EXTENSION)
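In the second hunk, `days` comes from the DurationInt implicit in scala.concurrent.duration, so only the call syntax changes: `daysToKeepFiles days` is a postfix call, while `daysToKeepFiles.days` is the same conversion written with a dot and needs no language import. A minimal sketch (the value 5 is hypothetical; the real value comes from Spark configuration):

import scala.concurrent.duration._

// .days builds a FiniteDuration via the DurationInt implicit class;
// toMillis converts it to a Long for the timestamp arithmetic.
val daysToKeepFiles = 5  // hypothetical value for illustration
val thresholdTime = System.currentTimeMillis() - daysToKeepFiles.days.toMillis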

0 commit comments