
Commit fd20248
[SPARK-12489][CORE][SQL][MLLIB] Fix minor issues found by FindBugs
Include the following changes:

1. Close `java.sql.Statement`.
2. Fix incorrect `asInstanceOf`.
3. Remove unnecessary `synchronized` and `ReentrantLock`.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes apache#10440 from zsxwing/findbugs.

(cherry picked from commit 710b411)
Signed-off-by: Shixiong Zhu <shixiong@databricks.com>
1 parent a9c52d4 · commit fd20248

File tree

7 files changed: 51 additions, 32 deletions


core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala

Lines changed: 1 addition & 2 deletions

@@ -18,7 +18,6 @@
 package org.apache.spark.scheduler.cluster.mesos
 
 import java.io.File
-import java.util.concurrent.locks.ReentrantLock
 import java.util.{Collections, Date, List => JList}
 
 import scala.collection.JavaConverters._
@@ -126,7 +125,7 @@ private[spark] class MesosClusterScheduler(
   private val retainedDrivers = conf.getInt("spark.mesos.retainedDrivers", 200)
   private val maxRetryWaitTime = conf.getInt("spark.mesos.cluster.retry.wait.max", 60) // 1 minute
   private val schedulerState = engineFactory.createEngine("scheduler")
-  private val stateLock = new ReentrantLock()
+  private val stateLock = new Object()
   private val finishedDrivers =
     new mutable.ArrayBuffer[MesosClusterSubmissionState](retainedDrivers)
   private var frameworkId: String = null
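
The `ReentrantLock` here was only ever used for plain mutual exclusion, and in Scala any `AnyRef` can serve as a monitor for `synchronized` blocks; `ReentrantLock` is only warranted when you need `tryLock`, timed waits, or multiple conditions. A minimal sketch of the pattern, with hypothetical state, not code from the patch:

object LockSketch {
  class Scheduler {
    // A plain Object is enough when the lock is only used with synchronized.
    private val stateLock = new Object()
    private var launched = 0

    def recordLaunch(): Unit = stateLock.synchronized {
      launched += 1
    }

    def launchedCount: Int = stateLock.synchronized(launched)
  }
}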

launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java

Lines changed: 1 addition & 3 deletions

@@ -293,9 +293,7 @@ private class ServerConnection extends LauncherConnection {
     protected void handle(Message msg) throws IOException {
       try {
         if (msg instanceof Hello) {
-          synchronized (timeout) {
-            timeout.cancel();
-          }
+          timeout.cancel();
           timeout = null;
           Hello hello = (Hello) msg;
           ChildProcAppHandle handle = pending.remove(hello.secret);
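
The `synchronized` block bought nothing here: `TimerTask.cancel()` is documented as thread-safe, and locking on a field that is reassigned to null immediately afterwards cannot provide reliable mutual exclusion, since different threads may end up locking different objects. A minimal Scala sketch of the corrected usage, with hypothetical names:

import java.util.{Timer, TimerTask}

object TimeoutSketch {
  private val timer = new Timer("demo-timer", true)

  def main(args: Array[String]): Unit = {
    var timeout: TimerTask = new TimerTask {
      override def run(): Unit = println("connection timed out")
    }
    timer.schedule(timeout, 10000L)
    // cancel() needs no external locking; it is safe to call concurrently.
    timeout.cancel()
    timeout = null
    timer.cancel()
  }
}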

launcher/src/main/java/org/apache/spark/launcher/Main.java

Lines changed: 1 addition & 1 deletion

@@ -151,7 +151,7 @@ private static class MainClassOptionParser extends SparkSubmitOptionParser {
 
     @Override
     protected boolean handle(String opt, String value) {
-      if (opt == CLASS) {
+      if (CLASS.equals(opt)) {
         className = value;
       }
       return false;
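
In Java, `==` on two String references compares identity, not contents; it happens to work when both sides are interned literals, but fails for strings built at runtime, such as parsed command-line arguments. Writing `CLASS.equals(opt)` also avoids a NullPointerException when `opt` is null. A minimal Scala sketch of the distinction (`eq` is Scala's reference comparison, matching Java's `==` on objects):

object StringEqSketch {
  def main(args: Array[String]): Unit = {
    val literal = "--class"
    val parsed = new String("--class") // same contents, different object
    println(literal eq parsed)         // false: reference comparison
    println(literal.equals(parsed))    // true: content comparison
  }
}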

mllib/src/main/scala/org/apache/spark/ml/tree/Node.scala

Lines changed: 2 additions & 2 deletions

@@ -386,9 +386,9 @@ private[tree] object LearningNode {
     var levelsToGo = indexToLevel(nodeIndex)
     while (levelsToGo > 0) {
       if ((nodeIndex & (1 << levelsToGo - 1)) == 0) {
-        tmpNode = tmpNode.leftChild.asInstanceOf[LearningNode]
+        tmpNode = tmpNode.leftChild.get
       } else {
-        tmpNode = tmpNode.rightChild.asInstanceOf[LearningNode]
+        tmpNode = tmpNode.rightChild.get
       }
       levelsToGo -= 1
     }
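
The casts were wrong because `leftChild` and `rightChild` are `Option[LearningNode]`, and casting an `Option` to its element type throws a ClassCastException at runtime; `Option.get` unwraps the value instead. A minimal sketch with a hypothetical node type, not the MLlib class:

case class TreeNode(id: Int, leftChild: Option[TreeNode], rightChild: Option[TreeNode])

object OptionGetSketch {
  def main(args: Array[String]): Unit = {
    val leaf = TreeNode(1, None, None)
    val root = TreeNode(0, Some(leaf), None)
    val child = root.leftChild.get // TreeNode(1, None, None)
    // root.leftChild.asInstanceOf[TreeNode] would throw ClassCastException:
    // a Some[TreeNode] is not a TreeNode.
    println(child.id)
  }
}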

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 6 additions & 1 deletion

@@ -297,7 +297,12 @@ final class DataFrameWriter private[sql](df: DataFrame) {
       if (!tableExists) {
         val schema = JdbcUtils.schemaString(df, url)
         val sql = s"CREATE TABLE $table ($schema)"
-        conn.createStatement.executeUpdate(sql)
+        val statement = conn.createStatement
+        try {
+          statement.executeUpdate(sql)
+        } finally {
+          statement.close()
+        }
       }
     } finally {
       conn.close()
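
The fix follows standard JDBC discipline: every `Statement` is closed in a finally block so it is released even when `executeUpdate` throws. A minimal sketch of the same discipline factored into a loan-pattern helper (a hypothetical `withStatement`, not part of the patch):

import java.sql.{Connection, Statement}

object JdbcLoanSketch {
  // Loans a Statement to the caller and always closes it afterwards.
  def withStatement[T](conn: Connection)(body: Statement => T): T = {
    val statement = conn.createStatement()
    try {
      body(statement)
    } finally {
      statement.close()
    }
  }
}

// usage, assuming conn and sql are in scope:
//   JdbcLoanSketch.withStatement(conn)(_.executeUpdate(sql))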

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala

Lines changed: 26 additions & 21 deletions

@@ -120,30 +120,35 @@ private[sql] object JDBCRDD extends Logging {
     val dialect = JdbcDialects.get(url)
     val conn: Connection = getConnector(properties.getProperty("driver"), url, properties)()
     try {
-      val rs = conn.prepareStatement(s"SELECT * FROM $table WHERE 1=0").executeQuery()
+      val statement = conn.prepareStatement(s"SELECT * FROM $table WHERE 1=0")
       try {
-        val rsmd = rs.getMetaData
-        val ncols = rsmd.getColumnCount
-        val fields = new Array[StructField](ncols)
-        var i = 0
-        while (i < ncols) {
-          val columnName = rsmd.getColumnLabel(i + 1)
-          val dataType = rsmd.getColumnType(i + 1)
-          val typeName = rsmd.getColumnTypeName(i + 1)
-          val fieldSize = rsmd.getPrecision(i + 1)
-          val fieldScale = rsmd.getScale(i + 1)
-          val isSigned = rsmd.isSigned(i + 1)
-          val nullable = rsmd.isNullable(i + 1) != ResultSetMetaData.columnNoNulls
-          val metadata = new MetadataBuilder().putString("name", columnName)
-          val columnType =
-            dialect.getCatalystType(dataType, typeName, fieldSize, metadata).getOrElse(
-              getCatalystType(dataType, fieldSize, fieldScale, isSigned))
-          fields(i) = StructField(columnName, columnType, nullable, metadata.build())
-          i = i + 1
+        val rs = statement.executeQuery()
+        try {
+          val rsmd = rs.getMetaData
+          val ncols = rsmd.getColumnCount
+          val fields = new Array[StructField](ncols)
+          var i = 0
+          while (i < ncols) {
+            val columnName = rsmd.getColumnLabel(i + 1)
+            val dataType = rsmd.getColumnType(i + 1)
+            val typeName = rsmd.getColumnTypeName(i + 1)
+            val fieldSize = rsmd.getPrecision(i + 1)
+            val fieldScale = rsmd.getScale(i + 1)
+            val isSigned = rsmd.isSigned(i + 1)
+            val nullable = rsmd.isNullable(i + 1) != ResultSetMetaData.columnNoNulls
+            val metadata = new MetadataBuilder().putString("name", columnName)
+            val columnType =
+              dialect.getCatalystType(dataType, typeName, fieldSize, metadata).getOrElse(
+                getCatalystType(dataType, fieldSize, fieldScale, isSigned))
+            fields(i) = StructField(columnName, columnType, nullable, metadata.build())
+            i = i + 1
+          }
+          return new StructType(fields)
+        } finally {
+          rs.close()
         }
-        return new StructType(fields)
       } finally {
-        rs.close()
+        statement.close()
       }
     } finally {
       conn.close()
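
With three resources in play (connection, statement, result set), each gets its own try/finally, nested innermost-to-outermost, so a failure at any stage still closes everything acquired before it. A condensed Scala sketch of the shape, with a hypothetical helper name:

import java.sql.Connection

object NestedCleanupSketch {
  // Returns the column count of the table's empty header row.
  def columnCount(conn: Connection, table: String): Int = {
    val statement = conn.prepareStatement(s"SELECT * FROM $table WHERE 1=0")
    try {
      val rs = statement.executeQuery()
      try {
        rs.getMetaData.getColumnCount
      } finally {
        rs.close() // runs even if reading metadata throws
      }
    } finally {
      statement.close() // runs even if executeQuery throws
    }
  }
}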

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 14 additions & 2 deletions

@@ -49,14 +49,26 @@ object JdbcUtils extends Logging {
     // Somewhat hacky, but there isn't a good way to identify whether a table exists for all
     // SQL database systems using JDBC meta data calls, considering "table" could also include
     // the database name. Query used to find table exists can be overriden by the dialects.
-    Try(conn.prepareStatement(dialect.getTableExistsQuery(table)).executeQuery()).isSuccess
+    Try {
+      val statement = conn.prepareStatement(dialect.getTableExistsQuery(table))
+      try {
+        statement.executeQuery()
+      } finally {
+        statement.close()
+      }
+    }.isSuccess
   }
 
   /**
    * Drops a table from the JDBC database.
    */
   def dropTable(conn: Connection, table: String): Unit = {
-    conn.createStatement.executeUpdate(s"DROP TABLE $table")
+    val statement = conn.createStatement
+    try {
+      statement.executeUpdate(s"DROP TABLE $table")
+    } finally {
+      statement.close()
+    }
   }
 
   /**
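
Note the interaction with `Try`: the finally clause closes the statement on both the success and failure paths before `Try` captures the outcome, so a failed probe does not leak the statement and `isSuccess` turns the exception into a plain Boolean. A condensed sketch of that combination, with hypothetical names:

import java.sql.Connection
import scala.util.Try

object TableProbeSketch {
  // True if the probe query runs without throwing; the statement is
  // closed in finally regardless of how executeQuery ends.
  def tableExists(conn: Connection, existsQuery: String): Boolean = {
    Try {
      val statement = conn.prepareStatement(existsQuery)
      try {
        statement.executeQuery()
      } finally {
        statement.close()
      }
    }.isSuccess
  }
}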
