Skip to content

Commit 9b76332

Browse files
ScrapCodes authored and pwendell committed
[SPARK-2549] Functions defined inside of other functions trigger failures
Author: Prashant Sharma <[email protected]> Closes apache#1510 from ScrapCodes/SPARK-2549/fun-in-fun and squashes the following commits: 9458bc5 [Prashant Sharma] Tested by removing an inner function from excludes. bc03b1c [Prashant Sharma] SPARK-2549 Functions defined inside of other functions trigger failures
1 parent efdaeb1 commit 9b76332

File tree

2 files changed

+98
-106
lines changed

2 files changed

+98
-106
lines changed

project/MimaExcludes.scala

+79-104
Original file line number · Diff line number · Diff line change
@@ -32,108 +32,83 @@ import com.typesafe.tools.mima.core._
3232
*/
3333
object MimaExcludes {
3434

35-
def excludes(version: String) = version match {
36-
case v if v.startsWith("1.1") =>
37-
Seq(
38-
MimaBuild.excludeSparkPackage("deploy"),
39-
MimaBuild.excludeSparkPackage("graphx")
40-
) ++
41-
closures.map(method => ProblemFilters.exclude[MissingMethodProblem](method)) ++
42-
Seq(
43-
// Adding new method to JavaRDDLike trait - we should probably mark this as a developer API.
44-
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.api.java.JavaRDDLike.partitions"),
45-
// We made a mistake earlier (ed06500d3) in the Java API to use default parameter values
46-
// for countApproxDistinct* functions, which does not work in Java. We later removed
47-
// them, and use the following to tell Mima to not care about them.
48-
ProblemFilters.exclude[IncompatibleResultTypeProblem](
49-
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey"),
50-
ProblemFilters.exclude[IncompatibleResultTypeProblem](
51-
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey"),
52-
ProblemFilters.exclude[MissingMethodProblem](
53-
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinct$default$1"),
54-
ProblemFilters.exclude[MissingMethodProblem](
55-
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey$default$1"),
56-
ProblemFilters.exclude[MissingMethodProblem](
57-
"org.apache.spark.api.java.JavaRDD.countApproxDistinct$default$1"),
58-
ProblemFilters.exclude[MissingMethodProblem](
59-
"org.apache.spark.api.java.JavaRDDLike.countApproxDistinct$default$1"),
60-
ProblemFilters.exclude[MissingMethodProblem](
61-
"org.apache.spark.api.java.JavaDoubleRDD.countApproxDistinct$default$1"),
62-
ProblemFilters.exclude[MissingMethodProblem](
63-
"org.apache.spark.storage.MemoryStore.Entry"),
64-
ProblemFilters.exclude[MissingMethodProblem](
65-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$debugChildren$1"),
66-
ProblemFilters.exclude[MissingMethodProblem](
67-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$firstDebugString$1"),
68-
ProblemFilters.exclude[MissingMethodProblem](
69-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$shuffleDebugString$1"),
70-
ProblemFilters.exclude[MissingMethodProblem](
71-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$debugString$1"),
72-
ProblemFilters.exclude[MissingMethodProblem](
73-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$"
74-
+ "createZero$1")
75-
) ++
76-
Seq(
77-
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.flume.FlumeReceiver.this")
78-
) ++
79-
Seq( // Ignore some private methods in ALS.
80-
ProblemFilters.exclude[MissingMethodProblem](
81-
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$updateFeatures"),
82-
ProblemFilters.exclude[MissingMethodProblem]( // The only public constructor is the one without arguments.
83-
"org.apache.spark.mllib.recommendation.ALS.this"),
84-
ProblemFilters.exclude[MissingMethodProblem](
85-
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$<init>$default$7"),
86-
ProblemFilters.exclude[IncompatibleMethTypeProblem](
87-
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$updateFeatures")
88-
) ++
89-
MimaBuild.excludeSparkClass("mllib.linalg.distributed.ColumnStatisticsAggregator") ++
90-
MimaBuild.excludeSparkClass("rdd.ZippedRDD") ++
91-
MimaBuild.excludeSparkClass("rdd.ZippedPartition") ++
92-
MimaBuild.excludeSparkClass("util.SerializableHyperLogLog") ++
93-
MimaBuild.excludeSparkClass("storage.Values") ++
94-
MimaBuild.excludeSparkClass("storage.Entry") ++
95-
MimaBuild.excludeSparkClass("storage.MemoryStore$Entry") ++
96-
Seq(
97-
ProblemFilters.exclude[IncompatibleMethTypeProblem](
98-
"org.apache.spark.mllib.tree.impurity.Gini.calculate"),
99-
ProblemFilters.exclude[IncompatibleMethTypeProblem](
100-
"org.apache.spark.mllib.tree.impurity.Entropy.calculate"),
101-
ProblemFilters.exclude[IncompatibleMethTypeProblem](
102-
"org.apache.spark.mllib.tree.impurity.Variance.calculate")
103-
)
104-
case v if v.startsWith("1.0") =>
105-
Seq(
106-
MimaBuild.excludeSparkPackage("api.java"),
107-
MimaBuild.excludeSparkPackage("mllib"),
108-
MimaBuild.excludeSparkPackage("streaming")
109-
) ++
110-
MimaBuild.excludeSparkClass("rdd.ClassTags") ++
111-
MimaBuild.excludeSparkClass("util.XORShiftRandom") ++
112-
MimaBuild.excludeSparkClass("graphx.EdgeRDD") ++
113-
MimaBuild.excludeSparkClass("graphx.VertexRDD") ++
114-
MimaBuild.excludeSparkClass("graphx.impl.GraphImpl") ++
115-
MimaBuild.excludeSparkClass("graphx.impl.RoutingTable") ++
116-
MimaBuild.excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
117-
MimaBuild.excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
118-
MimaBuild.excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
119-
MimaBuild.excludeSparkClass("mllib.optimization.SquaredGradient") ++
120-
MimaBuild.excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
121-
MimaBuild.excludeSparkClass("mllib.regression.LassoWithSGD") ++
122-
MimaBuild.excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
123-
case _ => Seq()
124-
}
125-
126-
private val closures = Seq(
127-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$mergeMaps$1",
128-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$countPartition$1",
129-
"org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$distributePartition$1",
130-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$mergeValue$1",
131-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$writeToFile$1",
132-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$reducePartition$1",
133-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$writeShard$1",
134-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$mergeCombiners$1",
135-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$process$1",
136-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$createCombiner$1",
137-
"org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$rdd$PairRDDFunctions$$mergeMaps$1"
138-
)
35+
def excludes(version: String) =
36+
version match {
37+
case v if v.startsWith("1.1") =>
38+
Seq(
39+
MimaBuild.excludeSparkPackage("deploy"),
40+
MimaBuild.excludeSparkPackage("graphx")
41+
) ++
42+
Seq(
43+
// Adding new method to JavaRDDLike trait - we should probably mark this as a developer API.
44+
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.api.java.JavaRDDLike.partitions"),
45+
// We made a mistake earlier (ed06500d3) in the Java API to use default parameter values
46+
// for countApproxDistinct* functions, which does not work in Java. We later removed
47+
// them, and use the following to tell Mima to not care about them.
48+
ProblemFilters.exclude[IncompatibleResultTypeProblem](
49+
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey"),
50+
ProblemFilters.exclude[IncompatibleResultTypeProblem](
51+
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey"),
52+
ProblemFilters.exclude[MissingMethodProblem](
53+
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinct$default$1"),
54+
ProblemFilters.exclude[MissingMethodProblem](
55+
"org.apache.spark.api.java.JavaPairRDD.countApproxDistinctByKey$default$1"),
56+
ProblemFilters.exclude[MissingMethodProblem](
57+
"org.apache.spark.api.java.JavaRDD.countApproxDistinct$default$1"),
58+
ProblemFilters.exclude[MissingMethodProblem](
59+
"org.apache.spark.api.java.JavaRDDLike.countApproxDistinct$default$1"),
60+
ProblemFilters.exclude[MissingMethodProblem](
61+
"org.apache.spark.api.java.JavaDoubleRDD.countApproxDistinct$default$1"),
62+
ProblemFilters.exclude[MissingMethodProblem](
63+
"org.apache.spark.storage.MemoryStore.Entry")
64+
) ++
65+
Seq(
66+
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.flume.FlumeReceiver.this")
67+
) ++
68+
Seq( // Ignore some private methods in ALS.
69+
ProblemFilters.exclude[MissingMethodProblem](
70+
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$updateFeatures"),
71+
ProblemFilters.exclude[MissingMethodProblem]( // The only public constructor is the one without arguments.
72+
"org.apache.spark.mllib.recommendation.ALS.this"),
73+
ProblemFilters.exclude[MissingMethodProblem](
74+
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$<init>$default$7"),
75+
ProblemFilters.exclude[IncompatibleMethTypeProblem](
76+
"org.apache.spark.mllib.recommendation.ALS.org$apache$spark$mllib$recommendation$ALS$$updateFeatures")
77+
) ++
78+
MimaBuild.excludeSparkClass("mllib.linalg.distributed.ColumnStatisticsAggregator") ++
79+
MimaBuild.excludeSparkClass("rdd.ZippedRDD") ++
80+
MimaBuild.excludeSparkClass("rdd.ZippedPartition") ++
81+
MimaBuild.excludeSparkClass("util.SerializableHyperLogLog") ++
82+
MimaBuild.excludeSparkClass("storage.Values") ++
83+
MimaBuild.excludeSparkClass("storage.Entry") ++
84+
MimaBuild.excludeSparkClass("storage.MemoryStore$Entry") ++
85+
Seq(
86+
ProblemFilters.exclude[IncompatibleMethTypeProblem](
87+
"org.apache.spark.mllib.tree.impurity.Gini.calculate"),
88+
ProblemFilters.exclude[IncompatibleMethTypeProblem](
89+
"org.apache.spark.mllib.tree.impurity.Entropy.calculate"),
90+
ProblemFilters.exclude[IncompatibleMethTypeProblem](
91+
"org.apache.spark.mllib.tree.impurity.Variance.calculate")
92+
)
93+
case v if v.startsWith("1.0") =>
94+
Seq(
95+
MimaBuild.excludeSparkPackage("api.java"),
96+
MimaBuild.excludeSparkPackage("mllib"),
97+
MimaBuild.excludeSparkPackage("streaming")
98+
) ++
99+
MimaBuild.excludeSparkClass("rdd.ClassTags") ++
100+
MimaBuild.excludeSparkClass("util.XORShiftRandom") ++
101+
MimaBuild.excludeSparkClass("graphx.EdgeRDD") ++
102+
MimaBuild.excludeSparkClass("graphx.VertexRDD") ++
103+
MimaBuild.excludeSparkClass("graphx.impl.GraphImpl") ++
104+
MimaBuild.excludeSparkClass("graphx.impl.RoutingTable") ++
105+
MimaBuild.excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
106+
MimaBuild.excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
107+
MimaBuild.excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
108+
MimaBuild.excludeSparkClass("mllib.optimization.SquaredGradient") ++
109+
MimaBuild.excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
110+
MimaBuild.excludeSparkClass("mllib.regression.LassoWithSGD") ++
111+
MimaBuild.excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
112+
case _ => Seq()
113+
}
139114
}

tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala

+19-2
Original file line number · Diff line number · Diff line change
@@ -99,9 +99,25 @@ object GenerateMIMAIgnore {
9999
(ignoredClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet, ignoredMembers.toSet)
100100
}
101101

102+
/** Scala reflection does not let us see inner functions even if they are upgraded
103+
* to public for some reason. So we had to resort to Java reflection to get all inner
104+
* functions with $$ in their name.
105+
*/
106+
def getInnerFunctions(classSymbol: unv.ClassSymbol): Seq[String] = {
107+
try {
108+
Class.forName(classSymbol.fullName, false, classLoader).getMethods.map(_.getName)
109+
.filter(_.contains("$$")).map(classSymbol.fullName + "." + _)
110+
} catch {
111+
case t: Throwable =>
112+
println("[WARN] Unable to detect inner functions for class:" + classSymbol.fullName)
113+
Seq.empty[String]
114+
}
115+
}
116+
102117
private def getAnnotatedOrPackagePrivateMembers(classSymbol: unv.ClassSymbol) = {
103118
classSymbol.typeSignature.members
104-
.filter(x => isPackagePrivate(x) || isDeveloperApi(x) || isExperimental(x)).map(_.fullName)
119+
.filter(x => isPackagePrivate(x) || isDeveloperApi(x) || isExperimental(x)).map(_.fullName) ++
120+
getInnerFunctions(classSymbol)
105121
}
106122

107123
def main(args: Array[String]) {
@@ -121,7 +137,8 @@ object GenerateMIMAIgnore {
121137
name.endsWith("$class") ||
122138
name.contains("$sp") ||
123139
name.contains("hive") ||
124-
name.contains("Hive")
140+
name.contains("Hive") ||
141+
name.contains("repl")
125142
}
126143

127144
/**

0 commit comments

Comments
 (0)