From f495620d0f11e34dee67853dd0912fe20602d24d Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Mon, 19 May 2014 11:34:02 +0530 Subject: [PATCH 1/6] (SPARK-1820) Make GenerateMimaIgnore @DeveloperApi annotation aware --- .../spark/tools/GenerateMIMAIgnore.scala | 41 +++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index a433e8e2e89f4..3b33ceab85c9c 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -23,6 +23,8 @@ import java.util.jar.JarFile import scala.collection.mutable import scala.collection.JavaConversions._ import scala.reflect.runtime.universe.runtimeMirror +import scala.reflect.runtime.{universe => unv} +import scala.reflect._ /** * A tool for generating classes to be excluded during binary checking with MIMA. It is expected @@ -68,10 +70,29 @@ object GenerateMIMAIgnore { false } } - } - + } + + def classesAnnotationCheck(className: String) = { + + try { + /* Couldn't figure out if it's possible to determine a-priori whether a given symbol + has specified annotation. */ + + val classSymbol=mirror.classSymbol(Class.forName(className, false, classLoader)) + val annotList=annotationsOfClass(classSymbol) + + isAnnotationExistClassLevel(annotList) + } catch { + case _: Throwable => { + println("Error determining visibility: " + className) + false + } + } + } + for (className <- classes) { val directlyPrivateSpark = isPackagePrivate(className) + val annotationCheck = classesAnnotationCheck(className) /* Inner classes defined within a private[spark] class or object are effectively invisible, so we account for them as package private. 
*/ @@ -83,7 +104,8 @@ object GenerateMIMAIgnore { false } } - if (directlyPrivateSpark || indirectlyPrivateSpark) privateClasses += className + + if (directlyPrivateSpark || indirectlyPrivateSpark || annotationCheck) privateClasses += className } privateClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet } @@ -94,6 +116,19 @@ object GenerateMIMAIgnore { println("Created : .mima-excludes in current directory.") } + private def annotationsOfClass(classSymbol : unv.ClassSymbol ) ={ + classSymbol.annotations + } + + private def annotationsOfClassMembers(classType: unv.Type) = { + classType.members.foldLeft(Nil: List[unv.type#Annotation]) { + (xs, x) => x.annotations ::: xs + } + } + + private def isAnnotationExistClassLevel(annotList: List[unv.Annotation]): Boolean = { + annotList.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) + } private def shouldExclude(name: String) = { // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala From 6a7201b3bdbf917ea0054049eeaded13bfcbfd72 Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Wed, 21 May 2014 14:46:10 +0530 Subject: [PATCH 2/6] [SPARK-1820] Make GenerateMimaIgnore @DeveloperApi annotation aware --- .../spark/tools/GenerateMIMAIgnore.scala | 23 ++++--------------- 1 file changed, 5 insertions(+), 18 deletions(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index 3b33ceab85c9c..1e0afe5c0daba 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -73,16 +73,13 @@ object GenerateMIMAIgnore { } def classesAnnotationCheck(className: String) = { - - try { - /* Couldn't figure out if it's possible to determine a-priori whether a given symbol - has specified annotation. 
*/ - - val classSymbol=mirror.classSymbol(Class.forName(className, false, classLoader)) - val annotList=annotationsOfClass(classSymbol) + try { + val annotList=mirror + .classSymbol(Class.forName(className, false, classLoader)) + .annotations isAnnotationExistClassLevel(annotList) - } catch { + } catch { case _: Throwable => { println("Error determining visibility: " + className) false @@ -116,16 +113,6 @@ object GenerateMIMAIgnore { println("Created : .mima-excludes in current directory.") } - private def annotationsOfClass(classSymbol : unv.ClassSymbol ) ={ - classSymbol.annotations - } - - private def annotationsOfClassMembers(classType: unv.Type) = { - classType.members.foldLeft(Nil: List[unv.type#Annotation]) { - (xs, x) => x.annotations ::: xs - } - } - private def isAnnotationExistClassLevel(annotList: List[unv.Annotation]): Boolean = { annotList.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) } From 8fa02d2c67f16556d7477f515603d472d2679a21 Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Wed, 21 May 2014 14:50:04 +0530 Subject: [PATCH 3/6] [SPARK-1820] Make GenerateMimaIgnore @DeveloperApi annotation aware --- .../main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index 1e0afe5c0daba..986d6978b1279 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -81,7 +81,7 @@ object GenerateMIMAIgnore { isAnnotationExistClassLevel(annotList) } catch { case _: Throwable => { - println("Error determining visibility: " + className) + println("Error determining Annotations: " + className) false } } From 4810471e66e0fd8cd2632870fd494be3114dc750 Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Mon, 26 May 2014 11:45:59 +0530 Subject: [PATCH 4/6] [SPARK-1820] Make GenerateMimaIgnore @DeveloperApi annotation aware. --- .../spark/tools/GenerateMIMAIgnore.scala | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index 986d6978b1279..f8d2040df682e 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -70,26 +70,26 @@ object GenerateMIMAIgnore { false } } - } - + } + def classesAnnotationCheck(className: String) = { - try { - val annotList=mirror - .classSymbol(Class.forName(className, false, classLoader)) - .annotations + try { + val annotList=mirror + .classSymbol(Class.forName(className, false, classLoader)) + .annotations - isAnnotationExistClassLevel(annotList) + isAnnotationExistClassLevel(annotList) } catch { - case _: Throwable => { + case _: Throwable => { println("Error determining Annotations: " + className) false - } - } - } + } + } + } for (className <- classes) { val directlyPrivateSpark = isPackagePrivate(className) - val annotationCheck = classesAnnotationCheck(className) + val annotationCheck = classesAnnotationCheck(className) /* Inner classes defined within a private[spark] class or object are effectively invisible, so we account for them as package private. 
*/ @@ -112,10 +112,10 @@ object GenerateMIMAIgnore { writeAll(classesPrivateWithin("org.apache.spark").mkString("\n")) println("Created : .mima-excludes in current directory.") } - + private def isAnnotationExistClassLevel(annotList: List[unv.Annotation]): Boolean = { annotList.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) - } + } private def shouldExclude(name: String) = { // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala From b735a78c3e026db2c6f38d933dbf441dd0a512a5 Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Mon, 26 May 2014 14:59:16 +0530 Subject: [PATCH 5/6] [SPARK-1820] Make GenerateMimaIgnore @DeveloperApi annotation aware. --- .../spark/tools/GenerateMIMAIgnore.scala | 275 +++++++++--------- 1 file changed, 135 insertions(+), 140 deletions(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index f8d2040df682e..d70497b5489df 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -1,155 +1,150 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.tools - -import java.io.File -import java.util.jar.JarFile - -import scala.collection.mutable -import scala.collection.JavaConversions._ -import scala.reflect.runtime.universe.runtimeMirror -import scala.reflect.runtime.{universe => unv} -import scala.reflect._ - -/** - * A tool for generating classes to be excluded during binary checking with MIMA. It is expected - * that this tool is run with ./spark-class. - * - * MIMA itself only supports JVM-level visibility and doesn't account for package-private classes. - * This tool looks at all currently package-private classes and generates exclusions for them. Note - * that this approach is not sound. It can lead to false positives if we move or rename a previously - * package-private class. It can lead to false negatives if someone explicitly makes a class - * package-private that wasn't before. This exists only to help catch certain classes of changes - * which might be difficult to catch during review. - */ -object GenerateMIMAIgnore { - private val classLoader = Thread.currentThread().getContextClassLoader - private val mirror = runtimeMirror(classLoader) - - private def classesPrivateWithin(packageName: String): Set[String] = { - - val classes = getClasses(packageName) - val privateClasses = mutable.HashSet[String]() - - def isPackagePrivate(className: String) = { - try { - /* Couldn't figure out if it's possible to determine a-priori whether a given symbol - is a module or class. 
*/ - - val privateAsClass = mirror - .classSymbol(Class.forName(className, false, classLoader)) - .privateWithin - .fullName - .startsWith(packageName) - - val privateAsModule = mirror - .staticModule(className) - .privateWithin - .fullName - .startsWith(packageName) - - privateAsClass || privateAsModule - } catch { - case _: Throwable => { - println("Error determining visibility: " + className) - false + /* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + package org.apache.spark.tools + + import java.io.File + import java.util.jar.JarFile + + import scala.collection.mutable + import scala.collection.JavaConversions._ + import scala.reflect.runtime.universe.runtimeMirror + import scala.reflect.runtime.{universe => unv} + import scala.reflect._ + + /** + * A tool for generating classes to be excluded during binary checking with MIMA. It is expected + * that this tool is run with ./spark-class. + * + * MIMA itself only supports JVM-level visibility and doesn't account for package-private classes. + * This tool looks at all currently package-private classes and generates exclusions for them. Note + * that this approach is not sound. It can lead to false positives if we move or rename a previously + * package-private class. It can lead to false negatives if someone explicitly makes a class + * package-private that wasn't before. This exists only to help catch certain classes of changes + * which might be difficult to catch during review. + */ + object GenerateMIMAIgnore { + private val classLoader = Thread.currentThread().getContextClassLoader + private val mirror = runtimeMirror(classLoader) + + private def classesPrivateWithin(packageName: String): Set[String] = { + + val classes = getClasses(packageName) + val privateClasses = mutable.HashSet[String]() + + def isPackagePrivate(className: String) = { + try { + /* Couldn't figure out if it's possible to determine a-priori whether a given symbol + is a module or class. 
*/ + + val privateAsClass = mirror + .classSymbol(Class.forName(className, false, classLoader)) + .privateWithin + .fullName + .startsWith(packageName) + + val privateAsModule = mirror + .staticModule(className) + .privateWithin + .fullName + .startsWith(packageName) + + privateAsClass || privateAsModule + } catch { + case _: Throwable => { + println("Error determining visibility: " + className) + false + } } } - } - - def classesAnnotationCheck(className: String) = { - try { - val annotList=mirror - .classSymbol(Class.forName(className, false, classLoader)) - .annotations - - isAnnotationExistClassLevel(annotList) - } catch { - case _: Throwable => { - println("Error determining Annotations: " + className) - false + + def isDeveloperApi(className: String) = { + try { + val clazz = mirror + .classSymbol(Class.forName(className, false, classLoader)) + + clazz.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) + } catch { + case _: Throwable => { + println("Error determining Annotations: " + className) + false } - } - } - - for (className <- classes) { - val directlyPrivateSpark = isPackagePrivate(className) - val annotationCheck = classesAnnotationCheck(className) - - /* Inner classes defined within a private[spark] class or object are effectively - invisible, so we account for them as package private. */ - val indirectlyPrivateSpark = { - val maybeOuter = className.toString.takeWhile(_ != '$') - if (maybeOuter != className) { - isPackagePrivate(maybeOuter) - } else { - false } } - - if (directlyPrivateSpark || indirectlyPrivateSpark || annotationCheck) privateClasses += className + + for (className <- classes) { + val directlyPrivateSpark = isPackagePrivate(className) + val developerApi = isDeveloperApi(className) + + /* Inner classes defined within a private[spark] class or object are effectively + invisible, so we account for them as package private. */ + val indirectlyPrivateSpark = { + val maybeOuter = className.toString.takeWhile(_ != '$') + if (maybeOuter != className) { + isPackagePrivate(maybeOuter) + } else { + false + } + } + + if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi) privateClasses += className + } + privateClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet } - privateClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet - } - def main(args: Array[String]) { - scala.tools.nsc.io.File(".mima-excludes"). - writeAll(classesPrivateWithin("org.apache.spark").mkString("\n")) - println("Created : .mima-excludes in current directory.") - } - - private def isAnnotationExistClassLevel(annotList: List[unv.Annotation]): Boolean = { - annotList.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) + def main(args: Array[String]) { + scala.tools.nsc.io.File(".mima-excludes"). 
+ writeAll(classesPrivateWithin("org.apache.spark").mkString("\n")) + println("Created : .mima-excludes in current directory.") } - private def shouldExclude(name: String) = { - // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala - name.contains("anon") || - name.endsWith("$class") || - name.contains("$sp") || - name.contains("hive") || - name.contains("Hive") - } + private def shouldExclude(name: String) = { + // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala + name.contains("anon") || + name.endsWith("$class") || + name.contains("$sp") || + name.contains("hive") || + name.contains("Hive") + } - /** - * Scans all classes accessible from the context class loader which belong to the given package - * and subpackages both from directories and jars present on the classpath. - */ - private def getClasses(packageName: String): Set[String] = { - val path = packageName.replace('.', '/') - val resources = classLoader.getResources(path) + /** + * Scans all classes accessible from the context class loader which belong to the given package + * and subpackages both from directories and jars present on the classpath. + */ + private def getClasses(packageName: String): Set[String] = { + val path = packageName.replace('.', '/') + val resources = classLoader.getResources(path) - val jars = resources.filter(x => x.getProtocol == "jar") - .map(_.getFile.split(":")(1).split("!")(0)).toSeq + val jars = resources.filter(x => x.getProtocol == "jar") + .map(_.getFile.split(":")(1).split("!")(0)).toSeq - jars.flatMap(getClassesFromJar(_, path)) - .map(_.getName) - .filterNot(shouldExclude).toSet - } + jars.flatMap(getClassesFromJar(_, path)) + .map(_.getName) + .filterNot(shouldExclude).toSet + } - /** - * Get all classes in a package from a jar file. - */ - private def getClassesFromJar(jarPath: String, packageName: String) = { - val jar = new JarFile(new File(jarPath)) - val enums = jar.entries().map(_.getName).filter(_.startsWith(packageName)) - val classes = for (entry <- enums if entry.endsWith(".class")) + /** + * Get all classes in a package from a jar file. + */ + private def getClassesFromJar(jarPath: String, packageName: String) = { + val jar = new JarFile(new File(jarPath)) + val enums = jar.entries().map(_.getName).filter(_.startsWith(packageName)) + val classes = for (entry <- enums if entry.endsWith(".class")) yield Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader) - classes + classes + } } -} From 0516ee27590703bd4552a8c6e45384cfaf2784f4 Mon Sep 17 00:00:00 2001 From: nikhil7sh Date: Tue, 27 May 2014 15:08:05 +0530 Subject: [PATCH 6/6] [SPARK-1820] Make GenerateMimaIgnore @DeveloperApi annotation aware. --- .../spark/tools/GenerateMIMAIgnore.scala | 267 +++++++++--------- 1 file changed, 133 insertions(+), 134 deletions(-) diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala index d70497b5489df..bbcacb099a2ff 100644 --- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala +++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala @@ -1,150 +1,149 @@ - /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - package org.apache.spark.tools - - import java.io.File - import java.util.jar.JarFile - - import scala.collection.mutable - import scala.collection.JavaConversions._ - import scala.reflect.runtime.universe.runtimeMirror - import scala.reflect.runtime.{universe => unv} - import scala.reflect._ - - /** - * A tool for generating classes to be excluded during binary checking with MIMA. It is expected - * that this tool is run with ./spark-class. - * - * MIMA itself only supports JVM-level visibility and doesn't account for package-private classes. - * This tool looks at all currently package-private classes and generates exclusions for them. Note - * that this approach is not sound. It can lead to false positives if we move or rename a previously - * package-private class. It can lead to false negatives if someone explicitly makes a class - * package-private that wasn't before. This exists only to help catch certain classes of changes - * which might be difficult to catch during review. - */ - object GenerateMIMAIgnore { - private val classLoader = Thread.currentThread().getContextClassLoader - private val mirror = runtimeMirror(classLoader) - - private def classesPrivateWithin(packageName: String): Set[String] = { - - val classes = getClasses(packageName) - val privateClasses = mutable.HashSet[String]() - - def isPackagePrivate(className: String) = { - try { - /* Couldn't figure out if it's possible to determine a-priori whether a given symbol - is a module or class. */ - - val privateAsClass = mirror - .classSymbol(Class.forName(className, false, classLoader)) - .privateWithin - .fullName - .startsWith(packageName) - - val privateAsModule = mirror - .staticModule(className) - .privateWithin - .fullName - .startsWith(packageName) - - privateAsClass || privateAsModule - } catch { - case _: Throwable => { - println("Error determining visibility: " + className) - false - } +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.tools + +import java.io.File +import java.util.jar.JarFile + +import scala.collection.mutable +import scala.collection.JavaConversions._ +import scala.reflect.runtime.universe.runtimeMirror +import scala.reflect.runtime.{universe => unv} + +/** + * A tool for generating classes to be excluded during binary checking with MIMA. It is expected + * that this tool is run with ./spark-class. + * + * MIMA itself only supports JVM-level visibility and doesn't account for package-private classes. + * This tool looks at all currently package-private classes and generates exclusions for them. Note + * that this approach is not sound. It can lead to false positives if we move or rename a previously + * package-private class. It can lead to false negatives if someone explicitly makes a class + * package-private that wasn't before. This exists only to help catch certain classes of changes + * which might be difficult to catch during review. + */ +object GenerateMIMAIgnore { + private val classLoader = Thread.currentThread().getContextClassLoader + private val mirror = runtimeMirror(classLoader) + + private def classesPrivateWithin(packageName: String): Set[String] = { + + val classes = getClasses(packageName) + val privateClasses = mutable.HashSet[String]() + + def isPackagePrivate(className: String) = { + try { + /* Couldn't figure out if it's possible to determine a-priori whether a given symbol + is a module or class. */ + + val privateAsClass = mirror + .classSymbol(Class.forName(className, false, classLoader)) + .privateWithin + .fullName + .startsWith(packageName) + + val privateAsModule = mirror + .staticModule(className) + .privateWithin + .fullName + .startsWith(packageName) + + privateAsClass || privateAsModule + } catch { + case _: Throwable => { + println("Error determining visibility: " + className) + false } } + } + + def isDeveloperApi(className: String) = { + try { + val clazz = mirror.classSymbol(Class.forName(className, false, classLoader)) - def isDeveloperApi(className: String) = { - try { - val clazz = mirror - .classSymbol(Class.forName(className, false, classLoader)) - - clazz.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) - } catch { - case _: Throwable => { - println("Error determining Annotations: " + className) - false - } + clazz.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi]) + } catch { + case _: Throwable => { + println("Error determining Annotations: " + className) + false } } + } - for (className <- classes) { - val directlyPrivateSpark = isPackagePrivate(className) - val developerApi = isDeveloperApi(className) - - /* Inner classes defined within a private[spark] class or object are effectively - invisible, so we account for them as package private. */ - val indirectlyPrivateSpark = { - val maybeOuter = className.toString.takeWhile(_ != '$') - if (maybeOuter != className) { - isPackagePrivate(maybeOuter) - } else { - false - } + for (className <- classes) { + val directlyPrivateSpark = isPackagePrivate(className) + val developerApi = isDeveloperApi(className) + + /* Inner classes defined within a private[spark] class or object are effectively + invisible, so we account for them as package private. 
*/ + val indirectlyPrivateSpark = { + val maybeOuter = className.toString.takeWhile(_ != '$') + if (maybeOuter != className) { + isPackagePrivate(maybeOuter) + } else { + false } - - if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi) privateClasses += className } - privateClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet + if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi) privateClasses += + className } + privateClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet + } - def main(args: Array[String]) { - scala.tools.nsc.io.File(".mima-excludes"). - writeAll(classesPrivateWithin("org.apache.spark").mkString("\n")) - println("Created : .mima-excludes in current directory.") - } + def main(args: Array[String]) { + scala.tools.nsc.io.File(".mima-excludes"). + writeAll(classesPrivateWithin("org.apache.spark").mkString("\n")) + println("Created : .mima-excludes in current directory.") + } - private def shouldExclude(name: String) = { - // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala - name.contains("anon") || - name.endsWith("$class") || - name.contains("$sp") || - name.contains("hive") || - name.contains("Hive") - } - /** - * Scans all classes accessible from the context class loader which belong to the given package - * and subpackages both from directories and jars present on the classpath. - */ - private def getClasses(packageName: String): Set[String] = { - val path = packageName.replace('.', '/') - val resources = classLoader.getResources(path) + private def shouldExclude(name: String) = { + // Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala + name.contains("anon") || + name.endsWith("$class") || + name.contains("$sp") || + name.contains("hive") || + name.contains("Hive") + } + + /** + * Scans all classes accessible from the context class loader which belong to the given package + * and subpackages both from directories and jars present on the classpath. + */ + private def getClasses(packageName: String): Set[String] = { + val path = packageName.replace('.', '/') + val resources = classLoader.getResources(path) - val jars = resources.filter(x => x.getProtocol == "jar") - .map(_.getFile.split(":")(1).split("!")(0)).toSeq + val jars = resources.filter(x => x.getProtocol == "jar") + .map(_.getFile.split(":")(1).split("!")(0)).toSeq - jars.flatMap(getClassesFromJar(_, path)) - .map(_.getName) - .filterNot(shouldExclude).toSet - } + jars.flatMap(getClassesFromJar(_, path)) + .map(_.getName) + .filterNot(shouldExclude).toSet + } - /** - * Get all classes in a package from a jar file. - */ - private def getClassesFromJar(jarPath: String, packageName: String) = { - val jar = new JarFile(new File(jarPath)) - val enums = jar.entries().map(_.getName).filter(_.startsWith(packageName)) - val classes = for (entry <- enums if entry.endsWith(".class")) - yield Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader) - classes - } + /** + * Get all classes in a package from a jar file. + */ + private def getClassesFromJar(jarPath: String, packageName: String) = { + val jar = new JarFile(new File(jarPath)) + val enums = jar.entries().map(_.getName).filter(_.startsWith(packageName)) + val classes = for (entry <- enums if entry.endsWith(".class")) + yield Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader) + classes } +} \ No newline at end of file
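
For reference, the class-level annotation check that this series converges on (isDeveloperApi in patches 5 and 6) can be exercised in isolation. Below is a minimal, self-contained sketch of that check, not Spark code: AnnotationCheckSketch, hasAnnotation, FakeDeveloperApi, Annotated, and Plain are hypothetical names, with FakeDeveloperApi standing in for org.apache.spark.annotation.DeveloperApi so the sketch compiles without Spark on the classpath. It uses the same Scala 2.10-era reflection calls as the patch (runtimeMirror, classSymbol, Annotation.tpe).

import scala.annotation.StaticAnnotation
import scala.reflect.runtime.{universe => unv}
import scala.reflect.runtime.universe.runtimeMirror

// Hypothetical stand-in for org.apache.spark.annotation.DeveloperApi.
class FakeDeveloperApi extends StaticAnnotation

@FakeDeveloperApi
class Annotated

class Plain

object AnnotationCheckSketch {
  private val classLoader = Thread.currentThread().getContextClassLoader
  private val mirror = runtimeMirror(classLoader)

  /** True if the class named `className` carries annotation `A` at class level. */
  def hasAnnotation[A: unv.TypeTag](className: String): Boolean = {
    try {
      // Same shape as isDeveloperApi in the final patch: resolve the class symbol
      // through the runtime mirror and compare each annotation's type.
      val symbol = mirror.classSymbol(Class.forName(className, false, classLoader))
      symbol.annotations.exists(_.tpe =:= unv.typeOf[A])
    } catch {
      case _: Throwable =>
        println("Error determining annotations: " + className)
        false
    }
  }

  def main(args: Array[String]) {
    println(hasAnnotation[FakeDeveloperApi]("Annotated")) // expected: true
    println(hasAnnotation[FakeDeveloperApi]("Plain"))     // expected: false
  }
}

Note that the check is class-level only: the member-level helper introduced in patch 1 (annotationsOfClassMembers) is removed again in patch 2, so annotations on individual methods or fields do not by themselves cause a class to be excluded.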