Binary compatibility report for the spark-avro_2.10-0.2.0 library between the 1.2.0 and 1.1.0 versions (relating to the portability of the client application spark-avro_2.10-0.2.0.jar)

Test Info


Library Name: spark-avro_2.10-0.2.0
Version #1: 1.2.0
Version #2: 1.1.0
Java Version: 1.7.0_75

Test Results


Total Java ARchives: 1
Total Methods / Classes: 367 / 405
Verdict: Incompatible (51.2%)

Problem Summary


Severity / Count
Added Methods: - / 4
Removed Methods: High / 49
Problems with Data Types: High / 5, Medium / 0, Low / 0
Problems with Methods: High / 0, Medium / 0, Low / 0
Other Changes in Data Types: - / 1

Added Methods (4)


spark-sql_2.10-1.1.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.decimalToLiteral ( scala.math.BigDecimal d )  :  catalyst.expressions.Literal
SQLContext.optimizer ( )  :  catalyst.optimizer.Optimizer
SQLContext.parser ( )  :  catalyst.SqlParser
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType )  :  void

to the top

Removed Methods (49)


spark-sql_2.10-1.2.0.jar, JavaSchemaRDD.class
package org.apache.spark.sql.api.java
JavaSchemaRDD.collectAsync ( )  :  org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
JavaSchemaRDD.countAsync ( )  :  org.apache.spark.api.java.JavaFutureAction<Long>
JavaSchemaRDD.foreachAsync ( org.apache.spark.api.java.function.VoidFunction<Row> f )  :  org.apache.spark.api.java.JavaFutureAction<Void>
JavaSchemaRDD.foreachPartitionAsync ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> f )  :  org.apache.spark.api.java.JavaFutureAction<Void>
JavaSchemaRDD.schemaRDD ( )  :  org.apache.spark.sql.SchemaRDD
JavaSchemaRDD.takeAsync ( int num )  :  org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
JavaSchemaRDD.toJSON ( )  :  org.apache.spark.api.java.JavaRDD<String>

spark-sql_2.10-1.2.0.jar, JavaSQLContext.class
package org.apache.spark.sql.api.java
JavaSQLContext.baseRelationToSchemaRDD ( org.apache.spark.sql.sources.BaseRelation baseRelation )  :  JavaSchemaRDD

spark-sql_2.10-1.2.0.jar, RelationProvider.class
package org.apache.spark.sql.sources
RelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2 ) [abstract]  :  BaseRelation

spark-sql_2.10-1.2.0.jar, SchemaRDD.class
package org.apache.spark.sql
SchemaRDD.cache ( )  :  org.apache.spark.rdd.RDD
SchemaRDD.cache ( )  :  SchemaRDD
SchemaRDD.collectToPython ( )  :  java.util.List<byte[ ]>
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel )  :  org.apache.spark.rdd.RDD
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel )  :  SchemaRDD
SchemaRDD.toJSON ( )  :  org.apache.spark.rdd.RDD<String>
SchemaRDD.unpersist ( boolean blocking )  :  org.apache.spark.rdd.RDD
SchemaRDD.unpersist ( boolean blocking )  :  SchemaRDD

spark-sql_2.10-1.2.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.abs ( catalyst.expressions.Expression e )  :  catalyst.expressions.Abs
SQLContext.baseRelationToSchemaRDD ( sources.BaseRelation baseRelation )  :  SchemaRDD
SQLContext.bigDecimalToLiteral ( scala.math.BigDecimal d )  :  catalyst.expressions.Literal
SQLContext.cacheQuery ( SchemaRDD query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel )  :  void
SQLContext.clearCache ( )  :  void
SQLContext.columnNameOfCorruptRecord ( )  :  String
SQLContext.dateToLiteral ( java.sql.Date d )  :  catalyst.expressions.Literal
SQLContext.ddlParser ( )  :  sources.DDLParser
SQLContext.decimalToLiteral ( catalyst.types.decimal.Decimal d )  :  catalyst.expressions.Literal
SQLContext.dropTempTable ( String tableName )  :  void
SQLContext.externalSortEnabled ( )  :  boolean
SQLContext.extraStrategies ( )  :  scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
SQLContext.extraStrategies_.eq ( scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>> p1 )  :  void
SQLContext.inMemoryPartitionPruning ( )  :  boolean
SQLContext.invalidateCache ( catalyst.plans.logical.LogicalPlan plan )  :  void
SQLContext.last ( catalyst.expressions.Expression e )  :  catalyst.expressions.Last
SQLContext.lookupCachedData ( catalyst.plans.logical.LogicalPlan plan )  :  scala.Option<CachedData>
SQLContext.lookupCachedData ( SchemaRDD query )  :  scala.Option<CachedData>
SQLContext.optimizer ( )  :  catalyst.optimizer.Optimizer
SQLContext.CacheManager..cachedData ( )  :  scala.collection.mutable.ArrayBuffer<CachedData>
SQLContext.CacheManager..cacheLock ( )  :  java.util.concurrent.locks.ReentrantReadWriteLock
SQLContext.CacheManager._setter_.CacheManager..cachedData_.eq ( scala.collection.mutable.ArrayBuffer p1 )  :  void
SQLContext.CacheManager._setter_.CacheManager..cacheLock_.eq ( java.util.concurrent.locks.ReentrantReadWriteLock p1 )  :  void
SQLContext.parquetFilterPushDown ( )  :  boolean
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType )  :  void
SQLContext.sqlParser ( )  :  catalyst.SparkSQLParser
SQLContext.sqrt ( catalyst.expressions.Expression e )  :  catalyst.expressions.Sqrt
SQLContext.tryUncacheQuery ( SchemaRDD query, boolean blocking )  :  boolean
SQLContext.uncacheQuery ( SchemaRDD query, boolean blocking )  :  void
SQLContext.useCachedData ( catalyst.plans.logical.LogicalPlan plan )  :  catalyst.plans.logical.LogicalPlan

spark-sql_2.10-1.2.0.jar, TableScan.class
package org.apache.spark.sql.sources
TableScan.buildScan ( ) [abstract]  :  org.apache.spark.rdd.RDD<org.apache.spark.sql.catalyst.expressions.Row>
TableScan.TableScan ( )

to the top

Problems with Data Types, High Severity (5)


spark-sql_2.10-1.2.0.jar
package org.apache.spark.sql
[+] SQLContext (1)

package org.apache.spark.sql.api.java
[+] DataType (2)

package org.apache.spark.sql.sources
[+] RelationProvider (1)
[+] TableScan (1)

to the top

Other Changes in Data Types (1)


spark-sql_2.10-1.2.0.jar
package org.apache.spark.sql.api.java
[+] DataType (1)

to the top

Java ARchives (1)


spark-sql_2.10-1.2.0.jar

to the top




Generated on Sat Apr 11 01:08:28 2015 for spark-avro_2.10-0.2.0 by Java API Compliance Checker 1.4.1  
A tool for checking backward compatibility of a Java library API