Binary compatibility report for the elasticsearch-spark_2.10-2.1.0.Beta2 library between versions 1.1.0 and 1.2.0 (relating to the portability of the client application elasticsearch-spark_2.10-2.1.0.Beta2.jar)

Test Info


Library Name: elasticsearch-spark_2.10-2.1.0.Beta2
Version #1:   1.1.0
Version #2:   1.2.0
Java Version: 1.7.0_75

Test Results


Total Java ARchives:     2
Total Methods / Classes: 851 / 1440
Verdict:                 Incompatible (6.8%)

Problem Summary


                             Severity  Count
Added Methods                -         80
Removed Methods              High      9
Problems with Data Types     High      14
                             Medium    0
                             Low       0
Problems with Methods        High      9
                             Medium    0
                             Low       0
Other Changes in Data Types  -         3

Added Methods (80)


spark-core_2.10-1.2.0.jar, JavaSparkContext.class
package org.apache.spark.api.java
JavaSparkContext.binaryFiles ( String path )  :  JavaPairRDD<String,org.apache.spark.input.PortableDataStream>
JavaSparkContext.binaryFiles ( String path, int minPartitions )  :  JavaPairRDD<String,org.apache.spark.input.PortableDataStream>
JavaSparkContext.binaryRecords ( String path, int recordLength )  :  JavaRDD<byte[ ]>
JavaSparkContext.close ( )  :  void
JavaSparkContext.statusTracker ( )  :  JavaSparkStatusTracker
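
The binary-input and lifecycle additions are self-contained. A minimal sketch of their use against Spark 1.2.0 follows; the input paths and partition counts are illustrative assumptions.

    import org.apache.spark.SparkConf
    import org.apache.spark.api.java.JavaSparkContext

    object BinaryFilesDemo {
      def main(args: Array[String]): Unit = {
        val jsc = new JavaSparkContext(
          new SparkConf().setAppName("binary-files-demo").setMaster("local[2]"))
        // New in 1.2.0: whole binary files as (path, PortableDataStream) pairs.
        val files = jsc.binaryFiles("/tmp/blobs", 4) // hypothetical path
        println(files.keys().collect())
        // New in 1.2.0: fixed-length binary records (16 bytes each here).
        val records = jsc.binaryRecords("/tmp/records.bin", 16) // hypothetical path
        println(records.count())
        jsc.close() // new in 1.2.0
      }
    }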

spark-core_2.10-1.2.0.jar, RDD<T>.class
package org.apache.spark.rdd
RDD<T>.parent ( int j, scala.reflect.ClassTag<U> p2 )  :  RDD<U>

spark-core_2.10-1.2.0.jar, SparkConf.class
package org.apache.spark
SparkConf.getAppId ( )  :  String
SparkConf.registerKryoClasses ( Class<?>[ ] classes )  :  SparkConf
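
registerKryoClasses removes the need for a custom KryoRegistrator in the common case. A minimal sketch, where MyEvent is a hypothetical application class:

    import org.apache.spark.SparkConf

    case class MyEvent(id: Long, payload: Array[Byte])

    object KryoConfDemo {
      val conf = new SparkConf()
        .setAppName("kryo-demo")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        // New in 1.2.0: register classes with Kryo directly on the conf.
        .registerKryoClasses(Array(classOf[MyEvent]))
    }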

spark-core_2.10-1.2.0.jar, SparkContext.class
package org.apache.spark
SparkContext.applicationId ( )  :  String
SparkContext.binaryFiles ( String path, int minPartitions )  :  rdd.RDD<scala.Tuple2<String,input.PortableDataStream>>
SparkContext.binaryRecords ( String path, int recordLength, org.apache.hadoop.conf.Configuration conf )  :  rdd.RDD<byte[ ]>
SparkContext.eventLogDir ( )  :  scala.Option<String>
SparkContext.executorAllocationManager ( )  :  scala.Option<ExecutorAllocationManager>
SparkContext.getExecutorThreadDump ( String executorId )  :  scala.Option<util.ThreadStackTrace[ ]>
SparkContext.isEventLogEnabled ( )  :  boolean
SparkContext.jobProgressListener ( )  :  ui.jobs.JobProgressListener
SparkContext.killExecutor ( String executorId )  :  boolean
SparkContext.killExecutors ( scala.collection.Seq<String> executorIds )  :  boolean
SparkContext.metricsSystem ( )  :  metrics.MetricsSystem
SparkContext.SparkContext..creationSite ( )  :  util.CallSite
SparkContext.progressBar ( )  :  scala.Option<ui.ConsoleProgressBar>
SparkContext.requestExecutors ( int numAdditionalExecutors )  :  boolean
SparkContext.schedulerBackend ( )  :  scheduler.SchedulerBackend
SparkContext.schedulerBackend_.eq ( scheduler.SchedulerBackend p1 )  :  void
SparkContext.setCallSite ( util.CallSite callSite )  :  void
SparkContext.statusTracker ( )  :  SparkStatusTracker
SparkContext.ui ( )  :  scala.Option<ui.SparkUI>
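
Several of these additions expose runtime state that previously required the listener API; note also that ui ( ) now returns scala.Option (its non-Option form appears under "Removed Methods" below). A sketch of applicationId and statusTracker, with an illustrative job to observe:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.SparkContext._ // async-action implicits in Spark 1.x
    import scala.concurrent.Await
    import scala.concurrent.duration.Duration

    object StatusDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("status-demo").setMaster("local[2]"))
        println(sc.applicationId) // new in 1.2.0
        val f = sc.parallelize(1 to 1000000, 8).map(_ * 2).countAsync()
        val tracker = sc.statusTracker // new in 1.2.0
        for (jobId <- tracker.getActiveJobIds; info <- tracker.getJobInfo(jobId))
          println(s"job $jobId: ${info.status}")
        println(Await.result(f, Duration.Inf))
        sc.stop()
      }
    }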

spark-core_2.10-1.2.0.jar, TaskContext.class
package org.apache.spark
TaskContext.get ( ) [static]  :  TaskContext
TaskContext.isRunningLocally ( ) [abstract]  :  boolean
TaskContext.TaskContext ( )
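
TaskContext.get ( ) is the headline addition: task-side code can now obtain its context statically instead of threading it through function signatures. A minimal sketch:

    import org.apache.spark.{SparkConf, SparkContext, TaskContext}

    object TaskContextDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("tc-demo").setMaster("local[2]"))
        sc.parallelize(1 to 10, 2).foreachPartition { _ =>
          val ctx = TaskContext.get() // new static accessor in 1.2.0
          println(s"stage=${ctx.stageId} partition=${ctx.partitionId}")
        }
        sc.stop()
      }
    }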

spark-sql_2.10-1.2.0.jar, DataType.class
package org.apache.spark.sql.api.java
DataType.createStructField ( String name, DataType dataType, boolean nullable, Metadata metadata ) [static]  :  StructField

spark-sql_2.10-1.2.0.jar, JavaSchemaRDD.class
package org.apache.spark.sql.api.java
JavaSchemaRDD.collectAsync ( )  :  org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
JavaSchemaRDD.countAsync ( )  :  org.apache.spark.api.java.JavaFutureAction<Long>
JavaSchemaRDD.foreachAsync ( org.apache.spark.api.java.function.VoidFunction<Row> f )  :  org.apache.spark.api.java.JavaFutureAction<Void>
JavaSchemaRDD.foreachPartitionAsync ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> f )  :  org.apache.spark.api.java.JavaFutureAction<Void>
JavaSchemaRDD.schemaRDD ( )  :  org.apache.spark.sql.SchemaRDD
JavaSchemaRDD.takeAsync ( int num )  :  org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
JavaSchemaRDD.toJSON ( )  :  org.apache.spark.api.java.JavaRDD<String>
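
The *Async methods mirror the asynchronous actions on plain RDDs and return JavaFutureAction, which implements java.util.concurrent.Future. A sketch; the input path is an illustrative assumption:

    import org.apache.spark.SparkConf
    import org.apache.spark.api.java.JavaSparkContext
    import org.apache.spark.sql.api.java.JavaSQLContext

    object AsyncRowsDemo {
      def main(args: Array[String]): Unit = {
        val jsc = new JavaSparkContext(
          new SparkConf().setAppName("async-demo").setMaster("local[2]"))
        val sqlCtx = new JavaSQLContext(jsc)
        val rows = sqlCtx.jsonFile("/tmp/people.json") // hypothetical input
        val future = rows.collectAsync() // new in 1.2.0
        println(future.get().size())     // java.util.concurrent.Future semantics
        jsc.close()
      }
    }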

spark-sql_2.10-1.2.0.jar, JavaSQLContext.class
package org.apache.spark.sql.api.java
JavaSQLContext.baseRelationToSchemaRDD ( org.apache.spark.sql.sources.BaseRelation baseRelation )  :  JavaSchemaRDD

spark-sql_2.10-1.2.0.jar, Row.class
package org.apache.spark.sql.api.java
Row.toString ( )  :  String

spark-sql_2.10-1.2.0.jar, SchemaRDD.class
package org.apache.spark.sql
SchemaRDD.cache ( )  :  org.apache.spark.rdd.RDD
SchemaRDD.cache ( )  :  SchemaRDD
SchemaRDD.collectToPython ( )  :  java.util.List<byte[ ]>
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel )  :  org.apache.spark.rdd.RDD
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel )  :  SchemaRDD
SchemaRDD.toJSON ( )  :  org.apache.spark.rdd.RDD<String>
SchemaRDD.unpersist ( boolean blocking )  :  org.apache.spark.rdd.RDD
SchemaRDD.unpersist ( boolean blocking )  :  SchemaRDD
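
The duplicated cache / persist / unpersist entries are covariant overrides: in 1.2.0 they return SchemaRDD rather than the inherited RDD type, so SchemaRDD-specific calls can be chained without casting. A sketch combining this with the new toJSON (input path assumed):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object SchemaRddDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("json-demo").setMaster("local[2]"))
        val sqlContext = new SQLContext(sc)
        val people = sqlContext.jsonFile("/tmp/people.json") // hypothetical input
        // cache() now returns SchemaRDD, so the chain below type-checks.
        people.cache().toJSON.take(5).foreach(println) // toJSON is new in 1.2.0
        sc.stop()
      }
    }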

spark-sql_2.10-1.2.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.abs ( catalyst.expressions.Expression e )  :  catalyst.expressions.Abs
SQLContext.baseRelationToSchemaRDD ( sources.BaseRelation baseRelation )  :  SchemaRDD
SQLContext.bigDecimalToLiteral ( scala.math.BigDecimal d )  :  catalyst.expressions.Literal
SQLContext.cacheQuery ( SchemaRDD query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel )  :  void
SQLContext.clearCache ( )  :  void
SQLContext.columnNameOfCorruptRecord ( )  :  String
SQLContext.dateToLiteral ( java.sql.Date d )  :  catalyst.expressions.Literal
SQLContext.ddlParser ( )  :  sources.DDLParser
SQLContext.decimalToLiteral ( catalyst.types.decimal.Decimal d )  :  catalyst.expressions.Literal
SQLContext.dropTempTable ( String tableName )  :  void
SQLContext.externalSortEnabled ( )  :  boolean
SQLContext.extraStrategies ( )  :  scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
SQLContext.extraStrategies_.eq ( scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>> p1 )  :  void
SQLContext.inMemoryPartitionPruning ( )  :  boolean
SQLContext.invalidateCache ( catalyst.plans.logical.LogicalPlan plan )  :  void
SQLContext.last ( catalyst.expressions.Expression e )  :  catalyst.expressions.Last
SQLContext.lookupCachedData ( catalyst.plans.logical.LogicalPlan plan )  :  scala.Option<CachedData>
SQLContext.lookupCachedData ( SchemaRDD query )  :  scala.Option<CachedData>
SQLContext.optimizer ( )  :  catalyst.optimizer.Optimizer
SQLContext.CacheManager..cachedData ( )  :  scala.collection.mutable.ArrayBuffer<CachedData>
SQLContext.CacheManager..cacheLock ( )  :  java.util.concurrent.locks.ReentrantReadWriteLock
SQLContext.CacheManager._setter_.CacheManager..cachedData_.eq ( scala.collection.mutable.ArrayBuffer p1 )  :  void
SQLContext.CacheManager._setter_.CacheManager..cacheLock_.eq ( java.util.concurrent.locks.ReentrantReadWriteLock p1 )  :  void
SQLContext.parquetFilterPushDown ( )  :  boolean
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType )  :  void
SQLContext.sqlParser ( )  :  catalyst.SparkSQLParser
SQLContext.sqrt ( catalyst.expressions.Expression e )  :  catalyst.expressions.Sqrt
SQLContext.tryUncacheQuery ( SchemaRDD query, boolean blocking )  :  boolean
SQLContext.uncacheQuery ( SchemaRDD query, boolean blocking )  :  void
SQLContext.useCachedData ( catalyst.plans.logical.LogicalPlan plan )  :  catalyst.plans.logical.LogicalPlan
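
Most of these entries are internal cache-manager plumbing, but dropTempTable and clearCache are user-facing. A sketch of the cache lifecycle they complete; the table name and input are illustrative:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object TableCacheDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("cache-demo").setMaster("local[2]"))
        val sqlContext = new SQLContext(sc)
        val events = sqlContext.jsonFile("/tmp/events.json") // hypothetical input
        events.registerTempTable("events")
        sqlContext.cacheTable("events")
        println(sqlContext.sql("SELECT COUNT(*) FROM events").collect().mkString)
        sqlContext.dropTempTable("events") // new in 1.2.0
        sqlContext.clearCache()            // new in 1.2.0
        sc.stop()
      }
    }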

spark-sql_2.10-1.2.0.jar, StructField.class
package org.apache.spark.sql.api.java
StructField.getMetadata ( )  :  Metadata
StructField.StructField ( String name, DataType dataType, boolean nullable, Metadata metadata )
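
Paired with the three-argument constructor listed under "Removed Methods" below, this is the column-metadata change in Spark SQL 1.2.0: the Java API StructField now carries a Metadata value. A sketch using the createStructField factory added to DataType; obtaining a Metadata instance is deliberately left to the caller, since the Metadata factories are not shown in this report:

    import org.apache.spark.sql.api.java.{DataType, Metadata, StructField}

    object StructFieldDemo {
      // 1.1.0 (constructor removed in 1.2.0):
      //   new StructField("name", DataType.StringType, true)
      // 1.2.0: a Metadata argument is now required.
      def nameField(metadata: Metadata): StructField =
        DataType.createStructField("name", DataType.StringType, true, metadata)
    }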


Removed Methods (9)


spark-core_2.10-1.1.0.jar, SparkContext.class
package org.apache.spark
SparkContext.ui ( )  :  ui.SparkUI

spark-core_2.10-1.1.0.jar, TaskContext.class
package org.apache.spark
TaskContext.markInterrupted ( )  :  void
TaskContext.markTaskCompleted ( )  :  void
TaskContext.TaskContext ( int stageId, int partitionId, long attemptId, boolean runningLocally, executor.TaskMetrics taskMetrics )

spark-sql_2.10-1.1.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.decimalToLiteral ( scala.math.BigDecimal d )  :  catalyst.expressions.Literal
SQLContext.optimizer ( )  :  catalyst.optimizer.Optimizer.
SQLContext.parser ( )  :  catalyst.SqlParser
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType )  :  void

spark-sql_2.10-1.1.0.jar, StructField.class
package org.apache.spark.sql.api.java
StructField.StructField ( String name, DataType dataType, boolean nullable )


Problems with Data Types, High Severity (14)


spark-core_2.10-1.1.0.jar
package org.apache.spark
[+] TaskContext (12)

package org.apache.spark.rdd
[+] PairRDDFunctions<K,V> (1)

spark-sql_2.10-1.1.0.jar
package org.apache.spark.sql.api.java
[+] DataType (1)


Problems with Methods, High Severity (9)


spark-core_2.10-1.1.0.jar, TaskContext
package org.apache.spark
[+] TaskContext.addTaskCompletionListener ( util.TaskCompletionListener listener )  :  TaskContext (1)
[+] TaskContext.addTaskCompletionListener ( scala.Function1<TaskContext,scala.runtime.BoxedUnit> f )  :  TaskContext (1)
[+] TaskContext.attemptId ( )  :  long (1)
[+] TaskContext.isCompleted ( )  :  boolean (1)
[+] TaskContext.isInterrupted ( )  :  boolean (1)
[+] TaskContext.partitionId ( )  :  int (1)
[+] TaskContext.runningLocally ( )  :  boolean (1)
[+] TaskContext.stageId ( )  :  int (1)
[+] TaskContext.taskMetrics ( )  :  executor.TaskMetrics (1)
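
These entries reflect TaskContext becoming abstract in 1.2.0 (see the added abstract methods and the removed public constructor above). The methods still exist, but any client that subclassed or instantiated TaskContext breaks; ordinary task-side callers such as the following sketch are unaffected:

    import org.apache.spark.{SparkConf, SparkContext, TaskContext}

    object CompletionListenerDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("listener-demo").setMaster("local[2]"))
        sc.parallelize(1 to 10, 2).foreachPartition { _ =>
          val ctx = TaskContext.get()
          // Calling the now-abstract methods through the handle is still fine.
          ctx.addTaskCompletionListener { (_: TaskContext) =>
            println(s"partition ${ctx.partitionId} done")
          }
        }
        sc.stop()
      }
    }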


Other Changes in Data Types (3)


spark-core_2.10-1.1.0.jar
package org.apache.spark.broadcast
[+] Broadcast<T> (1)

spark-sql_2.10-1.1.0.jar
package org.apache.spark.sql.api.java
[+] DataType (2)


Java ARchives (2)


spark-core_2.10-1.1.0.jar
spark-sql_2.10-1.1.0.jar





Generated on Mon Jun 29 20:33:30 2015 for elasticsearch-spark_2.10-2.1.0.Beta2 by Java API Compliance Checker 1.4.1  
A tool for checking backward compatibility of a Java library API