Binary compatibility report for the infinispan-spark_2.10-0.2 library between versions 1.5.0 and 1.0.0 (relating to the portability of the client application infinispan-spark_2.10-0.2.jar)

Test Info


Library Name: infinispan-spark_2.10-0.2
Version #1: 1.5.0
Version #2: 1.0.0
Java Version: 1.7.0_85

Test Results


Total Java ARchives: 4
Total Methods / Classes: 509 / 3088
Verdict: Incompatible (80.4%)

Problem Summary


Severity / Count
Added Methods: - (15)
Removed Methods: High (131)
Problems with Data Types:
  High: 24
  Medium: 6
  Low: 3
Problems with Methods:
  High: 0
  Medium: 0
  Low: 3

Added Methods (15)


spark-core_2.10-1.0.0.jar, SparkContext.class
package org.apache.spark
SparkContext.clean ( F f )  :  F
SparkContext.getCallSite ( )  :  String
SparkContext.ui ( )  :  ui.SparkUI

spark-core_2.10-1.0.0.jar, TaskContext.class
package org.apache.spark
TaskContext.completed ( )  :  boolean
TaskContext.completed_.eq ( boolean p1 )  :  void
TaskContext.executeOnCompleteCallbacks ( )  :  void
TaskContext.interrupted ( )  :  boolean
TaskContext.interrupted_.eq ( boolean p1 )  :  void
TaskContext.TaskContext ( int stageId, int partitionId, long attemptId, boolean runningLocally, executor.TaskMetrics taskMetrics )

spark-streaming_2.10-1.0.0.jar, JavaStreamingContext.class
package org.apache.spark.streaming.api.java
JavaStreamingContext.fileStream ( String directory )  :  JavaPairInputDStream<K,V>

spark-streaming_2.10-1.0.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.getNewReceiverStreamId ( )  :  int
StreamingContext.state ( )  :  scala.Enumeration.Value
StreamingContext.state_.eq ( scala.Enumeration.Value p1 )  :  void
StreamingContext.StreamingContextState ( )  :  StreamingContext.StreamingContextState.
StreamingContext.uiTab ( )  :  ui.StreamingTab

to the top

Removed Methods (131)


spark-core_2.10-1.5.0.jar, DeserializationStream.class
package org.apache.spark.serializer
DeserializationStream.asKeyValueIterator ( )  :  scala.collection.Iterator<scala.Tuple2<Object,Object>>
DeserializationStream.DeserializationStream ( )
DeserializationStream.readKey ( scala.reflect.ClassTag<T> p1 )  :  T
DeserializationStream.readValue ( scala.reflect.ClassTag<T> p1 )  :  T

spark-core_2.10-1.5.0.jar, JavaSparkContext.class
package org.apache.spark.api.java
JavaSparkContext.accumulable ( T initialValue, String name, org.apache.spark.AccumulableParam<T,R> param )  :  org.apache.spark.Accumulable<T,R>
JavaSparkContext.accumulator ( double initialValue, String name )  :  org.apache.spark.Accumulator<Double>
JavaSparkContext.accumulator ( int initialValue, String name )  :  org.apache.spark.Accumulator<Integer>
JavaSparkContext.accumulator ( T initialValue, String name, org.apache.spark.AccumulatorParam<T> accumulatorParam )  :  org.apache.spark.Accumulator<T>
JavaSparkContext.binaryFiles ( String path )  :  JavaPairRDD<String,org.apache.spark.input.PortableDataStream>
JavaSparkContext.binaryFiles ( String path, int minPartitions )  :  JavaPairRDD<String,org.apache.spark.input.PortableDataStream>
JavaSparkContext.binaryRecords ( String path, int recordLength )  :  JavaRDD<byte[ ]>
JavaSparkContext.close ( )  :  void
JavaSparkContext.doubleAccumulator ( double initialValue, String name )  :  org.apache.spark.Accumulator<Double>
JavaSparkContext.emptyRDD ( )  :  JavaRDD<T>
JavaSparkContext.intAccumulator ( int initialValue, String name )  :  org.apache.spark.Accumulator<Integer>
JavaSparkContext.setLogLevel ( String logLevel )  :  void
JavaSparkContext.statusTracker ( )  :  JavaSparkStatusTracker
JavaSparkContext.version ( )  :  String

spark-core_2.10-1.5.0.jar, Logging.class
package org.apache.spark
Logging.logName ( ) [abstract]  :  String

spark-core_2.10-1.5.0.jar, SerializationStream.class
package org.apache.spark.serializer
SerializationStream.SerializationStream ( )
SerializationStream.writeKey ( T key, scala.reflect.ClassTag<T> p2 )  :  SerializationStream
SerializationStream.writeValue ( T value, scala.reflect.ClassTag<T> p2 )  :  SerializationStream

spark-core_2.10-1.5.0.jar, Serializer.class
package org.apache.spark.serializer
Serializer.defaultClassLoader ( )  :  scala.Option<ClassLoader>
Serializer.defaultClassLoader_.eq ( scala.Option<ClassLoader> p1 )  :  void
Serializer.getSerializer ( Serializer p1 ) [static]  :  Serializer
Serializer.getSerializer ( scala.Option<Serializer> p1 ) [static]  :  Serializer
Serializer.Serializer ( )
Serializer.setDefaultClassLoader ( ClassLoader classLoader )  :  Serializer
Serializer.supportsRelocationOfSerializedObjects ( )  :  boolean

spark-core_2.10-1.5.0.jar, SparkContext.class
package org.apache.spark
SparkContext.accumulable ( R initialValue, String name, AccumulableParam<R,T> param )  :  Accumulable<R,T>
SparkContext.accumulator ( T initialValue, String name, AccumulatorParam<T> param )  :  Accumulator<T>
SparkContext.addFile ( String path, boolean recursive )  :  void
SparkContext.applicationAttemptId ( )  :  scala.Option<String>
SparkContext.applicationId ( )  :  String
SparkContext.binaryFiles ( String path, int minPartitions )  :  rdd.RDD<scala.Tuple2<String,input.PortableDataStream>>
SparkContext.binaryRecords ( String path, int recordLength, org.apache.hadoop.conf.Configuration conf )  :  rdd.RDD<byte[ ]>
SparkContext.clean ( F f, boolean checkSerializable )  :  F
SparkContext.createSparkEnv ( SparkConf conf, boolean isLocal, scheduler.LiveListenerBus listenerBus )  :  SparkEnv
SparkContext.eventLogCodec ( )  :  scala.Option<String>
SparkContext.eventLogDir ( )  :  scala.Option<java.net.URI>
SparkContext.executorAllocationManager ( )  :  scala.Option<ExecutorAllocationManager>
SparkContext.externalBlockStoreFolderName ( )  :  String
SparkContext.getCallSite ( )  :  util.CallSite
SparkContext.getExecutorThreadDump ( String executorId )  :  scala.Option<util.ThreadStackTrace[ ]>
SparkContext.getOrCreate ( ) [static]  :  SparkContext
SparkContext.getOrCreate ( SparkConf p1 ) [static]  :  SparkContext
SparkContext.isEventLogEnabled ( )  :  boolean
SparkContext.jobProgressListener ( )  :  ui.jobs.JobProgressListener
SparkContext.killAndReplaceExecutor ( String executorId )  :  boolean
SparkContext.killExecutor ( String executorId )  :  boolean
SparkContext.killExecutors ( scala.collection.Seq<String> executorIds )  :  boolean
SparkContext.logName ( )  :  String
SparkContext.metricsSystem ( )  :  metrics.MetricsSystem
SparkContext.SparkContext.._cleaner ( )  :  scala.Option<ContextCleaner>
SparkContext.SparkContext.._conf ( )  :  SparkConf
SparkContext.SparkContext.._dagScheduler ( )  :  scheduler.DAGScheduler
SparkContext.SparkContext.._env ( )  :  SparkEnv
SparkContext.SparkContext.._eventLogger ( )  :  scala.Option<scheduler.EventLoggingListener>
SparkContext.SparkContext.._executorAllocationManager ( )  :  scala.Option<ExecutorAllocationManager>
SparkContext.SparkContext.._heartbeatReceiver ( )  :  rpc.RpcEndpointRef
SparkContext.SparkContext.._listenerBusStarted_.eq ( boolean p1 )  :  void
SparkContext.SparkContext.._progressBar ( )  :  scala.Option<ui.ConsoleProgressBar>
SparkContext.SparkContext.._ui ( )  :  scala.Option<ui.SparkUI>
SparkContext.SparkContext..assertNotStopped ( )  :  void
SparkContext.SparkContext..creationSite ( )  :  util.CallSite
SparkContext.SparkContext..postApplicationEnd ( )  :  void
SparkContext.progressBar ( )  :  scala.Option<ui.ConsoleProgressBar>
SparkContext.range ( long start, long end, long step, int numSlices )  :  rdd.RDD<Object>
SparkContext.requestExecutors ( int numAdditionalExecutors )  :  boolean
SparkContext.requestTotalExecutors ( int numExecutors, int localityAwareTasks, scala.collection.immutable.Map<String,Object> hostToLocalTaskCount )  :  boolean
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function1<scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.reflect.ClassTag<U> p4 )  :  Object
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function2<TaskContext,scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.Function2<Object,U,scala.runtime.BoxedUnit> resultHandler, scala.reflect.ClassTag<U> p5 )  :  void
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function2<TaskContext,scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.reflect.ClassTag<U> p4 )  :  Object
SparkContext.schedulerBackend ( )  :  scheduler.SchedulerBackend
SparkContext.schedulerBackend_.eq ( scheduler.SchedulerBackend sb )  :  void
SparkContext.setCallSite ( util.CallSite callSite )  :  void
SparkContext.setLogLevel ( String logLevel )  :  void
SparkContext.statusTracker ( )  :  SparkStatusTracker
SparkContext.ui ( )  :  scala.Option<ui.SparkUI>
SparkContext.withScope ( scala.Function0<U> body )  :  U

spark-core_2.10-1.5.0.jar, StorageLevel.class
package org.apache.spark.storage
StorageLevel.fromString ( String p1 ) [static]  :  StorageLevel
StorageLevel.StorageLevel.._deserialized_.eq ( boolean p1 )  :  void
StorageLevel.StorageLevel.._replication ( )  :  int
StorageLevel.StorageLevel.._replication_.eq ( int p1 )  :  void
StorageLevel.StorageLevel.._useDisk_.eq ( boolean p1 )  :  void
StorageLevel.StorageLevel.._useMemory_.eq ( boolean p1 )  :  void
StorageLevel.StorageLevel.._useOffHeap_.eq ( boolean p1 )  :  void

spark-core_2.10-1.5.0.jar, TaskContext.class
package org.apache.spark
TaskContext.addTaskCompletionListener ( util.TaskCompletionListener p1 ) [abstract]  :  TaskContext
TaskContext.addTaskCompletionListener ( scala.Function1<TaskContext,scala.runtime.BoxedUnit> p1 ) [abstract]  :  TaskContext
TaskContext.attemptNumber ( ) [abstract]  :  int
TaskContext.collectAccumulators ( ) [abstract]  :  scala.collection.immutable.Map<Object,Object>
TaskContext.collectInternalAccumulators ( ) [abstract]  :  scala.collection.immutable.Map<Object,Object>
TaskContext.get ( ) [static]  :  TaskContext
TaskContext.getMetricsSources ( String p1 ) [abstract]  :  scala.collection.Seq<metrics.source.Source>
TaskContext.getPartitionId ( ) [static]  :  int
TaskContext.internalMetricsToAccumulators ( ) [abstract]  :  scala.collection.immutable.Map<String,Accumulator<Object>>
TaskContext.isCompleted ( ) [abstract]  :  boolean
TaskContext.isInterrupted ( ) [abstract]  :  boolean
TaskContext.isRunningLocally ( ) [abstract]  :  boolean
TaskContext.registerAccumulator ( Accumulable<?,?> p1 ) [abstract]  :  void
TaskContext.taskAttemptId ( ) [abstract]  :  long
TaskContext.TaskContext ( )
TaskContext.taskMemoryManager ( ) [abstract]  :  unsafe.memory.TaskMemoryManager

spark-streaming_2.10-1.5.0.jar, JavaStreamingContext.class
package org.apache.spark.streaming.api.java
JavaStreamingContext.awaitTerminationOrTimeout ( long timeout )  :  boolean
JavaStreamingContext.binaryRecordsStream ( String directory, int recordLength )  :  JavaDStream<byte[ ]>
JavaStreamingContext.close ( )  :  void
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass, org.apache.spark.api.java.function.Function<org.apache.hadoop.fs.Path,Boolean> filter, boolean newFilesOnly )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass, org.apache.spark.api.java.function.Function<org.apache.hadoop.fs.Path,Boolean> filter, boolean newFilesOnly, org.apache.hadoop.conf.Configuration conf )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2, org.apache.hadoop.conf.Configuration p3 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2, org.apache.hadoop.conf.Configuration p3, boolean p4 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getState ( )  :  org.apache.spark.streaming.StreamingContextState

spark-streaming_2.10-1.5.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.awaitTerminationOrTimeout ( long timeout )  :  boolean
StreamingContext.binaryRecordsStream ( String directory, int recordLength )  :  dstream.DStream<byte[ ]>
StreamingContext.fileStream ( String directory, scala.Function1<org.apache.hadoop.fs.Path,Object> filter, boolean newFilesOnly, org.apache.hadoop.conf.Configuration conf, scala.reflect.ClassTag<K> p5, scala.reflect.ClassTag<V> p6, scala.reflect.ClassTag<F> p7 )  :  dstream.InputDStream<scala.Tuple2<K,V>>
StreamingContext.getActive ( ) [static]  :  scala.Option<StreamingContext>
StreamingContext.getActiveOrCreate ( scala.Function0<StreamingContext> p1 ) [static]  :  StreamingContext
StreamingContext.getActiveOrCreate ( String p1, scala.Function0<StreamingContext> p2, org.apache.hadoop.conf.Configuration p3, boolean p4 ) [static]  :  StreamingContext
StreamingContext.getNewInputStreamId ( )  :  int
StreamingContext.getState ( )  :  StreamingContextState
StreamingContext.isCheckpointingEnabled ( )  :  boolean
StreamingContext.logName ( )  :  String
StreamingContext.StreamingContext..startSite ( )  :  java.util.concurrent.atomic.AtomicReference<org.apache.spark.util.CallSite>
StreamingContext.StreamingContext..stopOnShutdown ( )  :  void
StreamingContext.progressListener ( )  :  ui.StreamingJobProgressListener
StreamingContext.StreamingContext ( String path )
StreamingContext.StreamingContext ( String path, org.apache.spark.SparkContext sparkContext )
StreamingContext.uiTab ( )  :  scala.Option<ui.StreamingTab>
StreamingContext.withNamedScope ( String name, scala.Function0<U> body )  :  U
StreamingContext.withScope ( scala.Function0<U> body )  :  U

to the top

Problems with Data Types, High Severity (24)


spark-core_2.10-1.5.0.jar
package org.apache.spark
[+] Logging (1)
[+] SparkContext (1)
[+] TaskContext (14)

package org.apache.spark.api.java
[+] JavaSparkContext (1)

package org.apache.spark.broadcast
[+] Broadcast<T> (1)

package org.apache.spark.rdd
[+] PairRDDFunctions<K,V> (1)

package org.apache.spark.serializer
[+] DeserializationStream (1)
[+] SerializationStream (1)
[+] Serializer (1)
[+] SerializerInstance (1)

spark-streaming_2.10-1.5.0.jar
package org.apache.spark.streaming.api.java
[+] JavaStreamingContext (1)

to the top

Problems with Data Types, Medium Severity (6)


spark-core_2.10-1.5.0.jar
package org.apache.spark.api.java
[+] JavaDoubleRDD (1)
[+] JavaPairRDD<K,V> (1)
[+] JavaRDD<T> (1)

package org.apache.spark.scheduler
[+] LiveListenerBus (1)

spark-streaming_2.10-1.5.0.jar
package org.apache.spark.streaming.api.java
[+] JavaDStream<T> (1)
[+] JavaPairDStream<K,V> (1)

to the top

Problems with Data Types, Low Severity (3)


spark-core_2.10-1.5.0.jar
package org.apache.spark
[+] TaskContext (3)

to the top

Problems with Methods, Low Severity (3)


spark-core_2.10-1.5.0.jar, TaskContext
package org.apache.spark
[+] TaskContext.partitionId ( ) [abstract]  :  int (1)
[+] TaskContext.stageId ( ) [abstract]  :  int (1)
[+] TaskContext.taskMetrics ( ) [abstract]  :  executor.TaskMetrics (1)

to the top

Java ARchives (4)


spark-core_2.10-1.5.0.jar
spark-hive_2.10-1.5.0.jar
spark-sql_2.10-1.5.0.jar
spark-streaming_2.10-1.5.0.jar

to the top




Generated on Wed Dec 9 07:38:56 2015 for infinispan-spark_2.10-0.2 by Java API Compliance Checker 1.4.1  
A tool for checking backward compatibility of a Java library API