Binary compatibility report for the infinispan-spark_2.10-0.1 library between versions 1.4.0 and 1.2.0 (relating to the portability of the client application infinispan-spark_2.10-0.1.jar)

Test Info


Library Name:  infinispan-spark_2.10-0.1
Version #1:    1.4.0
Version #2:    1.2.0
Java Version:  1.7.0_75

Test Results


Total Java ARchives:      3
Total Methods / Classes:  485 / 2720
Verdict:                  Incompatible (51%)

Problem Summary


                          Severity  Count
Added Methods             -             5
Removed Methods           High         48
Problems with Data Types  High          5
                          Medium        6
                          Low           0
Problems with Methods     High          0
                          Medium        0
                          Low           0

Added Methods (5)


spark-streaming_2.10-1.2.0.jar, JavaStreamingContext.class
package org.apache.spark.streaming.api.java
JavaStreamingContext.fileStream ( String directory )  :  JavaPairInputDStream<K,V>

spark-streaming_2.10-1.2.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.getNewReceiverStreamId ( )  :  int
StreamingContext.state ( )  :  scala.Enumeration.Value
StreamingContext.state_.eq ( scala.Enumeration.Value p1 )  :  void
StreamingContext.StreamingContextState ( )  :  StreamingContext.StreamingContextState.


Removed Methods (48)


spark-core_2.10-1.4.0.jar, DeserializationStream.class
package org.apache.spark.serializer
DeserializationStream.asKeyValueIterator ( )  :  scala.collection.Iterator<scala.Tuple2<Object,Object>>
DeserializationStream.readKey ( scala.reflect.ClassTag<T> p1 )  :  T
DeserializationStream.readValue ( scala.reflect.ClassTag<T> p1 )  :  T

spark-core_2.10-1.4.0.jar, JavaSparkContext.class
package org.apache.spark.api.java
JavaSparkContext.setLogLevel ( String logLevel )  :  void
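
setLogLevel appears only in the 1.4.0 jars, so a client compiled against 1.4.0 that calls it fails on 1.2.0 with a NoSuchMethodError. A minimal portable sketch, assuming Log4j 1.x is on the classpath (as in stock Spark 1.x distributions); setLogLevelCompat is a hypothetical helper name:

    import org.apache.log4j.{Level, Logger}

    // jsc.setLogLevel("WARN") links only against the 1.4.0 jars;
    // adjusting the root logger through Log4j works on both versions.
    def setLogLevelCompat(level: String): Unit =
      Logger.getRootLogger.setLevel(Level.toLevel(level))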

spark-core_2.10-1.4.0.jar, SerializationStream.class
package org.apache.spark.serializer
SerializationStream.writeKey ( T key, scala.reflect.ClassTag<T> p2 )  :  SerializationStream
SerializationStream.writeValue ( T value, scala.reflect.ClassTag<T> p2 )  :  SerializationStream
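
The 1.4.0-only writeKey/writeValue conveniences delegate to writeObject, which both versions expose; the same correspondence holds for readKey/readValue versus readObject on DeserializationStream above. A minimal sketch of a portable helper (writePair is a hypothetical name):

    import scala.reflect.ClassTag
    import org.apache.spark.serializer.SerializationStream

    // writeObject predates both releases, so this links against
    // 1.2.0 and 1.4.0 alike.
    def writePair[K: ClassTag, V: ClassTag](out: SerializationStream,
        key: K, value: V): SerializationStream = {
      out.writeObject(key)   // portable stand-in for out.writeKey(key)
      out.writeObject(value) // portable stand-in for out.writeValue(value)
    }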

spark-core_2.10-1.4.0.jar, Serializer.class
package org.apache.spark.serializer
Serializer.supportsRelocationOfSerializedObjects ( )  :  boolean

spark-core_2.10-1.4.0.jar, SparkContext.class
package org.apache.spark
SparkContext.addFile ( String path, boolean recursive )  :  void
SparkContext.applicationAttemptId ( )  :  scala.Option<String>
SparkContext.createSparkEnv ( SparkConf conf, boolean isLocal, scheduler.LiveListenerBus listenerBus )  :  SparkEnv
SparkContext.eventLogCodec ( )  :  scala.Option<String>
SparkContext.externalBlockStoreFolderName ( )  :  String
SparkContext.getOrCreate ( ) [static]  :  SparkContext
SparkContext.getOrCreate ( SparkConf p1 ) [static]  :  SparkContext
SparkContext.SparkContext.._conf ( )  :  SparkConf
SparkContext.SparkContext.._env ( )  :  SparkEnv
SparkContext.SparkContext..assertNotStopped ( )  :  void
SparkContext.range ( long start, long end, long step, int numSlices )  :  rdd.RDD<Object>
SparkContext.requestTotalExecutors ( int numExecutors )  :  boolean
SparkContext.setLogLevel ( String logLevel )  :  void
SparkContext.supportDynamicAllocation ( )  :  boolean
SparkContext.withScope ( scala.Function0<U> body )  :  U
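
Several of these are 1.4.0-era convenience entry points. SparkContext.getOrCreate, for example, has no 1.2.0 counterpart, but direct construction links against both versions. A minimal sketch (the application name is illustrative):

    import org.apache.spark.{SparkConf, SparkContext}

    val conf = new SparkConf().setAppName("infinispan-spark-demo")
    // SparkContext.getOrCreate(conf) exists only in the 1.4.0 jar;
    // the plain constructor is available in both versions.
    val sc = new SparkContext(conf)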

spark-core_2.10-1.4.0.jar, TaskContext.class
package org.apache.spark
TaskContext.attemptNumber ( ) [abstract]  :  int
TaskContext.taskAttemptId ( ) [abstract]  :  long
TaskContext.taskMemoryManager ( ) [abstract]  :  unsafe.memory.TaskMemoryManager
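
Task code compiled against 1.4.0 that reads these accessors will not link on a 1.2.0 cluster. A minimal sketch of such a 1.4.0-only closure, assuming TaskContext.get(), which to the best of our knowledge is already present in 1.2.0:

    import org.apache.spark.{SparkConf, SparkContext, TaskContext}

    val sc = new SparkContext(
      new SparkConf().setAppName("task-info-demo").setMaster("local[2]"))
    // attemptNumber() exists only on the 1.4.0 TaskContext, so this
    // closure fails with a linkage error when run against 1.2.0.
    val attempts = sc.parallelize(1 to 4, 2)
      .map(_ => TaskContext.get().attemptNumber())
      .collect()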

spark-streaming_2.10-1.4.0.jar, JavaStreamingContext.class
package org.apache.spark.streaming.api.java
JavaStreamingContext.awaitTerminationOrTimeout ( long timeout )  :  boolean
JavaStreamingContext.binaryRecordsStream ( String directory, int recordLength )  :  JavaDStream<byte[ ]>
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass, org.apache.spark.api.java.function.Function<org.apache.hadoop.fs.Path,Boolean> filter, boolean newFilesOnly )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.fileStream ( String directory, Class<K> kClass, Class<V> vClass, Class<F> fClass, org.apache.spark.api.java.function.Function<org.apache.hadoop.fs.Path,Boolean> filter, boolean newFilesOnly, org.apache.hadoop.conf.Configuration conf )  :  JavaPairInputDStream<K,V>
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2, org.apache.hadoop.conf.Configuration p3 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getOrCreate ( String p1, org.apache.spark.api.java.function.Function0<JavaStreamingContext> p2, org.apache.hadoop.conf.Configuration p3, boolean p4 ) [static]  :  JavaStreamingContext
JavaStreamingContext.getState ( )  :  org.apache.spark.streaming.StreamingContextState

spark-streaming_2.10-1.4.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.awaitTerminationOrTimeout ( long timeout )  :  boolean
StreamingContext.binaryRecordsStream ( String directory, int recordLength )  :  dstream.DStream<byte[ ]>
StreamingContext.fileStream ( String directory, scala.Function1<org.apache.hadoop.fs.Path,Object> filter, boolean newFilesOnly, org.apache.hadoop.conf.Configuration conf, scala.reflect.ClassTag<K> p5, scala.reflect.ClassTag<V> p6, scala.reflect.ClassTag<F> p7 )  :  dstream.InputDStream<scala.Tuple2<K,V>>
StreamingContext.getActive ( ) [static]  :  scala.Option<StreamingContext>
StreamingContext.getActiveOrCreate ( scala.Function0<StreamingContext> p1 ) [static]  :  StreamingContext
StreamingContext.getActiveOrCreate ( String p1, scala.Function0<StreamingContext> p2, org.apache.hadoop.conf.Configuration p3, boolean p4 ) [static]  :  StreamingContext
StreamingContext.getNewInputStreamId ( )  :  int
StreamingContext.getState ( )  :  StreamingContextState
StreamingContext.isCheckpointingEnabled ( )  :  boolean
StreamingContext.StreamingContext..startSite ( )  :  java.util.concurrent.atomic.AtomicReference<org.apache.spark.util.CallSite>
StreamingContext.StreamingContext..stopOnShutdown ( )  :  void
StreamingContext.StreamingContext ( String path, org.apache.spark.SparkContext sparkContext )
StreamingContext.withNamedScope ( String name, scala.Function0<U> body )  :  U
StreamingContext.withScope ( scala.Function0<U> body )  :  U
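
awaitTerminationOrTimeout, getState and the getActiveOrCreate variants are absent from the 1.2.0 jars (the same applies to their JavaStreamingContext counterparts above). A minimal sketch of the portable pattern, using the older awaitTermination(long) overload that, as far as we can tell, both versions still carry (deprecated in later 1.x releases); the host, port and batch interval are illustrative:

    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val ssc = new StreamingContext(
      new SparkConf().setAppName("stream-demo").setMaster("local[2]"), Seconds(1))
    ssc.socketTextStream("localhost", 9999).print() // an output op is required before start()
    ssc.start()
    // awaitTerminationOrTimeout(ms) is 1.4.0-only; awaitTermination(ms)
    // returns Unit and is accepted by both versions.
    ssc.awaitTermination(10000L)
    ssc.stop()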


Problems with Data Types, High Severity (5)


spark-core_2.10-1.4.0.jar
package org.apache.spark
[+] SparkContext (1)
[+] TaskContext (3)

spark-streaming_2.10-1.4.0.jar
package org.apache.spark.streaming.ui
[+] StreamingJobProgressListener (1)


Problems with Data Types, Medium Severity (6)


spark-core_2.10-1.4.0.jar
package org.apache.spark.api.java
[+] JavaDoubleRDD (1)
[+] JavaPairRDD<K,V> (1)
[+] JavaRDD<T> (1)

package org.apache.spark.scheduler
[+] LiveListenerBus (1)

spark-streaming_2.10-1.4.0.jar
package org.apache.spark.streaming.api.java
[+] JavaDStream<T> (1)
[+] JavaPairDStream<K,V> (1)


Java ARchives (3)


spark-core_2.10-1.4.0.jar
spark-sql_2.10-1.4.0.jar
spark-streaming_2.10-1.4.0.jar





Generated on Thu Aug 13 09:47:16 2015 for infinispan-spark_2.10-0.1 by Java API Compliance Checker 1.4.1  
A tool for checking backward compatibility of a Java library API