Binary compatibility report for the spark-testing-base_2.10-1.5.1_0.2.0 library between Spark versions 1.5.0 and 1.3.0 (assessing the portability of the client application spark-testing-base_2.10-1.5.1_0.2.0.jar)
Test Info

| Property | Value |
|---|---|
| Library Name | spark-testing-base_2.10-1.5.1_0.2.0 |
| Version #1 | 1.5.0 |
| Version #2 | 1.3.0 |
| Java Version | 1.7.0_75 |
Test Results

| Property | Value |
|---|---|
| Total Java ARchives | 7 |
| Total Methods / Classes | 1258 / 4156 |
| Verdict | Incompatible (17.6%) |
Problem Summary

| Problem Kind | Severity | Count |
|---|---|---|
| Added Methods | - | 23 |
| Removed Methods | High | 174 |
| Problems with Data Types | High | 10 |
| Problems with Data Types | Medium | 3 |
| Problems with Data Types | Low | 1 |
| Problems with Methods | High | 0 |
| Problems with Methods | Medium | 0 |
| Problems with Methods | Low | 0 |
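These problems surface only when an affected call site executes, as linkage errors such as NoSuchMethodError or NoClassDefFoundError. As a minimal, hypothetical Scala sketch (not part of the checker's tooling), a client can probe for a method reflectively before relying on it:

```scala
// Sketch: feature-test an API via reflection so that a binary-incompatible
// method can be detected up front instead of failing mid-run.
object ApiProbe {
  def hasMethod(className: String, method: String, params: Class[_]*): Boolean =
    try {
      Class.forName(className).getMethod(method, params: _*)
      true
    } catch {
      case _: ClassNotFoundException | _: NoSuchMethodException => false
    }
}
```

For example, `ApiProbe.hasMethod("org.apache.spark.SparkContext", "setLogLevel", classOf[String])` would return false on a 1.3.0 classpath.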
Added Methods (23)
spark-core_2.10-1.3.0.jar, RDD<T>.class
package org.apache.spark.rdd
RDD<T>.markCheckpointed ( RDD<?> checkpointRDD ) : void
[mangled: org/apache/spark/rdd/RDD<T>.markCheckpointed:(Lorg/apache/spark/rdd/RDD;)V]
spark-core_2.10-1.3.0.jar, SparkConf.class
package org.apache.spark
SparkConf.translateConfKey ( String p1, boolean p2 ) [static] : String
[mangled: org/apache/spark/SparkConf.translateConfKey:(Ljava/lang/String;Z)Ljava/lang/String;]
spark-core_2.10-1.3.0.jar, SparkContext.class
package org.apache.spark
SparkContext.requestTotalExecutors ( int numExecutors ) : boolean
[mangled: org/apache/spark/SparkContext.requestTotalExecutors:(I)Z]
spark-sql_2.10-1.3.0.jar, DataFrame.class
package org.apache.spark.sql
DataFrame.cache ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.cache:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.collect ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.collect:()Ljava/lang/Object;]
DataFrame.first ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.first:()Ljava/lang/Object;]
DataFrame.persist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.persist ( org.apache.spark.storage.StorageLevel newLevel ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/RDDApi;]
DataFrame.showString ( int numRows ) : String
[mangled: org/apache/spark/sql/DataFrame.showString:(I)Ljava/lang/String;]
DataFrame.take ( int n ) : Object
[mangled: org/apache/spark/sql/DataFrame.take:(I)Ljava/lang/Object;]
DataFrame.unpersist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.unpersist ( boolean blocking ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:(Z)Lorg/apache/spark/sql/RDDApi;]
spark-sql_2.10-1.3.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.cacheManager ( ) : CacheManager
[mangled: org/apache/spark/sql/SQLContext.cacheManager:()Lorg/apache/spark/sql/CacheManager;]
SQLContext.checkAnalysis ( ) : catalyst.analysis.CheckAnalysis
[mangled: org/apache/spark/sql/SQLContext.checkAnalysis:()Lorg/apache/spark/sql/catalyst/analysis/CheckAnalysis;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, java.util.List<String> columns ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/util/List;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.ddlParser ( ) : sources.DDLParser
[mangled: org/apache/spark/sql/SQLContext.ddlParser:()Lorg/apache/spark/sql/sources/DDLParser;]
spark-streaming_2.10-1.3.0.jar, Checkpoint.class
package org.apache.spark.streaming
Checkpoint.getCheckpointFiles ( String p1, org.apache.hadoop.fs.FileSystem p2 ) [static] : scala.collection.Seq<org.apache.hadoop.fs.Path>
[mangled: org/apache/spark/streaming/Checkpoint.getCheckpointFiles:(Ljava/lang/String;Lorg/apache/hadoop/fs/FileSystem;)Lscala/collection/Seq;]
Checkpoint.sparkConf ( ) : org.apache.spark.SparkConf
[mangled: org/apache/spark/streaming/Checkpoint.sparkConf:()Lorg/apache/spark/SparkConf;]
spark-streaming_2.10-1.3.0.jar, DStream<T>.class
package org.apache.spark.streaming.dstream
DStream<T>.validate ( ) : void
[mangled: org/apache/spark/streaming/dstream/DStream<T>.validate:()V]
spark-streaming_2.10-1.3.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.getNewReceiverStreamId ( ) : int
[mangled: org/apache/spark/streaming/StreamingContext.getNewReceiverStreamId:()I]
StreamingContext.state ( ) : scala.Enumeration.Value
[mangled: org/apache/spark/streaming/StreamingContext.state:()Lscala/Enumeration$Value;]
StreamingContext.state_.eq ( scala.Enumeration.Value p1 ) : void
[mangled: org/apache/spark/streaming/StreamingContext.state_.eq:(Lscala/Enumeration$Value;)V]
StreamingContext.StreamingContextState ( ) : StreamingContext.StreamingContextState.
[mangled: org/apache/spark/streaming/StreamingContext.StreamingContextState:()Lorg/apache/spark/streaming/StreamingContext$StreamingContextState$;]
Removed Methods (174)
spark-catalyst_2.10-1.5.0.jar, Row.class
package org.apache.spark.sql
Row.fieldIndex ( String p1 ) [abstract] : int
[mangled: org/apache/spark/sql/Row.fieldIndex:(Ljava/lang/String;)I]
Row.getAs ( String p1 ) [abstract] : T
[mangled: org/apache/spark/sql/Row.getAs:(Ljava/lang/String;)Ljava/lang/Object;]
Row.getTimestamp ( int p1 ) [abstract] : java.sql.Timestamp
[mangled: org/apache/spark/sql/Row.getTimestamp:(I)Ljava/sql/Timestamp;]
Row.getValuesMap ( scala.collection.Seq<String> p1 ) [abstract] : scala.collection.immutable.Map<String,T>
[mangled: org/apache/spark/sql/Row.getValuesMap:(Lscala/collection/Seq;)Lscala/collection/immutable/Map;]
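To make the impact concrete, a minimal Scala sketch (a hypothetical client compiled against Spark 1.5.0; the column name "name" is illustrative). Each call resolves at compile time but fails with a NoSuchMethodError when run against the 1.3.0 jars:

```scala
import org.apache.spark.sql.Row

// Row.fieldIndex(String) and Row.getAs[T](String) exist in 1.5.0 only;
// on a 1.3.0 classpath these calls throw NoSuchMethodError at runtime.
object RowAccess {
  def userName(row: Row): String = row.getAs[String]("name")
  def nameIndex(row: Row): Int   = row.fieldIndex("name")
}
```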
spark-catalyst_2.10-1.5.0.jar, StructType.class
package org.apache.spark.sql.types
StructType.add ( StructField field ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Lorg/apache/spark/sql/types/StructField;)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, DataType dataType ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, DataType dataType, boolean nullable ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Lorg/apache/spark/sql/types/DataType;Z)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, DataType dataType, boolean nullable, Metadata metadata ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Lorg/apache/spark/sql/types/DataType;ZLorg/apache/spark/sql/types/Metadata;)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, String dataType ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, String dataType, boolean nullable ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Ljava/lang/String;Z)Lorg/apache/spark/sql/types/StructType;]
StructType.add ( String name, String dataType, boolean nullable, Metadata metadata ) : StructType
[mangled: org/apache/spark/sql/types/StructType.add:(Ljava/lang/String;Ljava/lang/String;ZLorg/apache/spark/sql/types/Metadata;)Lorg/apache/spark/sql/types/StructType;]
StructType.existsRecursively ( scala.Function1<DataType,Object> f ) : boolean
[mangled: org/apache/spark/sql/types/StructType.existsRecursively:(Lscala/Function1;)Z]
StructType.fieldIndex ( String name ) : int
[mangled: org/apache/spark/sql/types/StructType.fieldIndex:(Ljava/lang/String;)I]
StructType.getFieldIndex ( String name ) : scala.Option<Object>
[mangled: org/apache/spark/sql/types/StructType.getFieldIndex:(Ljava/lang/String;)Lscala/Option;]
StructType.interpretedOrdering ( ) : org.apache.spark.sql.catalyst.expressions.InterpretedOrdering
[mangled: org/apache/spark/sql/types/StructType.interpretedOrdering:()Lorg/apache/spark/sql/catalyst/expressions/InterpretedOrdering;]
StructType.StructType ( )
[mangled: org/apache/spark/sql/types/StructType."<init>":()V]
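For example, a schema built with the fluent add overloads and the no-argument constructor links only against 1.5.0; a minimal sketch with hypothetical field names:

```scala
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

// new StructType() and the add(...) overloads listed above are absent
// in 1.3.0, so each call site fails to link there.
object SchemaExample {
  val schema: StructType = new StructType()
    .add("id", IntegerType, nullable = false)
    .add("name", StringType)
}
```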
spark-core_2.10-1.5.0.jar, RDD<T>.class
package org.apache.spark.rdd
RDD<T>.isLocallyCheckpointed ( ) : boolean
[mangled: org/apache/spark/rdd/RDD<T>.isLocallyCheckpointed:()Z]
RDD<T>.localCheckpoint ( ) : RDD<T>
[mangled: org/apache/spark/rdd/RDD<T>.localCheckpoint:()Lorg/apache/spark/rdd/RDD;]
RDD<T>.markCheckpointed ( ) : void
[mangled: org/apache/spark/rdd/RDD<T>.markCheckpointed:()V]
RDD<T>.RDD..doCheckpointCalled ( ) : boolean
[mangled: org/apache/spark/rdd/RDD<T>.org.apache.spark.rdd.RDD..doCheckpointCalled:()Z]
RDD<T>.RDD..doCheckpointCalled_.eq ( boolean p1 ) : void
[mangled: org/apache/spark/rdd/RDD<T>.org.apache.spark.rdd.RDD..doCheckpointCalled_.eq:(Z)V]
RDD<T>.RDD..sc ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/rdd/RDD<T>.org.apache.spark.rdd.RDD..sc:()Lorg/apache/spark/SparkContext;]
RDD<T>.randomSampleWithRange ( double lb, double ub, long seed ) : RDD<T>
[mangled: org/apache/spark/rdd/RDD<T>.randomSampleWithRange:(DDJ)Lorg/apache/spark/rdd/RDD;]
RDD<T>.scope ( ) : scala.Option<RDDOperationScope>
[mangled: org/apache/spark/rdd/RDD<T>.scope:()Lscala/Option;]
RDD<T>.withScope ( scala.Function0<U> body ) : U
[mangled: org/apache/spark/rdd/RDD<T>.withScope:(Lscala/Function0;)Ljava/lang/Object;]
spark-core_2.10-1.5.0.jar, SparkConf.class
package org.apache.spark
SparkConf.getAvroSchema ( ) : scala.collection.immutable.Map<Object,String>
[mangled: org/apache/spark/SparkConf.getAvroSchema:()Lscala/collection/immutable/Map;]
SparkConf.getDeprecatedConfig ( String p1, SparkConf p2 ) [static] : scala.Option<String>
[mangled: org/apache/spark/SparkConf.getDeprecatedConfig:(Ljava/lang/String;Lorg/apache/spark/SparkConf;)Lscala/Option;]
SparkConf.getSizeAsBytes ( String key ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsBytes:(Ljava/lang/String;)J]
SparkConf.getSizeAsBytes ( String key, long defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsBytes:(Ljava/lang/String;J)J]
SparkConf.getSizeAsBytes ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsBytes:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.getSizeAsGb ( String key ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsGb:(Ljava/lang/String;)J]
SparkConf.getSizeAsGb ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsGb:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.getSizeAsKb ( String key ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsKb:(Ljava/lang/String;)J]
SparkConf.getSizeAsKb ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsKb:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.getSizeAsMb ( String key ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsMb:(Ljava/lang/String;)J]
SparkConf.getSizeAsMb ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getSizeAsMb:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.getTimeAsMs ( String key ) : long
[mangled: org/apache/spark/SparkConf.getTimeAsMs:(Ljava/lang/String;)J]
SparkConf.getTimeAsMs ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getTimeAsMs:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.getTimeAsSeconds ( String key ) : long
[mangled: org/apache/spark/SparkConf.getTimeAsSeconds:(Ljava/lang/String;)J]
SparkConf.getTimeAsSeconds ( String key, String defaultValue ) : long
[mangled: org/apache/spark/SparkConf.getTimeAsSeconds:(Ljava/lang/String;Ljava/lang/String;)J]
SparkConf.logDeprecationWarning ( String p1 ) [static] : void
[mangled: org/apache/spark/SparkConf.logDeprecationWarning:(Ljava/lang/String;)V]
SparkConf.registerAvroSchemas ( scala.collection.Seq<org.apache.avro.Schema> schemas ) : SparkConf
[mangled: org/apache/spark/SparkConf.registerAvroSchemas:(Lscala/collection/Seq;)Lorg/apache/spark/SparkConf;]
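A minimal sketch of client code that depends on these typed getters (the configuration keys and default values below are illustrative):

```scala
import org.apache.spark.SparkConf

// getTimeAsSeconds / getSizeAsMb parse suffixed values such as "120s" or
// "32k"; both were added after 1.3.0, so these calls break on 1.3.0.
object ConfExample {
  def networkTimeoutSec(conf: SparkConf): Long =
    conf.getTimeAsSeconds("spark.network.timeout", "120s")
  def shuffleBufferMb(conf: SparkConf): Long =
    conf.getSizeAsMb("spark.shuffle.file.buffer", "32k")
}
```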
spark-core_2.10-1.5.0.jar, SparkContext.class
package org.apache.spark
SparkContext.applicationAttemptId ( ) : scala.Option<String>
[mangled: org/apache/spark/SparkContext.applicationAttemptId:()Lscala/Option;]
SparkContext.externalBlockStoreFolderName ( ) : String
[mangled: org/apache/spark/SparkContext.externalBlockStoreFolderName:()Ljava/lang/String;]
SparkContext.getOrCreate ( ) [static] : SparkContext
[mangled: org/apache/spark/SparkContext.getOrCreate:()Lorg/apache/spark/SparkContext;]
SparkContext.getOrCreate ( SparkConf p1 ) [static] : SparkContext
[mangled: org/apache/spark/SparkContext.getOrCreate:(Lorg/apache/spark/SparkConf;)Lorg/apache/spark/SparkContext;]
SparkContext.killAndReplaceExecutor ( String executorId ) : boolean
[mangled: org/apache/spark/SparkContext.killAndReplaceExecutor:(Ljava/lang/String;)Z]
SparkContext.SparkContext.._cleaner ( ) : scala.Option<ContextCleaner>
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._cleaner:()Lscala/Option;]
SparkContext.SparkContext.._conf ( ) : SparkConf
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._conf:()Lorg/apache/spark/SparkConf;]
SparkContext.SparkContext.._dagScheduler ( ) : scheduler.DAGScheduler
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._dagScheduler:()Lorg/apache/spark/scheduler/DAGScheduler;]
SparkContext.SparkContext.._env ( ) : SparkEnv
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._env:()Lorg/apache/spark/SparkEnv;]
SparkContext.SparkContext.._eventLogger ( ) : scala.Option<scheduler.EventLoggingListener>
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._eventLogger:()Lscala/Option;]
SparkContext.SparkContext.._executorAllocationManager ( ) : scala.Option<ExecutorAllocationManager>
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._executorAllocationManager:()Lscala/Option;]
SparkContext.SparkContext.._heartbeatReceiver ( ) : rpc.RpcEndpointRef
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._heartbeatReceiver:()Lorg/apache/spark/rpc/RpcEndpointRef;]
SparkContext.SparkContext.._listenerBusStarted_.eq ( boolean p1 ) : void
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._listenerBusStarted_.eq:(Z)V]
SparkContext.SparkContext.._progressBar ( ) : scala.Option<ui.ConsoleProgressBar>
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._progressBar:()Lscala/Option;]
SparkContext.SparkContext.._ui ( ) : scala.Option<ui.SparkUI>
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext.._ui:()Lscala/Option;]
SparkContext.SparkContext..assertNotStopped ( ) : void
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext..assertNotStopped:()V]
SparkContext.SparkContext..postApplicationEnd ( ) : void
[mangled: org/apache/spark/SparkContext.org.apache.spark.SparkContext..postApplicationEnd:()V]
SparkContext.range ( long start, long end, long step, int numSlices ) : rdd.RDD<Object>
[mangled: org/apache/spark/SparkContext.range:(JJJI)Lorg/apache/spark/rdd/RDD;]
SparkContext.requestTotalExecutors ( int numExecutors, int localityAwareTasks, scala.collection.immutable.Map<String,Object> hostToLocalTaskCount ) : boolean
[mangled: org/apache/spark/SparkContext.requestTotalExecutors:(IILscala/collection/immutable/Map;)Z]
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function1<scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.reflect.ClassTag<U> p4 ) : Object
[mangled: org/apache/spark/SparkContext.runJob:(Lorg/apache/spark/rdd/RDD;Lscala/Function1;Lscala/collection/Seq;Lscala/reflect/ClassTag;)Ljava/lang/Object;]
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function2<TaskContext,scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.Function2<Object,U,scala.runtime.BoxedUnit> resultHandler, scala.reflect.ClassTag<U> p5 ) : void
[mangled: org/apache/spark/SparkContext.runJob:(Lorg/apache/spark/rdd/RDD;Lscala/Function2;Lscala/collection/Seq;Lscala/Function2;Lscala/reflect/ClassTag;)V]
SparkContext.runJob ( rdd.RDD<T> rdd, scala.Function2<TaskContext,scala.collection.Iterator<T>,U> func, scala.collection.Seq<Object> partitions, scala.reflect.ClassTag<U> p4 ) : Object
[mangled: org/apache/spark/SparkContext.runJob:(Lorg/apache/spark/rdd/RDD;Lscala/Function2;Lscala/collection/Seq;Lscala/reflect/ClassTag;)Ljava/lang/Object;]
SparkContext.setLogLevel ( String logLevel ) : void
[mangled: org/apache/spark/SparkContext.setLogLevel:(Ljava/lang/String;)V]
SparkContext.withScope ( scala.Function0<U> body ) : U
[mangled: org/apache/spark/SparkContext.withScope:(Lscala/Function0;)Ljava/lang/Object;]
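A sketch of a hypothetical driver bootstrap that relies on two of the removed members (the application name is illustrative):

```scala
import org.apache.spark.{SparkConf, SparkContext}

// SparkContext.getOrCreate and setLogLevel exist in 1.5.0 but not 1.3.0;
// against 1.3.0 the first call throws NoSuchMethodError.
object ContextExample {
  def start(): SparkContext = {
    val sc = SparkContext.getOrCreate(new SparkConf().setAppName("demo"))
    sc.setLogLevel("WARN")
    sc
  }
}
```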
spark-sql_2.10-1.5.0.jar, DataFrame.class
package org.apache.spark.sql
DataFrame.coalesce ( int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.coalesce:(I)Lorg/apache/spark/sql/DataFrame;]
DataFrame.cube ( Column... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.cube:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.cube ( scala.collection.Seq<Column> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.cube:(Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.cube ( String col1, scala.collection.Seq<String> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.cube:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.cube ( String col1, String... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.cube:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.describe ( scala.collection.Seq<String> cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.describe:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.describe ( String... cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.describe:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.drop ( Column col ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.drop:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.drop ( String colName ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.drop:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.dropDuplicates ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.dropDuplicates:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.dropDuplicates ( scala.collection.Seq<String> colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.dropDuplicates:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.dropDuplicates ( String[ ] colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.dropDuplicates:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.inputFiles ( ) : String[ ]
[mangled: org/apache/spark/sql/DataFrame.inputFiles:()[Ljava/lang/String;]
DataFrame.join ( DataFrame right, scala.collection.Seq<String> usingColumns ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.join ( DataFrame right, String usingColumn ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.na ( ) : DataFrameNaFunctions
[mangled: org/apache/spark/sql/DataFrame.na:()Lorg/apache/spark/sql/DataFrameNaFunctions;]
DataFrame.DataFrame..logicalPlanToDataFrame ( catalyst.plans.logical.LogicalPlan logicalPlan ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.org.apache.spark.sql.DataFrame..logicalPlanToDataFrame:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.randomSplit ( double[ ] weights ) : DataFrame[ ]
[mangled: org/apache/spark/sql/DataFrame.randomSplit:([D)[Lorg/apache/spark/sql/DataFrame;]
DataFrame.randomSplit ( double[ ] weights, long seed ) : DataFrame[ ]
[mangled: org/apache/spark/sql/DataFrame.randomSplit:([DJ)[Lorg/apache/spark/sql/DataFrame;]
DataFrame.randomSplit ( scala.collection.immutable.List<Object> weights, long seed ) : DataFrame[ ]
[mangled: org/apache/spark/sql/DataFrame.randomSplit:(Lscala/collection/immutable/List;J)[Lorg/apache/spark/sql/DataFrame;]
DataFrame.rollup ( Column... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.rollup:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.rollup ( scala.collection.Seq<Column> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.rollup:(Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.rollup ( String col1, scala.collection.Seq<String> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.rollup:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.rollup ( String col1, String... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.rollup:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.show ( boolean truncate ) : void
[mangled: org/apache/spark/sql/DataFrame.show:(Z)V]
DataFrame.show ( int numRows, boolean truncate ) : void
[mangled: org/apache/spark/sql/DataFrame.show:(IZ)V]
DataFrame.showString ( int _numRows, boolean truncate ) : String
[mangled: org/apache/spark/sql/DataFrame.showString:(IZ)Ljava/lang/String;]
DataFrame.stat ( ) : DataFrameStatFunctions
[mangled: org/apache/spark/sql/DataFrame.stat:()Lorg/apache/spark/sql/DataFrameStatFunctions;]
DataFrame.where ( String conditionExpr ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.where:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.withNewExecutionId ( scala.Function0<T> body ) : T
[mangled: org/apache/spark/sql/DataFrame.withNewExecutionId:(Lscala/Function0;)Ljava/lang/Object;]
DataFrame.write ( ) : DataFrameWriter
[mangled: org/apache/spark/sql/DataFrame.write:()Lorg/apache/spark/sql/DataFrameWriter;]
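A minimal sketch exercising several of the removed DataFrame members (the column name and output path are hypothetical):

```scala
import org.apache.spark.sql.DataFrame

// na(), dropDuplicates(), drop(String) and write() are all 1.5.0-only;
// each call site throws NoSuchMethodError when run against 1.3.0.
object FrameExample {
  def clean(df: DataFrame): Unit =
    df.na.drop()
      .dropDuplicates()
      .drop("tmp_col")
      .write.parquet("/tmp/out")
}
```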
spark-sql_2.10-1.5.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.cacheManager ( ) : execution.CacheManager
[mangled: org/apache/spark/sql/SQLContext.cacheManager:()Lorg/apache/spark/sql/execution/CacheManager;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<Row> rowRDD, types.StructType schema, boolean needsConversion ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;Z)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createSession ( ) : SQLContext.SQLSession
[mangled: org/apache/spark/sql/SQLContext.createSession:()Lorg/apache/spark/sql/SQLContext$SQLSession;]
SQLContext.currentSession ( ) : SQLContext.SQLSession
[mangled: org/apache/spark/sql/SQLContext.currentSession:()Lorg/apache/spark/sql/SQLContext$SQLSession;]
SQLContext.ddlParser ( ) : execution.datasources.DDLParser
[mangled: org/apache/spark/sql/SQLContext.ddlParser:()Lorg/apache/spark/sql/execution/datasources/DDLParser;]
SQLContext.defaultSession ( ) : SQLContext.SQLSession
[mangled: org/apache/spark/sql/SQLContext.defaultSession:()Lorg/apache/spark/sql/SQLContext$SQLSession;]
SQLContext.detachSession ( ) : void
[mangled: org/apache/spark/sql/SQLContext.detachSession:()V]
SQLContext.dialectClassName ( ) : String
[mangled: org/apache/spark/sql/SQLContext.dialectClassName:()Ljava/lang/String;]
SQLContext.getConf ( SQLConf.SQLConfEntry<T> entry ) : T
[mangled: org/apache/spark/sql/SQLContext.getConf:(Lorg/apache/spark/sql/SQLConf$SQLConfEntry;)Ljava/lang/Object;]
SQLContext.getConf ( SQLConf.SQLConfEntry<T> entry, T defaultValue ) : T
[mangled: org/apache/spark/sql/SQLContext.getConf:(Lorg/apache/spark/sql/SQLConf$SQLConfEntry;Ljava/lang/Object;)Ljava/lang/Object;]
SQLContext.getOrCreate ( org.apache.spark.SparkContext p1 ) [static] : SQLContext
[mangled: org/apache/spark/sql/SQLContext.getOrCreate:(Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/SQLContext;]
SQLContext.getSQLDialect ( ) : catalyst.ParserDialect
[mangled: org/apache/spark/sql/SQLContext.getSQLDialect:()Lorg/apache/spark/sql/catalyst/ParserDialect;]
SQLContext.internalCreateDataFrame ( org.apache.spark.rdd.RDD<catalyst.InternalRow> catalystRows, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.internalCreateDataFrame:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.listener ( ) : execution.ui.SQLListener
[mangled: org/apache/spark/sql/SQLContext.listener:()Lorg/apache/spark/sql/execution/ui/SQLListener;]
SQLContext.openSession ( ) : SQLContext.SQLSession
[mangled: org/apache/spark/sql/SQLContext.openSession:()Lorg/apache/spark/sql/SQLContext$SQLSession;]
SQLContext.range ( long end ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.range:(J)Lorg/apache/spark/sql/DataFrame;]
SQLContext.range ( long start, long end ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.range:(JJ)Lorg/apache/spark/sql/DataFrame;]
SQLContext.range ( long start, long end, long step, int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.range:(JJJI)Lorg/apache/spark/sql/DataFrame;]
SQLContext.read ( ) : DataFrameReader
[mangled: org/apache/spark/sql/SQLContext.read:()Lorg/apache/spark/sql/DataFrameReader;]
SQLContext.setConf ( SQLConf.SQLConfEntry<T> entry, T value ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Lorg/apache/spark/sql/SQLConf$SQLConfEntry;Ljava/lang/Object;)V]
SQLContext.setSession ( SQLContext.SQLSession session ) : void
[mangled: org/apache/spark/sql/SQLContext.setSession:(Lorg/apache/spark/sql/SQLContext$SQLSession;)V]
SQLContext.tlSession ( ) : ThreadLocal<SQLContext.SQLSession>
[mangled: org/apache/spark/sql/SQLContext.tlSession:()Ljava/lang/ThreadLocal;]
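A brief sketch against the removed SQLContext surface (the JSON path is hypothetical):

```scala
import org.apache.spark.sql.{DataFrame, SQLContext}

// read() and the range(...) overloads were added after 1.3.0, so these
// calls fail to link on a 1.3.0 classpath.
object SqlExample {
  def load(ctx: SQLContext, path: String): DataFrame = ctx.read.json(path)
  def ids(ctx: SQLContext): DataFrame = ctx.range(0L, 100L)
}
```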
spark-streaming-kafka_2.10-1.5.0.jar, KafkaTestUtils.class
package org.apache.spark.streaming.kafka
KafkaTestUtils.brokerAddress ( ) : String
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.brokerAddress:()Ljava/lang/String;]
KafkaTestUtils.createTopic ( String topic ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.createTopic:(Ljava/lang/String;)V]
KafkaTestUtils.eventually ( org.apache.spark.streaming.Time timeout, org.apache.spark.streaming.Time interval, scala.Function0<T> func ) : T
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.eventually:(Lorg/apache/spark/streaming/Time;Lorg/apache/spark/streaming/Time;Lscala/Function0;)Ljava/lang/Object;]
KafkaTestUtils.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.isTraceEnabled:()Z]
KafkaTestUtils.KafkaTestUtils ( )
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils."<init>":()V]
KafkaTestUtils.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.log:()Lorg/slf4j/Logger;]
KafkaTestUtils.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logDebug:(Lscala/Function0;)V]
KafkaTestUtils.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
KafkaTestUtils.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logError:(Lscala/Function0;)V]
KafkaTestUtils.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
KafkaTestUtils.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logInfo:(Lscala/Function0;)V]
KafkaTestUtils.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
KafkaTestUtils.logName ( ) : String
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logName:()Ljava/lang/String;]
KafkaTestUtils.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logTrace:(Lscala/Function0;)V]
KafkaTestUtils.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
KafkaTestUtils.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logWarning:(Lscala/Function0;)V]
KafkaTestUtils.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
KafkaTestUtils.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
KafkaTestUtils.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
KafkaTestUtils.KafkaTestUtils..brokerConf ( ) : kafka.server.KafkaConfig
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..brokerConf:()Lkafka/server/KafkaConfig;]
KafkaTestUtils.KafkaTestUtils..brokerConf_.eq ( kafka.server.KafkaConfig p1 ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..brokerConf_.eq:(Lkafka/server/KafkaConfig;)V]
KafkaTestUtils.KafkaTestUtils..brokerConfiguration ( ) : java.util.Properties
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..brokerConfiguration:()Ljava/util/Properties;]
KafkaTestUtils.KafkaTestUtils..brokerPort_.eq ( int p1 ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..brokerPort_.eq:(I)V]
KafkaTestUtils.KafkaTestUtils..server ( ) : kafka.server.KafkaServer
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..server:()Lkafka/server/KafkaServer;]
KafkaTestUtils.KafkaTestUtils..server_.eq ( kafka.server.KafkaServer p1 ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.org.apache.spark.streaming.kafka.KafkaTestUtils..server_.eq:(Lkafka/server/KafkaServer;)V]
KafkaTestUtils.sendMessages ( String topic, java.util.Map<String,Integer> messageToFreq ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.sendMessages:(Ljava/lang/String;Ljava/util/Map;)V]
KafkaTestUtils.sendMessages ( String topic, scala.collection.immutable.Map<String,Object> messageToFreq ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.sendMessages:(Ljava/lang/String;Lscala/collection/immutable/Map;)V]
KafkaTestUtils.sendMessages ( String topic, String[ ] messages ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.sendMessages:(Ljava/lang/String;[Ljava/lang/String;)V]
KafkaTestUtils.setup ( ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.setup:()V]
KafkaTestUtils.teardown ( ) : void
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.teardown:()V]
KafkaTestUtils.zkAddress ( ) : String
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.zkAddress:()Ljava/lang/String;]
KafkaTestUtils.zookeeperClient ( ) : org.I0Itec.zkclient.ZkClient
[mangled: org/apache/spark/streaming/kafka/KafkaTestUtils.zookeeperClient:()Lorg/I0Itec/zkclient/ZkClient;]
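A minimal test-fixture sketch built on this class (topic handling is illustrative; the object is placed in Spark's own package, as spark-testing-base does, assuming package-level visibility of KafkaTestUtils). Because the whole class is missing in 1.3.0, merely loading such a fixture there raises NoClassDefFoundError:

```scala
// Hypothetical fixture; compiled against spark-streaming-kafka 1.5.0.
package org.apache.spark.streaming.kafka

object KafkaFixture {
  def withTopic(topic: String)(body: KafkaTestUtils => Unit): Unit = {
    val utils = new KafkaTestUtils
    utils.setup()
    try {
      utils.createTopic(topic)
      body(utils)
    } finally {
      utils.teardown()
    }
  }
}
```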
spark-streaming_2.10-1.5.0.jar, Checkpoint.class
package org.apache.spark.streaming
Checkpoint.createSparkConf ( ) : org.apache.spark.SparkConf
[mangled: org/apache/spark/streaming/Checkpoint.createSparkConf:()Lorg/apache/spark/SparkConf;]
Checkpoint.deserialize ( java.io.InputStream p1, org.apache.spark.SparkConf p2 ) [static] : Checkpoint
[mangled: org/apache/spark/streaming/Checkpoint.deserialize:(Ljava/io/InputStream;Lorg/apache/spark/SparkConf;)Lorg/apache/spark/streaming/Checkpoint;]
Checkpoint.getCheckpointFiles ( String p1, scala.Option<org.apache.hadoop.fs.FileSystem> p2 ) [static] : scala.collection.Seq<org.apache.hadoop.fs.Path>
[mangled: org/apache/spark/streaming/Checkpoint.getCheckpointFiles:(Ljava/lang/String;Lscala/Option;)Lscala/collection/Seq;]
Checkpoint.serialize ( Checkpoint p1, org.apache.spark.SparkConf p2 ) [static] : byte[ ]
[mangled: org/apache/spark/streaming/Checkpoint.serialize:(Lorg/apache/spark/streaming/Checkpoint;Lorg/apache/spark/SparkConf;)[B]
spark-streaming_2.10-1.5.0.jar, DStream<T>.class
package org.apache.spark.streaming.dstream
DStream<T>.baseScope ( ) : scala.Option<String>
[mangled: org/apache/spark/streaming/dstream/DStream<T>.baseScope:()Lscala/Option;]
DStream<T>.createRDDWithLocalProperties ( org.apache.spark.streaming.Time time, scala.Function0<U> body ) : U
[mangled: org/apache/spark/streaming/dstream/DStream<T>.createRDDWithLocalProperties:(Lorg/apache/spark/streaming/Time;Lscala/Function0;)Ljava/lang/Object;]
DStream<T>.validateAtStart ( ) : void
[mangled: org/apache/spark/streaming/dstream/DStream<T>.validateAtStart:()V]
spark-streaming_2.10-1.5.0.jar, InputDStream<T>.class
package org.apache.spark.streaming.dstream
InputDStream<T>.baseScope ( ) : scala.Option<String>
[mangled: org/apache/spark/streaming/dstream/InputDStream<T>.baseScope:()Lscala/Option;]
InputDStream<T>.id ( ) : int
[mangled: org/apache/spark/streaming/dstream/InputDStream<T>.id:()I]
InputDStream<T>.name ( ) : String
[mangled: org/apache/spark/streaming/dstream/InputDStream<T>.name:()Ljava/lang/String;]
InputDStream<T>.rateController ( ) : scala.Option<org.apache.spark.streaming.scheduler.RateController>
[mangled: org/apache/spark/streaming/dstream/InputDStream<T>.rateController:()Lscala/Option;]
spark-streaming_2.10-1.5.0.jar, StreamingContext.class
package org.apache.spark.streaming
StreamingContext.getActive ( ) [static] : scala.Option<StreamingContext>
[mangled: org/apache/spark/streaming/StreamingContext.getActive:()Lscala/Option;]
StreamingContext.getActiveOrCreate ( scala.Function0<StreamingContext> p1 ) [static] : StreamingContext
[mangled: org/apache/spark/streaming/StreamingContext.getActiveOrCreate:(Lscala/Function0;)Lorg/apache/spark/streaming/StreamingContext;]
StreamingContext.getActiveOrCreate ( String p1, scala.Function0<StreamingContext> p2, org.apache.hadoop.conf.Configuration p3, boolean p4 ) [static] : StreamingContext
[mangled: org/apache/spark/streaming/StreamingContext.getActiveOrCreate:(Ljava/lang/String;Lscala/Function0;Lorg/apache/hadoop/conf/Configuration;Z)Lorg/apache/spark/streaming/StreamingContext;]
StreamingContext.getNewInputStreamId ( ) : int
[mangled: org/apache/spark/streaming/StreamingContext.getNewInputStreamId:()I]
StreamingContext.getState ( ) : StreamingContextState
[mangled: org/apache/spark/streaming/StreamingContext.getState:()Lorg/apache/spark/streaming/StreamingContextState;]
StreamingContext.isCheckpointingEnabled ( ) : boolean
[mangled: org/apache/spark/streaming/StreamingContext.isCheckpointingEnabled:()Z]
StreamingContext.StreamingContext..startSite ( ) : java.util.concurrent.atomic.AtomicReference<org.apache.spark.util.CallSite>
[mangled: org/apache/spark/streaming/StreamingContext.org.apache.spark.streaming.StreamingContext..startSite:()Ljava/util/concurrent/atomic/AtomicReference;]
StreamingContext.StreamingContext..stopOnShutdown ( ) : void
[mangled: org/apache/spark/streaming/StreamingContext.org.apache.spark.streaming.StreamingContext..stopOnShutdown:()V]
StreamingContext.StreamingContext ( String path, org.apache.spark.SparkContext sparkContext )
[mangled: org/apache/spark/streaming/StreamingContext."<init>":(Ljava/lang/String;Lorg/apache/spark/SparkContext;)V]
StreamingContext.withNamedScope ( String name, scala.Function0<U> body ) : U
[mangled: org/apache/spark/streaming/StreamingContext.withNamedScope:(Ljava/lang/String;Lscala/Function0;)Ljava/lang/Object;]
StreamingContext.withScope ( scala.Function0<U> body ) : U
[mangled: org/apache/spark/streaming/StreamingContext.withScope:(Lscala/Function0;)Ljava/lang/Object;]
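A sketch of a hypothetical guard that depends on the removed lifecycle API:

```scala
import org.apache.spark.streaming.{StreamingContext, StreamingContextState}

// getActiveOrCreate and getState were introduced after 1.3.0; against
// 1.3.0 jars the getActiveOrCreate call throws NoSuchMethodError.
object StreamingExample {
  def ensureStarted(create: () => StreamingContext): StreamingContext = {
    val ssc = StreamingContext.getActiveOrCreate(create)
    if (ssc.getState() != StreamingContextState.ACTIVE) ssc.start()
    ssc
  }
}
```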
spark-streaming_2.10-1.5.0.jar, Time.class
package org.apache.spark.streaming
Time.floor ( Duration that, Time zeroTime ) : Time
[mangled: org/apache/spark/streaming/Time.floor:(Lorg/apache/spark/streaming/Duration;Lorg/apache/spark/streaming/Time;)Lorg/apache/spark/streaming/Time;]
Problems with Data Types, High Severity (10)
spark-catalyst_2.10-1.5.0.jar
package org.apache.spark.sql
Row (4)

| # | Change | Effect |
|---|---|---|
| 1 | Abstract method fieldIndex ( java.lang.String ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
| 2 | Abstract method getAs ( java.lang.String ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
| 3 | Abstract method getTimestamp ( int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
| 4 | Abstract method getValuesMap ( scala.collection.Seq<java.lang.String> ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
Affected methods (33):

first ( ) - Return value of this method has type 'Row'.
head ( ) - Return value of this method has type 'Row'.
anyNull ( ) - This abstract method is from 'Row' interface.
apply ( int ) - This abstract method is from 'Row' interface.
copy ( ) - Return value of this abstract method has type 'Row'.
equals ( java.lang.Object ) - This abstract method is from 'Row' interface.
get ( int ) - This abstract method is from 'Row' interface.
getAs ( int ) - This abstract method is from 'Row' interface.
getBoolean ( int ) - This abstract method is from 'Row' interface.
getByte ( int ) - This abstract method is from 'Row' interface.
getDate ( int ) - This abstract method is from 'Row' interface.
getDecimal ( int ) - This abstract method is from 'Row' interface.
getDouble ( int ) - This abstract method is from 'Row' interface.
getFloat ( int ) - This abstract method is from 'Row' interface.
getInt ( int ) - This abstract method is from 'Row' interface.
getJavaMap ( int ) - This abstract method is from 'Row' interface.
getList ( int ) - This abstract method is from 'Row' interface.
getLong ( int ) - This abstract method is from 'Row' interface.
getMap ( int ) - This abstract method is from 'Row' interface.
getSeq ( int ) - This abstract method is from 'Row' interface.
getShort ( int ) - This abstract method is from 'Row' interface.
getString ( int ) - This abstract method is from 'Row' interface.
getStruct ( int ) - Return value of this abstract method has type 'Row'.
hashCode ( ) - This abstract method is from 'Row' interface.
isNullAt ( int ) - This abstract method is from 'Row' interface.
length ( ) - This abstract method is from 'Row' interface.
mkString ( ) - This abstract method is from 'Row' interface.
mkString ( java.lang.String ) - This abstract method is from 'Row' interface.
mkString ( java.lang.String, java.lang.String, java.lang.String ) - This abstract method is from 'Row' interface.
schema ( ) - This abstract method is from 'Row' interface.
size ( ) - This abstract method is from 'Row' interface.
toSeq ( ) - This abstract method is from 'Row' interface.
toString ( ) - This abstract method is from 'Row' interface.
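A short sketch touching the other two removed abstract methods (the field index and field names are hypothetical):

```scala
import java.sql.Timestamp
import org.apache.spark.sql.Row

// getTimestamp(int) and getValuesMap(Seq[String]) exist only on the
// 1.5.0 Row interface; 1.3.0 lacks them, so these calls fail to link.
object RowProbe {
  def createdAt(row: Row): Timestamp = row.getTimestamp(0)
  def asMap(row: Row): Map[String, Any] =
    row.getValuesMap[Any](Seq("id", "name"))
}
```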
package org.apache.spark.sql.catalyst.analysis
Analyzer (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-interface CheckAnalysis. | A client program may be interrupted by a NoSuchMethodError exception. |
Affected methods (1):

analyzer ( ) - Return value of this method has type 'Analyzer'.
package org.apache.spark.sql.catalyst.expressions
NamedExpression (1)

| # | Change | Effect |
|---|---|---|
| 1 | This interface became a class. | A client program may be interrupted by an IncompatibleClassChangeError exception. |
Affected methods (1):

resolve ( java.lang.String ) - Return value of this method has type 'NamedExpression'.
spark-sql_2.10-1.5.0.jar
package org.apache.spark.sql
Column (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-interface org.apache.spark.Logging. | A client program may be interrupted by a NoSuchMethodError exception. |
Affected methods (9):

agg ( Column, Column... ) - 1st parameter 'expr' of this method has type 'Column'.
agg ( Column, scala.collection.Seq<Column> ) - 1st parameter 'expr' of this method has type 'Column'.
apply ( java.lang.String ) - Return value of this method has type 'Column'.
col ( java.lang.String ) - Return value of this method has type 'Column'.
filter ( Column ) - 1st parameter 'condition' of this method has type 'Column'.
join ( DataFrame, Column ) - 2nd parameter 'joinExprs' of this method has type 'Column'.
join ( DataFrame, Column, java.lang.String ) - 2nd parameter 'joinExprs' of this method has type 'Column'.
where ( Column ) - 1st parameter 'condition' of this method has type 'Column'.
withColumn ( java.lang.String, Column ) - 2nd parameter 'col' of this method has type 'Column'.
SQLConf (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-interface catalyst.CatalystConf. | A client program may be interrupted by a NoSuchMethodError exception. |
Affected methods (1):

conf ( ) - Return value of this method has type 'SQLConf'.
spark-streaming-kafka_2.10-1.5.0.jar
package org.apache.spark.streaming.kafka
KafkaTestUtils (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
Affected methods (32):

brokerAddress ( ) - This method is from 'KafkaTestUtils' class.
createTopic ( java.lang.String ) - This method is from 'KafkaTestUtils' class.
eventually ( org.apache.spark.streaming.Time, org.apache.spark.streaming.Time, scala.Function0<T> ) - This method is from 'KafkaTestUtils' class.
isTraceEnabled ( ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils ( ) - This constructor is from 'KafkaTestUtils' class.
log ( ) - This method is from 'KafkaTestUtils' class.
logDebug ( scala.Function0<java.lang.String> ) - This method is from 'KafkaTestUtils' class.
logDebug ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'KafkaTestUtils' class.
logError ( scala.Function0<java.lang.String> ) - This method is from 'KafkaTestUtils' class.
logError ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'KafkaTestUtils' class.
logInfo ( scala.Function0<java.lang.String> ) - This method is from 'KafkaTestUtils' class.
logInfo ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'KafkaTestUtils' class.
logName ( ) - This method is from 'KafkaTestUtils' class.
logTrace ( scala.Function0<java.lang.String> ) - This method is from 'KafkaTestUtils' class.
logTrace ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'KafkaTestUtils' class.
logWarning ( scala.Function0<java.lang.String> ) - This method is from 'KafkaTestUtils' class.
logWarning ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'KafkaTestUtils' class.
org.apache.spark.Logging..log_ ( ) - This method is from 'KafkaTestUtils' class.
org.apache.spark.Logging..log__.eq ( org.slf4j.Logger ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..brokerConf ( ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..brokerConf_.eq ( kafka.server.KafkaConfig ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..brokerConfiguration ( ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..brokerPort_.eq ( int ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..server ( ) - This method is from 'KafkaTestUtils' class.
KafkaTestUtils..server_.eq ( kafka.server.KafkaServer ) - This method is from 'KafkaTestUtils' class.
sendMessages ( java.lang.String, java.lang.String[ ] ) - This method is from 'KafkaTestUtils' class.
sendMessages ( java.lang.String, java.util.Map<java.lang.String,java.lang.Integer> ) - This method is from 'KafkaTestUtils' class.
sendMessages ( java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.Object> ) - This method is from 'KafkaTestUtils' class.
setup ( ) - This method is from 'KafkaTestUtils' class.
teardown ( ) - This method is from 'KafkaTestUtils' class.
zkAddress ( ) - This method is from 'KafkaTestUtils' class.
zookeeperClient ( ) - This method is from 'KafkaTestUtils' class.
spark-streaming_2.10-1.5.0.jar
package org.apache.spark.streaming.ui
StreamingJobProgressListener (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-interface org.apache.spark.scheduler.SparkListener. | A client program may be interrupted by a NoSuchMethodError exception. |
Affected methods (1):

progressListener ( ) - Return value of this method has type 'StreamingJobProgressListener'.
Problems with Data Types, Medium Severity (3)
spark-catalyst_2.10-1.5.0.jar
package org.apache.spark.sql.types
DataType (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-class AbstractDataType. | A client program's access to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. |
Affected methods (2):

parseDataType ( java.lang.String ) - Return value of this method has type 'DataType'.
asNullable ( ) - Return value of this method has type 'DataType'.
spark-core_2.10-1.5.0.jar
package org.apache.spark.api.java
JavaRDD<T> (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-class AbstractJavaRDDLike<T,JavaRDD<T>>. | A client program's access to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. |
Affected methods (1):

toJavaRDD ( ) - Return value of this method has type 'JavaRDD<T>'.
spark-sql_2.10-1.5.0.jar
package org.apache.spark.sql
SQLContext.implicits. (1)

| # | Change | Effect |
|---|---|---|
| 1 | Removed super-class SQLImplicits. | A client program's access to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. |
Affected methods (1):

implicits ( ) - Return value of this method has type 'SQLContext.implicits.'.
Problems with Data Types, Low Severity (1)
spark-sql_2.10-1.5.0.jar
package org.apache.spark.sql
DataFrame (1)

| # | Change | Effect |
|---|---|---|
| 1 | Added super-class java.lang.Object. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from the new super-class and cause an IncompatibleClassChangeError exception. |
Affected methods (110):

agg ( java.util.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
agg ( Column, Column... ) - Return value of this method has type 'DataFrame'.
agg ( Column, scala.collection.Seq<Column> ) - Return value of this method has type 'DataFrame'.
agg ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
agg ( scala.Tuple2<java.lang.String,java.lang.String>, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> ) - Return value of this method has type 'DataFrame'.
apply ( java.lang.String ) - This method is from 'DataFrame' class.
as ( java.lang.String ) - Return value of this method has type 'DataFrame'.
as ( scala.Symbol ) - Return value of this method has type 'DataFrame'.
cache ( ) - Return value of this method has type 'DataFrame'.
col ( java.lang.String ) - This method is from 'DataFrame' class.
collect ( ) - This method is from 'DataFrame' class.
collectAsList ( ) - This method is from 'DataFrame' class.
columns ( ) - This method is from 'DataFrame' class.
count ( ) - This method is from 'DataFrame' class.
DataFrame ( SQLContext, catalyst.plans.logical.LogicalPlan ) - This constructor is from 'DataFrame' class.
DataFrame ( SQLContext, SQLContext.QueryExecution ) - This constructor is from 'DataFrame' class.
distinct ( ) - Return value of this method has type 'DataFrame'.
dtypes ( ) - This method is from 'DataFrame' class.
except ( DataFrame ) - Return value of this method has type 'DataFrame'.
explain ( ) - This method is from 'DataFrame' class.
explain ( boolean ) - This method is from 'DataFrame' class.
explode ( java.lang.String, java.lang.String, scala.Function1<A,scala.collection.TraversableOnce<B>>, scala.reflect.api.TypeTags.TypeTag<B> ) - Return value of this method has type 'DataFrame'.
explode ( scala.collection.Seq<Column>, scala.Function1<Row,scala.collection.TraversableOnce<A>>, scala.reflect.api.TypeTags.TypeTag<A> ) - Return value of this method has type 'DataFrame'.
filter ( java.lang.String ) - Return value of this method has type 'DataFrame'.
filter ( Column ) - Return value of this method has type 'DataFrame'.
first ( ) - This method is from 'DataFrame' class.
flatMap ( scala.Function1<Row,scala.collection.TraversableOnce<R>>, scala.reflect.ClassTag<R> ) - This method is from 'DataFrame' class.
foreach ( scala.Function1<Row,scala.runtime.BoxedUnit> ) - This method is from 'DataFrame' class.
foreachPartition ( scala.Function1<scala.collection.Iterator<Row>,scala.runtime.BoxedUnit> ) - This method is from 'DataFrame' class.
groupBy ( java.lang.String, java.lang.String... ) - This method is from 'DataFrame' class.
groupBy ( java.lang.String, scala.collection.Seq<java.lang.String> ) - This method is from 'DataFrame' class.
groupBy ( Column... ) - This method is from 'DataFrame' class.
groupBy ( scala.collection.Seq<Column> ) - This method is from 'DataFrame' class.
head ( ) - This method is from 'DataFrame' class.
head ( int ) - This method is from 'DataFrame' class.
intersect ( DataFrame ) - Return value of this method has type 'DataFrame'.
isLocal ( ) - This method is from 'DataFrame' class.
javaRDD ( ) - This method is from 'DataFrame' class.
javaToPython ( ) - This method is from 'DataFrame' class.
join ( DataFrame ) - 1st parameter 'right' of this method has type 'DataFrame'.
join ( DataFrame, Column ) - 1st parameter 'right' of this method has type 'DataFrame'.
join ( DataFrame, Column, java.lang.String ) - 1st parameter 'right' of this method has type 'DataFrame'.
limit ( int ) - Return value of this method has type 'DataFrame'.
logicalPlan ( ) - This method is from 'DataFrame' class.
map ( scala.Function1<Row,R>, scala.reflect.ClassTag<R> ) - This method is from 'DataFrame' class.
mapPartitions ( scala.Function1<scala.collection.Iterator<Row>,scala.collection.Iterator<R>>, scala.reflect.ClassTag<R> ) - This method is from 'DataFrame' class.
numericColumns ( ) - This method is from 'DataFrame' class.
orderBy ( java.lang.String, java.lang.String... ) - Return value of this method has type 'DataFrame'.
orderBy ( java.lang.String, scala.collection.Seq<java.lang.String> ) - Return value of this method has type 'DataFrame'.
orderBy ( Column... ) - Return value of this method has type 'DataFrame'.
orderBy ( scala.collection.Seq<Column> ) - Return value of this method has type 'DataFrame'.
persist ( ) - Return value of this method has type 'DataFrame'.
persist ( org.apache.spark.storage.StorageLevel ) - Return value of this method has type 'DataFrame'.
printSchema ( ) - This method is from 'DataFrame' class.
queryExecution ( ) - This method is from 'DataFrame' class.
rdd ( ) - This method is from 'DataFrame' class.
registerTempTable ( java.lang.String ) - This method is from 'DataFrame' class.
repartition ( int ) - Return value of this method has type 'DataFrame'.
resolve ( java.lang.String ) - This method is from 'DataFrame' class.
sample ( boolean, double ) - Return value of this method has type 'DataFrame'.
sample ( boolean, double, long ) - Return value of this method has type 'DataFrame'.
schema ( ) - This method is from 'DataFrame' class.
select ( java.lang.String, java.lang.String... ) - Return value of this method has type 'DataFrame'.
select ( java.lang.String, scala.collection.Seq<java.lang.String> ) - Return value of this method has type 'DataFrame'.
select ( Column... ) - Return value of this method has type 'DataFrame'.
select ( scala.collection.Seq<Column> ) - Return value of this method has type 'DataFrame'.
selectExpr ( java.lang.String... ) - Return value of this method has type 'DataFrame'.
selectExpr ( scala.collection.Seq<java.lang.String> ) - Return value of this method has type 'DataFrame'.
show ( ) - This method is from 'DataFrame' class.
show ( int ) - This method is from 'DataFrame' class.
sort ( java.lang.String, java.lang.String... ) - Return value of this method has type 'DataFrame'.
sort ( java.lang.String, scala.collection.Seq<java.lang.String> ) - Return value of this method has type 'DataFrame'.
sort ( Column... ) - Return value of this method has type 'DataFrame'.
sort ( scala.collection.Seq<Column> ) - Return value of this method has type 'DataFrame'.
sqlContext ( ) - This method is from 'DataFrame' class.
take ( int ) - This method is from 'DataFrame' class.
toDF ( ) - Return value of this method has type 'DataFrame'.
toDF ( java.lang.String... ) - Return value of this method has type 'DataFrame'.
toDF ( scala.collection.Seq<java.lang.String> ) - Return value of this method has type 'DataFrame'.
toJavaRDD ( ) - This method is from 'DataFrame' class.
toJSON ( ) - This method is from 'DataFrame' class.
toString ( ) - This method is from 'DataFrame' class.
unionAll ( DataFrame ) - Return value of this method has type 'DataFrame'.
unpersist ( ) - Return value of this method has type 'DataFrame'.
unpersist ( boolean ) - Return value of this method has type 'DataFrame'.
where ( Column ) - Return value of this method has type 'DataFrame'.
withColumn ( java.lang.String, Column ) - Return value of this method has type 'DataFrame'.
withColumnRenamed ( java.lang.String, java.lang.String ) - Return value of this method has type 'DataFrame'.
applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<java.lang.Object[ ]>, java.lang.String ) - Return value of this method has type 'DataFrame'.
applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<java.lang.Object[ ]>, types.StructType ) - Return value of this method has type 'DataFrame'.
baseRelationToDataFrame ( sources.BaseRelation ) - Return value of this method has type 'DataFrame'.
createDataFrame ( org.apache.spark.api.java.JavaRDD<?>, java.lang.Class<?> ) - Return value of this method has type 'DataFrame'.
createDataFrame ( org.apache.spark.api.java.JavaRDD<Row>, types.StructType ) - Return value of this method has type 'DataFrame'.
createDataFrame ( org.apache.spark.rdd.RDD<?>, java.lang.Class<?> ) - Return value of this method has type 'DataFrame'.
createDataFrame ( org.apache.spark.rdd.RDD<A>, scala.reflect.api.TypeTags.TypeTag<A> ) - Return value of this method has type 'DataFrame'.
createDataFrame ( org.apache.spark.rdd.RDD<Row>, types.StructType ) - Return value of this method has type 'DataFrame'.
createDataFrame ( scala.collection.Seq<A>, scala.reflect.api.TypeTags.TypeTag<A> ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String, java.lang.String ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String, java.util.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String, types.StructType, java.util.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String, types.StructType, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
createExternalTable ( java.lang.String, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - Return value of this method has type 'DataFrame'.
emptyDataFrame ( ) - Return value of this method has type 'DataFrame'.
parquetFile ( java.lang.String... ) - Return value of this method has type 'DataFrame'.
registerDataFrameAsTable ( DataFrame, java.lang.String ) - 1st parameter 'df' of this method has type 'DataFrame'.
sql ( java.lang.String ) - Return value of this method has type 'DataFrame'.
table ( java.lang.String ) - Return value of this method has type 'DataFrame'.
tables ( ) - Return value of this method has type 'DataFrame'.
tables ( java.lang.String ) - Return value of this method has type 'DataFrame'.
Java ARchives (7)
spark-catalyst_2.10-1.5.0.jar
spark-core_2.10-1.5.0.jar
spark-hive_2.10-1.5.0.jar
spark-sql_2.10-1.5.0.jar
spark-streaming-kafka_2.10-1.5.0.jar
spark-streaming_2.10-1.5.0.jar
spark-yarn_2.10-1.5.0.jar
Generated on Tue Oct 27 23:46:35 2015 for spark-testing-base_2.10-1.5.1_0.2.0 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API