Binary compatibility report for the Spark-SQL-on-HBase-1.0.0 library, comparing versions 1.4.0 and 1.3.0 (relating to the portability of the client application Spark-SQL-on-HBase-1.0.0.jar)

Test Info


Library Name: Spark-SQL-on-HBase-1.0.0
Version #1: 1.4.0
Version #2: 1.3.0
Java Version: 1.7.0_75

Test Results


Total Java Archives: 3
Total Methods / Classes: 1029 / 3171
Verdict: Incompatible (40.1%)

Problem Summary


Severity / Count
Added Methods: (n/a) 19
Removed Methods: High 137
Problems with Data Types: High 23, Medium 7, Low 0
Problems with Methods: High 1, Medium 0, Low 0

Added Methods (19)


spark-catalyst_2.10-1.3.0.jar, AttributeReference.class
package org.apache.spark.sql.catalyst.expressions
AttributeReference.children ( )  :  scala.collection.immutable.Nil.

spark-catalyst_2.10-1.3.0.jar, Catalog.class
package org.apache.spark.sql.catalyst.analysis
Catalog.caseSensitive ( ) [abstract]  :  boolean

spark-catalyst_2.10-1.3.0.jar, DataType.class
package org.apache.spark.sql.types
DataType.isPrimitive ( )  :  boolean

spark-catalyst_2.10-1.3.0.jar, GenericMutableRow.class
package org.apache.spark.sql.catalyst.expressions
GenericMutableRow.copy ( )  :  GenericRow

spark-catalyst_2.10-1.3.0.jar, LogicalPlan.class
package org.apache.spark.sql.catalyst.plans.logical
LogicalPlan.LogicalPlan..resolveAsColumn ( String[ ] nameParts, scala.Function2<String,String,Object> resolver, org.apache.spark.sql.catalyst.expressions.Attribute attribute )  :  scala.Option<scala.Tuple2<org.apache.spark.sql.catalyst.expressions.Attribute,scala.collection.immutable.List<String>>>
LogicalPlan.LogicalPlan..resolveAsTableColumn ( String[ ] nameParts, scala.Function2<String,String,Object> resolver, org.apache.spark.sql.catalyst.expressions.Attribute attribute )  :  scala.Option<scala.Tuple2<org.apache.spark.sql.catalyst.expressions.Attribute,scala.collection.immutable.List<String>>>
LogicalPlan.resolve ( String name, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> input, scala.Function2<String,String,Object> resolver )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>
LogicalPlan.resolve ( String name, scala.Function2<String,String,Object> resolver )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>
LogicalPlan.resolveChildren ( String name, scala.Function2<String,String,Object> resolver )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>

spark-catalyst_2.10-1.3.0.jar, SqlParser.class
package org.apache.spark.sql.catalyst
SqlParser.DATE ( )  :  AbstractSparkSQLParser.Keyword
SqlParser.DECIMAL ( )  :  AbstractSparkSQLParser.Keyword
SqlParser.DOUBLE ( )  :  AbstractSparkSQLParser.Keyword
SqlParser.INT ( )  :  AbstractSparkSQLParser.Keyword
SqlParser.STRING ( )  :  AbstractSparkSQLParser.Keyword
SqlParser.TIMESTAMP ( )  :  AbstractSparkSQLParser.Keyword

spark-sql_2.10-1.3.0.jar, LogicalRelation.class
package org.apache.spark.sql.sources
LogicalRelation.newInstance ( )  :  org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation

spark-sql_2.10-1.3.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.cacheManager ( )  :  CacheManager
SQLContext.checkAnalysis ( )  :  catalyst.analysis.CheckAnalysis
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, java.util.List<String> columns )  :  DataFrame

to the top

Removed Methods (137)


spark-catalyst_2.10-1.4.0.jar, AtomicType.class
package org.apache.spark.sql.types
AtomicType.AtomicType ( )
AtomicType.classTag ( )  :  scala.reflect.ClassTag<Object>
AtomicType.ordering ( ) [abstract]  :  scala.math.Ordering<Object>
AtomicType.tag ( ) [abstract]  :  scala.reflect.api.TypeTags.TypeTag<Object>

spark-catalyst_2.10-1.4.0.jar, AttributeReference.class
package org.apache.spark.sql.catalyst.expressions
AttributeReference.sameRef ( AttributeReference other )  :  boolean
AttributeReference.semanticEquals ( Expression other )  :  boolean

spark-catalyst_2.10-1.4.0.jar, Catalog.class
package org.apache.spark.sql.catalyst.analysis
Catalog.conf ( ) [abstract]  :  org.apache.spark.sql.catalyst.CatalystConf

spark-catalyst_2.10-1.4.0.jar, Expression.class
package org.apache.spark.sql.catalyst.expressions
Expression.deterministic ( )  :  boolean
Expression.semanticEquals ( Expression other )  :  boolean

spark-catalyst_2.10-1.4.0.jar, Literal.class
package org.apache.spark.sql.catalyst.expressions
Literal.create ( Object p1, org.apache.spark.sql.types.DataType p2 ) [static]  :  Literal

spark-catalyst_2.10-1.4.0.jar, LogicalPlan.class
package org.apache.spark.sql.catalyst.plans.logical
LogicalPlan.LogicalPlan..resolveAsColumn ( scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver, org.apache.spark.sql.catalyst.expressions.Attribute attribute )  :  scala.Option<scala.Tuple2<org.apache.spark.sql.catalyst.expressions.Attribute,scala.collection.immutable.List<String>>>
LogicalPlan.LogicalPlan..resolveAsTableColumn ( scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver, org.apache.spark.sql.catalyst.expressions.Attribute attribute )  :  scala.Option<scala.Tuple2<org.apache.spark.sql.catalyst.expressions.Attribute,scala.collection.immutable.List<String>>>
LogicalPlan.resolve ( scala.collection.Seq<String> nameParts, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> input, scala.Function2<String,String,Object> resolver, boolean throwErrors )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>
LogicalPlan.resolve ( scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver, boolean throwErrors )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>
LogicalPlan.resolveChildren ( scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver, boolean throwErrors )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>
LogicalPlan.resolveQuoted ( String name, scala.Function2<String,String,Object> resolver )  :  scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression>

spark-catalyst_2.10-1.4.0.jar, ParserDialect.class
package org.apache.spark.sql.catalyst
ParserDialect.parse ( String p1 ) [abstract]  :  plans.logical.LogicalPlan
ParserDialect.ParserDialect ( )

spark-catalyst_2.10-1.4.0.jar, Row.class
package org.apache.spark.sql
Row.fieldIndex ( String p1 ) [abstract]  :  int
Row.getAs ( String p1 ) [abstract]  :  T
Row.getValuesMap ( scala.collection.Seq<String> p1 ) [abstract]  :  scala.collection.immutable.Map<String,T>

spark-catalyst_2.10-1.4.0.jar, SimpleCatalystConf.class
package org.apache.spark.sql.catalyst
SimpleCatalystConf.SimpleCatalystConf ( boolean caseSensitiveAnalysis )

spark-catalyst_2.10-1.4.0.jar, SqlParser.class
package org.apache.spark.sql.catalyst
SqlParser.arrayType ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.DataType>
SqlParser.cte ( )  :  scala.util.parsing.combinator.Parsers.Parser<plans.logical.LogicalPlan>
SqlParser.mapType ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.DataType>
SqlParser.primitiveType ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.DataType>
SqlParser.regexToParser ( scala.util.matching.Regex regex )  :  scala.util.parsing.combinator.Parsers.Parser<String>
SqlParser.start1 ( )  :  scala.util.parsing.combinator.Parsers.Parser<plans.logical.LogicalPlan>
SqlParser.structField ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.StructField>
SqlParser.structType ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.DataType>
SqlParser.toDataType ( String dataTypeString )  :  org.apache.spark.sql.types.DataType
SqlParser.varchar ( )  :  scala.util.parsing.combinator.Parsers.Parser<org.apache.spark.sql.types.DataType>
SqlParser.WITH ( )  :  AbstractSparkSQLParser.Keyword

spark-catalyst_2.10-1.4.0.jar, UTF8String.class
package org.apache.spark.sql.types
UTF8String.apply ( byte[ ] p1 ) [static]  :  UTF8String
UTF8String.apply ( String that ) [static]  :  UTF8String
UTF8String.clone ( )  :  Object
UTF8String.clone ( )  :  UTF8String
UTF8String.compare ( Object that )  :  int
UTF8String.compare ( UTF8String other )  :  int
UTF8String.compareTo ( Object that )  :  int
UTF8String.compareTo ( UTF8String other )  :  int
UTF8String.contains ( UTF8String sub )  :  boolean
UTF8String.endsWith ( UTF8String suffix )  :  boolean
UTF8String.equals ( Object other )  :  boolean
UTF8String.getBytes ( )  :  byte[ ]
UTF8String.hashCode ( )  :  int
UTF8String.length ( )  :  int
UTF8String.set ( byte[ ] bytes )  :  UTF8String
UTF8String.set ( String str )  :  UTF8String
UTF8String.slice ( int start, int until )  :  UTF8String
UTF8String.startsWith ( UTF8String prefix )  :  boolean
UTF8String.toLowerCase ( )  :  UTF8String
UTF8String.toString ( )  :  String
UTF8String.toUpperCase ( )  :  UTF8String
UTF8String.UTF8String ( )

spark-core_2.10-1.4.0.jar, JavaSparkContext.class
package org.apache.spark.api.java
JavaSparkContext.setLogLevel ( String logLevel )  :  void

spark-core_2.10-1.4.0.jar, SparkContext.class
package org.apache.spark
SparkContext.applicationAttemptId ( )  :  scala.Option<String>
SparkContext.externalBlockStoreFolderName ( )  :  String
SparkContext.getOrCreate ( ) [static]  :  SparkContext
SparkContext.getOrCreate ( SparkConf p1 ) [static]  :  SparkContext
SparkContext.SparkContext.._conf ( )  :  SparkConf
SparkContext.SparkContext.._env ( )  :  SparkEnv
SparkContext.SparkContext..assertNotStopped ( )  :  void
SparkContext.range ( long start, long end, long step, int numSlices )  :  rdd.RDD<Object>
SparkContext.setLogLevel ( String logLevel )  :  void
SparkContext.supportDynamicAllocation ( )  :  boolean
SparkContext.withScope ( scala.Function0<U> body )  :  U

spark-core_2.10-1.4.0.jar, TaskContext.class
package org.apache.spark
TaskContext.taskMemoryManager ( ) [abstract]  :  unsafe.memory.TaskMemoryManager

spark-core_2.10-1.4.0.jar, TaskContextImpl.class
package org.apache.spark
TaskContextImpl.TaskContextImpl ( int stageId, int partitionId, long taskAttemptId, int attemptNumber, unsafe.memory.TaskMemoryManager taskMemoryManager, boolean runningLocally, executor.TaskMetrics taskMetrics )

spark-sql_2.10-1.4.0.jar, BaseRelation.class
package org.apache.spark.sql.sources
BaseRelation.needConversion ( )  :  boolean

spark-sql_2.10-1.4.0.jar, DataFrame.class
package org.apache.spark.sql
DataFrame.coalesce ( int numPartitions )  :  DataFrame
DataFrame.cube ( Column... cols )  :  GroupedData
DataFrame.cube ( scala.collection.Seq<Column> cols )  :  GroupedData
DataFrame.cube ( String col1, scala.collection.Seq<String> cols )  :  GroupedData
DataFrame.cube ( String col1, String... cols )  :  GroupedData
DataFrame.describe ( scala.collection.Seq<String> cols )  :  DataFrame
DataFrame.describe ( String... cols )  :  DataFrame
DataFrame.drop ( String colName )  :  DataFrame
DataFrame.dropDuplicates ( )  :  DataFrame
DataFrame.dropDuplicates ( scala.collection.Seq<String> colNames )  :  DataFrame
DataFrame.dropDuplicates ( String[ ] colNames )  :  DataFrame
DataFrame.join ( DataFrame right, String usingColumn )  :  DataFrame
DataFrame.na ( )  :  DataFrameNaFunctions
DataFrame.DataFrame..logicalPlanToDataFrame ( catalyst.plans.logical.LogicalPlan logicalPlan )  :  DataFrame
DataFrame.randomSplit ( double[ ] weights )  :  DataFrame[ ]
DataFrame.randomSplit ( double[ ] weights, long seed )  :  DataFrame[ ]
DataFrame.randomSplit ( scala.collection.immutable.List<Object> weights, long seed )  :  DataFrame[ ]
DataFrame.rollup ( Column... cols )  :  GroupedData
DataFrame.rollup ( scala.collection.Seq<Column> cols )  :  GroupedData
DataFrame.rollup ( String col1, scala.collection.Seq<String> cols )  :  GroupedData
DataFrame.rollup ( String col1, String... cols )  :  GroupedData
DataFrame.stat ( )  :  DataFrameStatFunctions
DataFrame.write ( )  :  DataFrameWriter

spark-sql_2.10-1.4.0.jar, GeneratedAggregate.class
package org.apache.spark.sql.execution
GeneratedAggregate.GeneratedAggregate ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, boolean unsafeEnabled, SparkPlan child )

spark-sql_2.10-1.4.0.jar, LogicalRelation.class
package org.apache.spark.sql.sources
LogicalRelation.newInstance ( )  :  org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

spark-sql_2.10-1.4.0.jar, RunnableCommand.class
package org.apache.spark.sql.execution
RunnableCommand.children ( ) [abstract]  :  scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
RunnableCommand.output ( ) [abstract]  :  scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>

spark-sql_2.10-1.4.0.jar, SparkPlan.class
package org.apache.spark.sql.execution
SparkPlan.doExecute ( ) [abstract]  :  org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
SparkPlan.outputOrdering ( )  :  scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>
SparkPlan.requiredChildOrdering ( )  :  scala.collection.Seq<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>>

spark-sql_2.10-1.4.0.jar, SQLConf.class
package org.apache.spark.sql
SQLConf.CASE_SENSITIVE ( ) [static]  :  String
SQLConf.caseSensitiveAnalysis ( )  :  boolean
SQLConf.DATAFRAME_RETAIN_GROUP_COLUMNS ( ) [static]  :  String
SQLConf.DATAFRAME_SELF_JOIN_AUTO_RESOLVE_AMBIGUITY ( ) [static]  :  String
SQLConf.dataFrameRetainGroupColumns ( )  :  boolean
SQLConf.dataFrameSelfJoinAutoResolveAmbiguity ( )  :  boolean
SQLConf.HIVE_VERIFY_PARTITIONPATH ( ) [static]  :  String
SQLConf.ORC_FILTER_PUSHDOWN_ENABLED ( ) [static]  :  String
SQLConf.orcFilterPushDown ( )  :  boolean
SQLConf.OUTPUT_COMMITTER_CLASS ( ) [static]  :  String
SQLConf.PARTITION_DISCOVERY_ENABLED ( ) [static]  :  String
SQLConf.partitionDiscoveryEnabled ( )  :  boolean
SQLConf.SORTMERGE_JOIN ( ) [static]  :  String
SQLConf.sortMergeJoinEnabled ( )  :  boolean
SQLConf.THRIFTSERVER_UI_SESSION_LIMIT ( ) [static]  :  String
SQLConf.THRIFTSERVER_UI_STATEMENT_LIMIT ( ) [static]  :  String
SQLConf.UNSAFE_ENABLED ( ) [static]  :  String
SQLConf.unsafeEnabled ( )  :  boolean
SQLConf.USE_JACKSON_STREAMING_API ( ) [static]  :  String
SQLConf.USE_SQL_SERIALIZER2 ( ) [static]  :  String
SQLConf.useJacksonStreamingAPI ( )  :  boolean
SQLConf.useSqlSerializer2 ( )  :  boolean
SQLConf.verifyPartitionPath ( )  :  boolean

spark-sql_2.10-1.4.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.cacheManager ( )  :  execution.CacheManager
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<Row> rowRDD, types.StructType schema, boolean needsConversion )  :  DataFrame
SQLContext.createSession ( )  :  SQLContext.SQLSession
SQLContext.currentSession ( )  :  SQLContext.SQLSession
SQLContext.defaultSession ( )  :  SQLContext.SQLSession
SQLContext.detachSession ( )  :  void
SQLContext.dialectClassName ( )  :  String
SQLContext.getOrCreate ( org.apache.spark.SparkContext p1 ) [static]  :  SQLContext
SQLContext.getSQLDialect ( )  :  catalyst.ParserDialect
SQLContext.openSession ( )  :  SQLContext.SQLSession
SQLContext.range ( long start, long end )  :  DataFrame
SQLContext.range ( long start, long end, long step, int numPartitions )  :  DataFrame
SQLContext.read ( )  :  DataFrameReader
SQLContext.tlSession ( )  :  ThreadLocal<SQLContext.SQLSession>

to the top

Problems with Data Types, High Severity (23)


spark-catalyst_2.10-1.4.0.jar
package org.apache.spark.sql
[+] Row (3)

package org.apache.spark.sql.catalyst
[+] ParserDialect (1)
[+] SimpleCatalystConf (1)
[+] SqlParser (1)

package org.apache.spark.sql.catalyst.analysis
[+] Analyzer (1)
[+] Catalog (1)

package org.apache.spark.sql.catalyst.expressions
[+] And (2)
[+] Not (1)
[+] Or (2)

package org.apache.spark.sql.types
[+] AtomicType (1)
[+] UTF8String (1)

spark-core_2.10-1.4.0.jar
package org.apache.spark
[+] TaskContext (1)

spark-sql_2.10-1.4.0.jar
package org.apache.spark.sql
[+] Column (1)
[+] SQLConf (1)

package org.apache.spark.sql.execution
[+] RunnableCommand (2)
[+] SparkPlan (2)

package org.apache.spark.sql.sources
[+] DDLParser (1)

to the top

Problems with Methods, High Severity (1)


spark-sql_2.10-1.4.0.jar, SparkPlan
package org.apache.spark.sql.execution
[+] SparkPlan.execute ( )  :  org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> (1)

to the top

Problems with Data Types, Medium Severity (7)


spark-catalyst_2.10-1.4.0.jar
package org.apache.spark.sql.catalyst.analysis
[+] Catalog (1)

package org.apache.spark.sql.catalyst.expressions
[+] And (1)
[+] BoundReference (1)
[+] Or (1)

spark-core_2.10-1.4.0.jar
package org.apache.spark.api.java
[+] JavaDoubleRDD (1)
[+] JavaPairRDD<K,V> (1)
[+] JavaRDD<T> (1)

to the top

Java ARchives (3)


spark-catalyst_2.10-1.4.0.jar
spark-core_2.10-1.4.0.jar
spark-sql_2.10-1.4.0.jar

to the top




Generated on Fri Jul 17 23:36:47 2015 for Spark-SQL-on-HBase-1.0.0 by Java API Compliance Checker 1.4.1  
A tool for checking backward compatibility of a Java library API