Binary compatibility report for the spark-avro_2.10-0.2.0 library between versions 1.2.0 and 1.0.0 (concerning the portability of the client application spark-avro_2.10-0.2.0.jar)
Test Info
Library Name | spark-avro_2.10-0.2.0 |
Version #1 | 1.2.0 |
Version #2 | 1.0.0 |
Java Version | 1.7.0_75 |
Test Results
Total Java ARchives | 1 |
---|
Total Methods / Classes | 368 / 405 |
---|
Verdict | Incompatible (59.8%) |
Problem Summary
| Problem Type | Severity | Count |
---|
Added Methods | - | 5 |
---|
Removed Methods | High | 174 |
---|
Problems with Data Types | High | 7 |
---|
Problems with Data Types | Medium | 0 |
Problems with Data Types | Low | 0 |
Problems with Methods | High | 0 |
---|
Problems with Methods | Medium | 0 |
Problems with Methods | Low | 0 |
Added Methods (5)
spark-sql_2.10-1.0.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.decimalToLiteral ( scala.math.BigDecimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.decimalToLiteral:(Lscala/math/BigDecimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.inferSchema ( org.apache.spark.rdd.RDD<scala.collection.immutable.Map<String,Object>> rdd ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.inferSchema:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.logger ( ) : com.typesafe.scalalogging.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.logger:()Lcom/typesafe/scalalogging/slf4j/Logger;]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer.
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer$;]
SQLContext.parser ( ) : catalyst.SqlParser
[mangled: org/apache/spark/sql/SQLContext.parser:()Lorg/apache/spark/sql/catalyst/SqlParser;]
to the top
Removed Methods (174)
spark-sql_2.10-1.2.0.jar, JavaSchemaRDD.class
package org.apache.spark.sql.api.java
JavaSchemaRDD.baseLogicalPlan ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
JavaSchemaRDD.collectAsync ( ) : org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.collectAsync:()Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.countAsync ( ) : org.apache.spark.api.java.JavaFutureAction<Long>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countAsync:()Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.foreachAsync ( org.apache.spark.api.java.function.VoidFunction<Row> f ) : org.apache.spark.api.java.JavaFutureAction<Void>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreachAsync:(Lorg/apache/spark/api/java/function/VoidFunction;)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.foreachPartitionAsync ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> f ) : org.apache.spark.api.java.JavaFutureAction<Void>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreachPartitionAsync:(Lorg/apache/spark/api/java/function/VoidFunction;)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
JavaSchemaRDD.partitions ( ) : java.util.List<org.apache.spark.Partition>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.partitions:()Ljava/util/List;]
JavaSchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.printSchema:()V]
JavaSchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.registerTempTable:(Ljava/lang/String;)V]
JavaSchemaRDD.schema ( ) : StructType
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schema:()Lorg/apache/spark/sql/api/java/StructType;]
JavaSchemaRDD.schemaRDD ( ) : org.apache.spark.sql.SchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schemaRDD:()Lorg/apache/spark/sql/SchemaRDD;]
JavaSchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schemaString:()Ljava/lang/String;]
JavaSchemaRDD.takeAsync ( int num ) : org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeAsync:(I)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.toJSON ( ) : org.apache.spark.api.java.JavaRDD<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toJSON:()Lorg/apache/spark/api/java/JavaRDD;]
spark-sql_2.10-1.2.0.jar, JavaSQLContext.class
package org.apache.spark.sql.api.java
JavaSQLContext.applySchema ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.applySchema:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.baseRelationToSchemaRDD ( org.apache.spark.sql.sources.BaseRelation baseRelation ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.baseRelationToSchemaRDD:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonFile ( String path ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonFile ( String path, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.registerFunction ( String name, UDF10<?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF10;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF11<?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF11;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF12<?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF12;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF13<?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF13;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF14;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF15;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF16;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF17;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF18;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF19;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF1<?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF1;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF20;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF21;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF22;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF2<?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF2;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF3<?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF3;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF4<?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF4;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF5<?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF5;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF6<?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF6;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF7<?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF7;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF8<?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF8;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF9<?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF9;Lorg/apache/spark/sql/api/java/DataType;)V]
spark-sql_2.10-1.2.0.jar, RelationProvider.class
package org.apache.spark.sql.sources
RelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/RelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/BaseRelation;]
spark-sql_2.10-1.2.0.jar, SchemaRDD.class
package org.apache.spark.sql
SchemaRDD.baseLogicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SchemaRDD.cache ( ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.cache:()Lorg/apache/spark/rdd/RDD;]
SchemaRDD.cache ( ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.cache:()Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.collect ( ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.collect:()Ljava/lang/Object;]
SchemaRDD.collect ( ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.collect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SchemaRDD.collectToPython ( ) : java.util.List<byte[ ]>
[mangled: org/apache/spark/sql/SchemaRDD.collectToPython:()Ljava/util/List;]
SchemaRDD.except ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.except:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.intersect ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersect:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.limit ( int limitNum ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.limit:(I)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.SchemaRDDLike._setter_.logicalPlan_.eq ( catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/SchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/SchemaRDD.printSchema:()V]
SchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SchemaRDD.registerTempTable:(Ljava/lang/String;)V]
SchemaRDD.schema ( ) : catalyst.types.StructType
[mangled: org/apache/spark/sql/SchemaRDD.schema:()Lorg/apache/spark/sql/catalyst/types/StructType;]
SchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/SchemaRDD.schemaString:()Ljava/lang/String;]
SchemaRDD.take ( int num ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)Ljava/lang/Object;]
SchemaRDD.take ( int num ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SchemaRDD.toJSON ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/SchemaRDD.toJSON:()Lorg/apache/spark/rdd/RDD;]
SchemaRDD.unpersist ( boolean blocking ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.unpersist:(Z)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.unpersist ( boolean blocking ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.unpersist:(Z)Lorg/apache/spark/sql/SchemaRDD;]
spark-sql_2.10-1.2.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.abs ( catalyst.expressions.Expression e ) : catalyst.expressions.Abs
[mangled: org/apache/spark/sql/SQLContext.abs:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Abs;]
SQLContext.applySchema ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> rowRDD, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchema:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.approxCountDistinct ( catalyst.expressions.Expression e, double rsd ) : catalyst.expressions.ApproxCountDistinct
[mangled: org/apache/spark/sql/SQLContext.approxCountDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;D)Lorg/apache/spark/sql/catalyst/expressions/ApproxCountDistinct;]
SQLContext.autoBroadcastJoinThreshold ( ) : int
[mangled: org/apache/spark/sql/SQLContext.autoBroadcastJoinThreshold:()I]
SQLContext.avg ( catalyst.expressions.Expression e ) : catalyst.expressions.Average
[mangled: org/apache/spark/sql/SQLContext.avg:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Average;]
SQLContext.baseRelationToSchemaRDD ( sources.BaseRelation baseRelation ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.baseRelationToSchemaRDD:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.bigDecimalToLiteral ( scala.math.BigDecimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.bigDecimalToLiteral:(Lscala/math/BigDecimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.cacheQuery ( SchemaRDD query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel ) : void
[mangled: org/apache/spark/sql/SQLContext.cacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Lscala/Option;Lorg/apache/spark/storage/StorageLevel;)V]
SQLContext.clear ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clear:()V]
SQLContext.clearCache ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clearCache:()V]
SQLContext.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.codegenEnabled:()Z]
SQLContext.columnBatchSize ( ) : int
[mangled: org/apache/spark/sql/SQLContext.columnBatchSize:()I]
SQLContext.columnNameOfCorruptRecord ( ) : String
[mangled: org/apache/spark/sql/SQLContext.columnNameOfCorruptRecord:()Ljava/lang/String;]
SQLContext.count ( catalyst.expressions.Expression e ) : catalyst.expressions.Count
[mangled: org/apache/spark/sql/SQLContext.count:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Count;]
SQLContext.countDistinct ( scala.collection.Seq<catalyst.expressions.Expression> e ) : catalyst.expressions.CountDistinct
[mangled: org/apache/spark/sql/SQLContext.countDistinct:(Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/CountDistinct;]
SQLContext.dateToLiteral ( java.sql.Date d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.dateToLiteral:(Ljava/sql/Date;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.ddlParser ( ) : sources.DDLParser
[mangled: org/apache/spark/sql/SQLContext.ddlParser:()Lorg/apache/spark/sql/sources/DDLParser;]
SQLContext.decimalToLiteral ( catalyst.types.decimal.Decimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.decimalToLiteral:(Lorg/apache/spark/sql/catalyst/types/decimal/Decimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.defaultSizeInBytes ( ) : long
[mangled: org/apache/spark/sql/SQLContext.defaultSizeInBytes:()J]
SQLContext.dialect ( ) : String
[mangled: org/apache/spark/sql/SQLContext.dialect:()Ljava/lang/String;]
SQLContext.dropTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.dropTempTable:(Ljava/lang/String;)V]
SQLContext.emptyResult ( ) : org.apache.spark.rdd.RDD<catalyst.expressions.Row>
[mangled: org/apache/spark/sql/SQLContext.emptyResult:()Lorg/apache/spark/rdd/RDD;]
SQLContext.externalSortEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.externalSortEnabled:()Z]
SQLContext.extraStrategies ( ) : scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
[mangled: org/apache/spark/sql/SQLContext.extraStrategies:()Lscala/collection/Seq;]
SQLContext.extraStrategies_.eq ( scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>> p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.extraStrategies_.eq:(Lscala/collection/Seq;)V]
SQLContext.first ( catalyst.expressions.Expression e ) : catalyst.expressions.First
[mangled: org/apache/spark/sql/SQLContext.first:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/First;]
SQLContext.functionRegistry ( ) : catalyst.analysis.FunctionRegistry
[mangled: org/apache/spark/sql/SQLContext.functionRegistry:()Lorg/apache/spark/sql/catalyst/analysis/FunctionRegistry;]
SQLContext.getAllConfs ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.getAllConfs:()Lscala/collection/immutable/Map;]
SQLContext.getConf ( String key ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;)Ljava/lang/String;]
SQLContext.getConf ( String key, String defaultValue ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;]
SQLContext.inMemoryPartitionPruning ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.inMemoryPartitionPruning:()Z]
SQLContext.invalidateCache ( catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/SQLContext.invalidateCache:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SQLContext.isCached ( String tableName ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isCached:(Ljava/lang/String;)Z]
SQLContext.isParquetBinaryAsString ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isParquetBinaryAsString:()Z]
SQLContext.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isTraceEnabled:()Z]
SQLContext.jsonFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.last ( catalyst.expressions.Expression e ) : catalyst.expressions.Last
[mangled: org/apache/spark/sql/SQLContext.last:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Last;]
SQLContext.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.log:()Lorg/slf4j/Logger;]
SQLContext.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;)V]
SQLContext.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;)V]
SQLContext.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;)V]
SQLContext.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logName ( ) : String
[mangled: org/apache/spark/sql/SQLContext.logName:()Ljava/lang/String;]
SQLContext.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;)V]
SQLContext.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;)V]
SQLContext.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.lookupCachedData ( catalyst.plans.logical.LogicalPlan plan ) : scala.Option<CachedData>
[mangled: org/apache/spark/sql/SQLContext.lookupCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lscala/Option;]
SQLContext.lookupCachedData ( SchemaRDD query ) : scala.Option<CachedData>
[mangled: org/apache/spark/sql/SQLContext.lookupCachedData:(Lorg/apache/spark/sql/SchemaRDD;)Lscala/Option;]
SQLContext.lower ( catalyst.expressions.Expression e ) : catalyst.expressions.Lower
[mangled: org/apache/spark/sql/SQLContext.lower:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Lower;]
SQLContext.max ( catalyst.expressions.Expression e ) : catalyst.expressions.Max
[mangled: org/apache/spark/sql/SQLContext.max:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Max;]
SQLContext.min ( catalyst.expressions.Expression e ) : catalyst.expressions.Min
[mangled: org/apache/spark/sql/SQLContext.min:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Min;]
SQLContext.numShufflePartitions ( ) : int
[mangled: org/apache/spark/sql/SQLContext.numShufflePartitions:()I]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer;]
SQLContext.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
SQLContext.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
SQLContext.CacheManager..cachedData ( ) : scala.collection.mutable.ArrayBuffer<CachedData>
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager..cachedData:()Lscala/collection/mutable/ArrayBuffer;]
SQLContext.CacheManager..cacheLock ( ) : java.util.concurrent.locks.ReentrantReadWriteLock
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager..cacheLock:()Ljava/util/concurrent/locks/ReentrantReadWriteLock;]
SQLContext.CacheManager._setter_.CacheManager..cachedData_.eq ( scala.collection.mutable.ArrayBuffer p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager._setter_.org.apache.spark.sql.CacheManager..cachedData_.eq:(Lscala/collection/mutable/ArrayBuffer;)V]
SQLContext.CacheManager._setter_.CacheManager..cacheLock_.eq ( java.util.concurrent.locks.ReentrantReadWriteLock p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager._setter_.org.apache.spark.sql.CacheManager..cacheLock_.eq:(Ljava/util/concurrent/locks/ReentrantReadWriteLock;)V]
SQLContext.SQLConf._setter_.settings_.eq ( java.util.Map p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.SQLConf._setter_.settings_.eq:(Ljava/util/Map;)V]
SQLContext.parquetCompressionCodec ( ) : String
[mangled: org/apache/spark/sql/SQLContext.parquetCompressionCodec:()Ljava/lang/String;]
SQLContext.parquetFilterPushDown ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.parquetFilterPushDown:()Z]
SQLContext.parseDataType ( String dataTypeString ) : catalyst.types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/types/DataType;]
SQLContext.registerFunction ( String name, scala.Function10<?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function10;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function11<?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function11;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function12<?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function12;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function13<?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function13;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function14;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function15;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function16;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function17;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function18;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function19;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function1<?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function20;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function21;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function22;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function2<?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function2;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function3<?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function3;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function4<?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function4;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function5<?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function5;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function6<?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function6;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function7<?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function7;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function8<?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function8;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function9<?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function9;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType ) : void
[mangled: org/apache/spark/sql/SQLContext.registerPython:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Ljava/lang/String;)V]
SQLContext.setConf ( java.util.Properties props ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/util/Properties;)V]
SQLContext.setConf ( String key, String value ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/lang/String;Ljava/lang/String;)V]
SQLContext.settings ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.settings:()Ljava/util/Map;]
SQLContext.sqlParser ( ) : catalyst.SparkSQLParser
[mangled: org/apache/spark/sql/SQLContext.sqlParser:()Lorg/apache/spark/sql/catalyst/SparkSQLParser;]
SQLContext.sqrt ( catalyst.expressions.Expression e ) : catalyst.expressions.Sqrt
[mangled: org/apache/spark/sql/SQLContext.sqrt:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sqrt;]
SQLContext.sum ( catalyst.expressions.Expression e ) : catalyst.expressions.Sum
[mangled: org/apache/spark/sql/SQLContext.sum:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sum;]
SQLContext.sumDistinct ( catalyst.expressions.Expression e ) : catalyst.expressions.SumDistinct
[mangled: org/apache/spark/sql/SQLContext.sumDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/SumDistinct;]
SQLContext.tryUncacheQuery ( SchemaRDD query, boolean blocking ) : boolean
[mangled: org/apache/spark/sql/SQLContext.tryUncacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Z)Z]
SQLContext.uncacheQuery ( SchemaRDD query, boolean blocking ) : void
[mangled: org/apache/spark/sql/SQLContext.uncacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Z)V]
SQLContext.upper ( catalyst.expressions.Expression e ) : catalyst.expressions.Upper
[mangled: org/apache/spark/sql/SQLContext.upper:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Upper;]
SQLContext.useCachedData ( catalyst.plans.logical.LogicalPlan plan ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SQLContext.useCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SQLContext.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.useCompression:()Z]
spark-sql_2.10-1.2.0.jar, TableScan.class
package org.apache.spark.sql.sources
TableScan.buildScan ( ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.catalyst.expressions.Row>
[mangled: org/apache/spark/sql/sources/TableScan.buildScan:()Lorg/apache/spark/rdd/RDD;]
TableScan.TableScan ( )
[mangled: org/apache/spark/sql/sources/TableScan."<init>":()V]
to the top
Problems with Data Types, High Severity (7)
spark-sql_2.10-1.2.0.jar
package org.apache.spark.sql
[+] SQLContext (4)
| Change | Effect |
---|
1 | Removed super-interface org.apache.spark.Logging. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Removed super-interface CacheManager. | A client program may be interrupted by a NoSuchMethodError exception. |
3 | Removed super-interface SQLConf. | A client program may be interrupted by a NoSuchMethodError exception. |
4 | Removed super-interface UDFRegistration. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (39)
JavaSchemaRDD ( SQLContext, catalyst.plans.logical.LogicalPlan ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
JavaSQLContext ( SQLContext ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
SchemaRDD ( SQLContext, catalyst.plans.logical.LogicalPlan ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
analyzer ( ) — This method is from 'SQLContext' class.
binaryToLiteral ( byte[ ] ) — This method is from 'SQLContext' class.
booleanToLiteral ( boolean ) — This method is from 'SQLContext' class.
byteToLiteral ( byte ) — This method is from 'SQLContext' class.
cacheTable ( java.lang.String ) — This method is from 'SQLContext' class.
catalog ( ) — This method is from 'SQLContext' class.
createParquetFile ( java.lang.String, boolean, org.apache.hadoop.conf.Configuration, scala.reflect.api.TypeTags.TypeTag<A> ) — This method is from 'SQLContext' class.
createSchemaRDD ( org.apache.spark.rdd.RDD<A>, scala.reflect.api.TypeTags.TypeTag<A> ) — This method is from 'SQLContext' class.
doubleToLiteral ( double ) — This method is from 'SQLContext' class.
DslAttribute ( catalyst.expressions.AttributeReference ) — This method is from 'SQLContext' class.
DslExpression ( catalyst.expressions.Expression ) — This method is from 'SQLContext' class.
DslString ( java.lang.String ) — This method is from 'SQLContext' class.
DslSymbol ( scala.Symbol ) — This method is from 'SQLContext' class.
executePlan ( catalyst.plans.logical.LogicalPlan ) — This method is from 'SQLContext' class.
executeSql ( java.lang.String ) — This method is from 'SQLContext' class.
floatToLiteral ( float ) — This method is from 'SQLContext' class.
intToLiteral ( int ) — This method is from 'SQLContext' class.
logicalPlanToSparkQuery ( catalyst.plans.logical.LogicalPlan ) — This method is from 'SQLContext' class.
longToLiteral ( long ) — This method is from 'SQLContext' class.
parquetFile ( java.lang.String ) — This method is from 'SQLContext' class.
parseSql ( java.lang.String ) — This method is from 'SQLContext' class.
planner ( ) — This method is from 'SQLContext' class.
prepareForExecution ( ) — This method is from 'SQLContext' class.
registerRDDAsTable ( SchemaRDD, java.lang.String ) — This method is from 'SQLContext' class.
shortToLiteral ( short ) — This method is from 'SQLContext' class.
sparkContext ( ) — This method is from 'SQLContext' class.
sql ( java.lang.String ) — This method is from 'SQLContext' class.
SQLContext ( org.apache.spark.SparkContext ) — This constructor is from 'SQLContext' class.
stringToLiteral ( java.lang.String ) — This method is from 'SQLContext' class.
symbolToUnresolvedAttribute ( scala.Symbol ) — This method is from 'SQLContext' class.
table ( java.lang.String ) — This method is from 'SQLContext' class.
timestampToLiteral ( java.sql.Timestamp ) — This method is from 'SQLContext' class.
uncacheTable ( java.lang.String ) — This method is from 'SQLContext' class.
package org.apache.spark.sql.api.java
[+] JavaSQLContext (1)
| Change | Effect |
---|
1 | Removed super-interface UDFRegistration. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (9)
applySchema ( org.apache.spark.api.java.JavaRDD<?>, java.lang.Class<?> ) — This method is from 'JavaSQLContext' class.
createParquetFile ( java.lang.Class<?>, java.lang.String, boolean, org.apache.hadoop.conf.Configuration ) — This method is from 'JavaSQLContext' class.
getSchema ( java.lang.Class<?> ) — This method is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.api.java.JavaSparkContext ) — This constructor is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.sql.SQLContext ) — This constructor is from 'JavaSQLContext' class.
parquetFile ( java.lang.String ) — This method is from 'JavaSQLContext' class.
registerRDDAsTable ( JavaSchemaRDD, java.lang.String ) — This method is from 'JavaSQLContext' class.
sql ( java.lang.String ) — This method is from 'JavaSQLContext' class.
sqlContext ( ) — This method is from 'JavaSQLContext' class.
package org.apache.spark.sql.sources
[+] RelationProvider (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) — This abstract method is from 'RelationProvider' interface.
[+] TableScan (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
buildScan ( ) — This abstract method is from 'TableScan' abstract class.
TableScan ( ) — This constructor is from 'TableScan' abstract class.
to the top
Java ARchives (1)
spark-sql_2.10-1.2.0.jar
to the top
Generated on Sat Apr 11 01:08:11 2015 for spark-avro_2.10-0.2.0 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API