Binary compatibility report for the elasticsearch-spark_2.10-2.1.0.Beta3 library between versions 1.1.0 and 1.0.0 (relating to the portability of the client application elasticsearch-spark_2.10-2.1.0.Beta3.jar)
Test Info
| Property | Value |
|---|---|
| Library Name | elasticsearch-spark_2.10-2.1.0.Beta3 |
| Version #1 | 1.1.0 |
| Version #2 | 1.0.0 |
| Java Version | 1.7.0_75 |
Test Results
| Metric | Value |
|---|---|
| Total Java ARchives | 2 |
| Total Methods / Classes | 778 / 1440 |
| Verdict | Incompatible (29.7%) |
Problem Summary
| Problem | Severity | Count |
|---|---|---|
| Added Methods | - | 7 |
| Removed Methods | High | 182 |
| Problems with Data Types | High | 9 |
| | Medium | 0 |
| | Low | 0 |
| Problems with Methods | High | 0 |
| | Medium | 0 |
| | Low | 0 |
Added Methods (7)
spark-core_2.10-1.0.0.jar, RDD<T>.class
package org.apache.spark.rdd
RDD<T>.creationSiteInfo ( ) : org.apache.spark.util.Utils.CallSiteInfo
[mangled: org/apache/spark/rdd/RDD<T>.creationSiteInfo:()Lorg/apache/spark/util/Utils$CallSiteInfo;]
spark-core_2.10-1.0.0.jar, SparkConf.class
package org.apache.spark
SparkConf.SparkConf..settings ( ) : scala.collection.mutable.HashMap<String,String>
[mangled: org/apache/spark/SparkConf.org.apache.spark.SparkConf..settings:()Lscala/collection/mutable/HashMap;]
spark-core_2.10-1.0.0.jar, SparkContext.class
package org.apache.spark
SparkContext.clean ( F f ) : F
[mangled: org/apache/spark/SparkContext.clean:(Ljava/lang/Object;)Ljava/lang/Object;]
SparkContext.getCallSite ( ) : String
[mangled: org/apache/spark/SparkContext.getCallSite:()Ljava/lang/String;]
spark-core_2.10-1.0.0.jar, TaskContext.class
package org.apache.spark
TaskContext.executeOnCompleteCallbacks ( ) : void
[mangled: org/apache/spark/TaskContext.executeOnCompleteCallbacks:()V]
spark-sql_2.10-1.0.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.inferSchema ( org.apache.spark.rdd.RDD<scala.collection.immutable.Map<String,Object>> rdd ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.inferSchema:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.logger ( ) : com.typesafe.scalalogging.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.logger:()Lcom/typesafe/scalalogging/slf4j/Logger;]
to the top
Removed Methods (182)
spark-core_2.10-1.1.0.jar, JavaSparkContext.class
package org.apache.spark.api.java
JavaSparkContext.accumulable ( T initialValue, String name, org.apache.spark.AccumulableParam<T,R> param ) : org.apache.spark.Accumulable<T,R>
[mangled: org/apache/spark/api/java/JavaSparkContext.accumulable:(Ljava/lang/Object;Ljava/lang/String;Lorg/apache/spark/AccumulableParam;)Lorg/apache/spark/Accumulable;]
JavaSparkContext.accumulator ( double initialValue, String name ) : org.apache.spark.Accumulator<Double>
[mangled: org/apache/spark/api/java/JavaSparkContext.accumulator:(DLjava/lang/String;)Lorg/apache/spark/Accumulator;]
JavaSparkContext.accumulator ( int initialValue, String name ) : org.apache.spark.Accumulator<Integer>
[mangled: org/apache/spark/api/java/JavaSparkContext.accumulator:(ILjava/lang/String;)Lorg/apache/spark/Accumulator;]
JavaSparkContext.accumulator ( T initialValue, String name, org.apache.spark.AccumulatorParam<T> accumulatorParam ) : org.apache.spark.Accumulator<T>
[mangled: org/apache/spark/api/java/JavaSparkContext.accumulator:(Ljava/lang/Object;Ljava/lang/String;Lorg/apache/spark/AccumulatorParam;)Lorg/apache/spark/Accumulator;]
JavaSparkContext.doubleAccumulator ( double initialValue, String name ) : org.apache.spark.Accumulator<Double>
[mangled: org/apache/spark/api/java/JavaSparkContext.doubleAccumulator:(DLjava/lang/String;)Lorg/apache/spark/Accumulator;]
JavaSparkContext.emptyRDD ( ) : JavaRDD<T>
[mangled: org/apache/spark/api/java/JavaSparkContext.emptyRDD:()Lorg/apache/spark/api/java/JavaRDD;]
JavaSparkContext.intAccumulator ( int initialValue, String name ) : org.apache.spark.Accumulator<Integer>
[mangled: org/apache/spark/api/java/JavaSparkContext.intAccumulator:(ILjava/lang/String;)Lorg/apache/spark/Accumulator;]
JavaSparkContext.version ( ) : String
[mangled: org/apache/spark/api/java/JavaSparkContext.version:()Ljava/lang/String;]
spark-core_2.10-1.1.0.jar, RDD<T>.class
package org.apache.spark.rdd
RDD<T>.countApproxDistinct ( int p, int sp ) : long
[mangled: org/apache/spark/rdd/RDD<T>.countApproxDistinct:(II)J]
RDD<T>.creationSite ( ) : org.apache.spark.util.CallSite
[mangled: org/apache/spark/rdd/RDD<T>.creationSite:()Lorg/apache/spark/util/CallSite;]
RDD<T>.logName ( ) : String
[mangled: org/apache/spark/rdd/RDD<T>.logName:()Ljava/lang/String;]
RDD<T>.retag ( Class<T> cls ) : RDD<T>
[mangled: org/apache/spark/rdd/RDD<T>.retag:(Ljava/lang/Class;)Lorg/apache/spark/rdd/RDD;]
RDD<T>.retag ( scala.reflect.ClassTag<T> classTag ) : RDD<T>
[mangled: org/apache/spark/rdd/RDD<T>.retag:(Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
RDD<T>.sortBy ( scala.Function1<T,K> f, boolean ascending, int numPartitions, scala.math.Ordering<K> ord, scala.reflect.ClassTag<K> ctag ) : RDD<T>
[mangled: org/apache/spark/rdd/RDD<T>.sortBy:(Lscala/Function1;ZILscala/math/Ordering;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
spark-core_2.10-1.1.0.jar, SparkConf.class
package org.apache.spark
SparkConf.getenv ( String name ) : String
[mangled: org/apache/spark/SparkConf.getenv:(Ljava/lang/String;)Ljava/lang/String;]
SparkConf.isAkkaConf ( String p1 ) [static] : boolean
[mangled: org/apache/spark/SparkConf.isAkkaConf:(Ljava/lang/String;)Z]
SparkConf.isExecutorStartupConf ( String p1 ) [static] : boolean
[mangled: org/apache/spark/SparkConf.isExecutorStartupConf:(Ljava/lang/String;)Z]
SparkConf.isSparkPortConf ( String p1 ) [static] : boolean
[mangled: org/apache/spark/SparkConf.isSparkPortConf:(Ljava/lang/String;)Z]
SparkConf.logName ( ) : String
[mangled: org/apache/spark/SparkConf.logName:()Ljava/lang/String;]
SparkConf.settings ( ) : scala.collection.mutable.HashMap<String,String>
[mangled: org/apache/spark/SparkConf.settings:()Lscala/collection/mutable/HashMap;]
spark-core_2.10-1.1.0.jar, SparkContext.class
package org.apache.spark
SparkContext.accumulable ( T initialValue, String name, AccumulableParam<T,R> param ) : Accumulable<T,R>
[mangled: org/apache/spark/SparkContext.accumulable:(Ljava/lang/Object;Ljava/lang/String;Lorg/apache/spark/AccumulableParam;)Lorg/apache/spark/Accumulable;]
SparkContext.accumulator ( T initialValue, String name, AccumulatorParam<T> param ) : Accumulator<T>
[mangled: org/apache/spark/SparkContext.accumulator:(Ljava/lang/Object;Ljava/lang/String;Lorg/apache/spark/AccumulatorParam;)Lorg/apache/spark/Accumulator;]
SparkContext.clean ( F f, boolean checkSerializable ) : F
[mangled: org/apache/spark/SparkContext.clean:(Ljava/lang/Object;Z)Ljava/lang/Object;]
SparkContext.getCallSite ( ) : util.CallSite
[mangled: org/apache/spark/SparkContext.getCallSite:()Lorg/apache/spark/util/CallSite;]
SparkContext.logName ( ) : String
[mangled: org/apache/spark/SparkContext.logName:()Ljava/lang/String;]
spark-core_2.10-1.1.0.jar, TaskContext.class
package org.apache.spark
TaskContext.addTaskCompletionListener ( util.TaskCompletionListener listener ) : TaskContext
[mangled: org/apache/spark/TaskContext.addTaskCompletionListener:(Lorg/apache/spark/util/TaskCompletionListener;)Lorg/apache/spark/TaskContext;]
TaskContext.addTaskCompletionListener ( scala.Function1<TaskContext,scala.runtime.BoxedUnit> f ) : TaskContext
[mangled: org/apache/spark/TaskContext.addTaskCompletionListener:(Lscala/Function1;)Lorg/apache/spark/TaskContext;]
TaskContext.isCompleted ( ) : boolean
[mangled: org/apache/spark/TaskContext.isCompleted:()Z]
TaskContext.isInterrupted ( ) : boolean
[mangled: org/apache/spark/TaskContext.isInterrupted:()Z]
TaskContext.markInterrupted ( ) : void
[mangled: org/apache/spark/TaskContext.markInterrupted:()V]
TaskContext.markTaskCompleted ( ) : void
[mangled: org/apache/spark/TaskContext.markTaskCompleted:()V]
spark-sql_2.10-1.1.0.jar, ArrayType.class
package org.apache.spark.sql.api.java
ArrayType.getElementType ( ) : DataType
[mangled: org/apache/spark/sql/api/java/ArrayType.getElementType:()Lorg/apache/spark/sql/api/java/DataType;]
ArrayType.isContainsNull ( ) : boolean
[mangled: org/apache/spark/sql/api/java/ArrayType.isContainsNull:()Z]
spark-sql_2.10-1.1.0.jar, DataType.class
package org.apache.spark.sql.api.java
DataType.createArrayType ( DataType elementType ) [static] : ArrayType
[mangled: org/apache/spark/sql/api/java/DataType.createArrayType:(Lorg/apache/spark/sql/api/java/DataType;)Lorg/apache/spark/sql/api/java/ArrayType;]
DataType.createArrayType ( DataType elementType, boolean containsNull ) [static] : ArrayType
[mangled: org/apache/spark/sql/api/java/DataType.createArrayType:(Lorg/apache/spark/sql/api/java/DataType;Z)Lorg/apache/spark/sql/api/java/ArrayType;]
DataType.createMapType ( DataType keyType, DataType valueType ) [static] : MapType
[mangled: org/apache/spark/sql/api/java/DataType.createMapType:(Lorg/apache/spark/sql/api/java/DataType;Lorg/apache/spark/sql/api/java/DataType;)Lorg/apache/spark/sql/api/java/MapType;]
DataType.createMapType ( DataType keyType, DataType valueType, boolean valueContainsNull ) [static] : MapType
[mangled: org/apache/spark/sql/api/java/DataType.createMapType:(Lorg/apache/spark/sql/api/java/DataType;Lorg/apache/spark/sql/api/java/DataType;Z)Lorg/apache/spark/sql/api/java/MapType;]
DataType.createStructField ( String name, DataType dataType, boolean nullable ) [static] : StructField
[mangled: org/apache/spark/sql/api/java/DataType.createStructField:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/DataType;Z)Lorg/apache/spark/sql/api/java/StructField;]
DataType.createStructType ( java.util.List<StructField> fields ) [static] : StructType
[mangled: org/apache/spark/sql/api/java/DataType.createStructType:(Ljava/util/List;)Lorg/apache/spark/sql/api/java/StructType;]
DataType.createStructType ( StructField[ ] fields ) [static] : StructType
[mangled: org/apache/spark/sql/api/java/DataType.createStructType:([Lorg/apache/spark/sql/api/java/StructField;)Lorg/apache/spark/sql/api/java/StructType;]
DataType.DataType ( )
[mangled: org/apache/spark/sql/api/java/DataType."<init>":()V]
spark-sql_2.10-1.1.0.jar, JavaSchemaRDD.class
package org.apache.spark.sql.api.java
JavaSchemaRDD.baseLogicalPlan ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
JavaSchemaRDD.partitions ( ) : java.util.List<org.apache.spark.Partition>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.partitions:()Ljava/util/List;]
JavaSchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.printSchema:()V]
JavaSchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.registerTempTable:(Ljava/lang/String;)V]
JavaSchemaRDD.schema ( ) : StructType
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schema:()Lorg/apache/spark/sql/api/java/StructType;]
JavaSchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schemaString:()Ljava/lang/String;]
spark-sql_2.10-1.1.0.jar, JavaSQLContext.class
package org.apache.spark.sql.api.java
JavaSQLContext.applySchema ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.applySchema:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonFile ( String path ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonFile ( String path, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.registerFunction ( String name, UDF10<?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF10;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF11<?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF11;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF12<?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF12;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF13<?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF13;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF14;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF15;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF16;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF17;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF18;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF19;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF1<?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF1;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF20;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF21;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF22;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF2<?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF2;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF3<?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF3;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF4<?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF4;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF5<?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF5;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF6<?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF6;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF7<?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF7;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF8<?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF8;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF9<?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF9;Lorg/apache/spark/sql/api/java/DataType;)V]
spark-sql_2.10-1.1.0.jar, MapType.class
package org.apache.spark.sql.api.java
MapType.getKeyType ( ) : DataType
[mangled: org/apache/spark/sql/api/java/MapType.getKeyType:()Lorg/apache/spark/sql/api/java/DataType;]
MapType.getValueType ( ) : DataType
[mangled: org/apache/spark/sql/api/java/MapType.getValueType:()Lorg/apache/spark/sql/api/java/DataType;]
MapType.isValueContainsNull ( ) : boolean
[mangled: org/apache/spark/sql/api/java/MapType.isValueContainsNull:()Z]
spark-sql_2.10-1.1.0.jar, Row.class
package org.apache.spark.sql.api.java
Row.canEqual ( Object other ) : boolean
[mangled: org/apache/spark/sql/api/java/Row.canEqual:(Ljava/lang/Object;)Z]
Row.create ( Object... p1 ) [static] : Row
[mangled: org/apache/spark/sql/api/java/Row.create:([Ljava/lang/Object;)Lorg/apache/spark/sql/api/java/Row;]
Row.create ( scala.collection.Seq<Object> p1 ) [static] : Row
[mangled: org/apache/spark/sql/api/java/Row.create:(Lscala/collection/Seq;)Lorg/apache/spark/sql/api/java/Row;]
Row.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/api/java/Row.equals:(Ljava/lang/Object;)Z]
Row.hashCode ( ) : int
[mangled: org/apache/spark/sql/api/java/Row.hashCode:()I]
spark-sql_2.10-1.1.0.jar, SchemaRDD.class
package org.apache.spark.sql
SchemaRDD.baseLogicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SchemaRDD.collect ( ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.collect:()Ljava/lang/Object;]
SchemaRDD.collect ( ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.collect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SchemaRDD.except ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.except:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.intersect ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersect:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.limit ( int limitNum ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.limit:(I)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.SchemaRDDLike._setter_.logicalPlan_.eq ( catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/SchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/SchemaRDD.printSchema:()V]
SchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SchemaRDD.registerTempTable:(Ljava/lang/String;)V]
SchemaRDD.schema ( ) : catalyst.types.StructType
[mangled: org/apache/spark/sql/SchemaRDD.schema:()Lorg/apache/spark/sql/catalyst/types/StructType;]
SchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/SchemaRDD.schemaString:()Ljava/lang/String;]
SchemaRDD.take ( int num ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)Ljava/lang/Object;]
SchemaRDD.take ( int num ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)[Lorg/apache/spark/sql/catalyst/expressions/Row;]
spark-sql_2.10-1.1.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.applySchema ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> rowRDD, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchema:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.approxCountDistinct ( catalyst.expressions.Expression e, double rsd ) : catalyst.expressions.ApproxCountDistinct
[mangled: org/apache/spark/sql/SQLContext.approxCountDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;D)Lorg/apache/spark/sql/catalyst/expressions/ApproxCountDistinct;]
SQLContext.autoBroadcastJoinThreshold ( ) : int
[mangled: org/apache/spark/sql/SQLContext.autoBroadcastJoinThreshold:()I]
SQLContext.avg ( catalyst.expressions.Expression e ) : catalyst.expressions.Average
[mangled: org/apache/spark/sql/SQLContext.avg:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Average;]
SQLContext.clear ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clear:()V]
SQLContext.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.codegenEnabled:()Z]
SQLContext.columnBatchSize ( ) : int
[mangled: org/apache/spark/sql/SQLContext.columnBatchSize:()I]
SQLContext.count ( catalyst.expressions.Expression e ) : catalyst.expressions.Count
[mangled: org/apache/spark/sql/SQLContext.count:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Count;]
SQLContext.countDistinct ( scala.collection.Seq<catalyst.expressions.Expression> e ) : catalyst.expressions.CountDistinct
[mangled: org/apache/spark/sql/SQLContext.countDistinct:(Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/CountDistinct;]
SQLContext.defaultSizeInBytes ( ) : long
[mangled: org/apache/spark/sql/SQLContext.defaultSizeInBytes:()J]
SQLContext.dialect ( ) : String
[mangled: org/apache/spark/sql/SQLContext.dialect:()Ljava/lang/String;]
SQLContext.emptyResult ( ) : org.apache.spark.rdd.RDD<catalyst.expressions.Row>
[mangled: org/apache/spark/sql/SQLContext.emptyResult:()Lorg/apache/spark/rdd/RDD;]
SQLContext.first ( catalyst.expressions.Expression e ) : catalyst.expressions.First
[mangled: org/apache/spark/sql/SQLContext.first:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/First;]
SQLContext.functionRegistry ( ) : catalyst.analysis.FunctionRegistry
[mangled: org/apache/spark/sql/SQLContext.functionRegistry:()Lorg/apache/spark/sql/catalyst/analysis/FunctionRegistry;]
SQLContext.getAllConfs ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.getAllConfs:()Lscala/collection/immutable/Map;]
SQLContext.getConf ( String key ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;)Ljava/lang/String;]
SQLContext.getConf ( String key, String defaultValue ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;]
SQLContext.isCached ( String tableName ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isCached:(Ljava/lang/String;)Z]
SQLContext.isParquetBinaryAsString ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isParquetBinaryAsString:()Z]
SQLContext.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isTraceEnabled:()Z]
SQLContext.jsonFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.log:()Lorg/slf4j/Logger;]
SQLContext.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;)V]
SQLContext.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;)V]
SQLContext.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;)V]
SQLContext.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logName ( ) : String
[mangled: org/apache/spark/sql/SQLContext.logName:()Ljava/lang/String;]
SQLContext.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;)V]
SQLContext.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;)V]
SQLContext.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.lower ( catalyst.expressions.Expression e ) : catalyst.expressions.Lower
[mangled: org/apache/spark/sql/SQLContext.lower:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Lower;]
SQLContext.max ( catalyst.expressions.Expression e ) : catalyst.expressions.Max
[mangled: org/apache/spark/sql/SQLContext.max:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Max;]
SQLContext.min ( catalyst.expressions.Expression e ) : catalyst.expressions.Min
[mangled: org/apache/spark/sql/SQLContext.min:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Min;]
SQLContext.numShufflePartitions ( ) : int
[mangled: org/apache/spark/sql/SQLContext.numShufflePartitions:()I]
SQLContext.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
SQLContext.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
SQLContext.SQLConf._setter_.settings_.eq ( java.util.Map p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.SQLConf._setter_.settings_.eq:(Ljava/util/Map;)V]
SQLContext.parquetCompressionCodec ( ) : String
[mangled: org/apache/spark/sql/SQLContext.parquetCompressionCodec:()Ljava/lang/String;]
SQLContext.parseDataType ( String dataTypeString ) : catalyst.types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/types/DataType;]
SQLContext.registerFunction ( String name, scala.Function10<?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function10;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function11<?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function11;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function12<?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function12;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function13<?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function13;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function14;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function15;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function16;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function17;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function18;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function19;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function1<?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function20;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function21;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function22;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function2<?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function2;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function3<?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function3;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function4<?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function4;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function5<?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function5;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function6<?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function6;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function7<?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function7;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function8<?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function8;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function9<?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function9;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType ) : void
[mangled: org/apache/spark/sql/SQLContext.registerPython:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Lorg/apache/spark/Accumulator;Ljava/lang/String;)V]
SQLContext.setConf ( java.util.Properties props ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/util/Properties;)V]
SQLContext.setConf ( String key, String value ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/lang/String;Ljava/lang/String;)V]
SQLContext.settings ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.settings:()Ljava/util/Map;]
SQLContext.sum ( catalyst.expressions.Expression e ) : catalyst.expressions.Sum
[mangled: org/apache/spark/sql/SQLContext.sum:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sum;]
SQLContext.sumDistinct ( catalyst.expressions.Expression e ) : catalyst.expressions.SumDistinct
[mangled: org/apache/spark/sql/SQLContext.sumDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/SumDistinct;]
SQLContext.upper ( catalyst.expressions.Expression e ) : catalyst.expressions.Upper
[mangled: org/apache/spark/sql/SQLContext.upper:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Upper;]
SQLContext.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.useCompression:()Z]
spark-sql_2.10-1.1.0.jar, StructField.class
package org.apache.spark.sql.api.java
StructField.equals ( Object o ) : boolean
[mangled: org/apache/spark/sql/api/java/StructField.equals:(Ljava/lang/Object;)Z]
StructField.getDataType ( ) : DataType
[mangled: org/apache/spark/sql/api/java/StructField.getDataType:()Lorg/apache/spark/sql/api/java/DataType;]
StructField.getName ( ) : String
[mangled: org/apache/spark/sql/api/java/StructField.getName:()Ljava/lang/String;]
StructField.hashCode ( ) : int
[mangled: org/apache/spark/sql/api/java/StructField.hashCode:()I]
StructField.isNullable ( ) : boolean
[mangled: org/apache/spark/sql/api/java/StructField.isNullable:()Z]
StructField.StructField ( String name, DataType dataType, boolean nullable )
[mangled: org/apache/spark/sql/api/java/StructField."<init>":(Ljava/lang/String;Lorg/apache/spark/sql/api/java/DataType;Z)V]
spark-sql_2.10-1.1.0.jar, StructType.class
package org.apache.spark.sql.api.java
StructType.getFields ( ) : StructField[ ]
[mangled: org/apache/spark/sql/api/java/StructType.getFields:()[Lorg/apache/spark/sql/api/java/StructField;]
to the top
Problems with Data Types, High Severity (9)
spark-sql_2.10-1.1.0.jar
package org.apache.spark.sql
[+] SQLContext (3)
| Change | Effect |
---|
1 | Removed super-interface org.apache.spark.Logging. | A client program may be interrupted by NoSuchMethodError exception. |
2 | Removed super-interface SQLConf. | A client program may be interrupted by NoSuchMethodError exception. |
3 | Removed super-interface UDFRegistration. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (42)
JavaSchemaRDD ( SQLContext, catalyst.plans.logical.LogicalPlan ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
JavaSQLContext ( SQLContext ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
SchemaRDD ( SQLContext, catalyst.plans.logical.LogicalPlan ) — 1st parameter 'sqlContext' of this method has type 'SQLContext'.
sqlContext ( ) — Return value of this method has type 'SQLContext'.
analyzer ( ) — This method is from 'SQLContext' class.
binaryToLiteral ( byte[ ] ) — This method is from 'SQLContext' class.
booleanToLiteral ( boolean ) — This method is from 'SQLContext' class.
byteToLiteral ( byte ) — This method is from 'SQLContext' class.
cacheTable ( java.lang.String ) — This method is from 'SQLContext' class.
catalog ( ) — This method is from 'SQLContext' class.
createParquetFile ( java.lang.String, boolean, org.apache.hadoop.conf.Configuration, scala.reflect.api.TypeTags.TypeTag<A> ) — This method is from 'SQLContext' class.
createSchemaRDD ( org.apache.spark.rdd.RDD<A>, scala.reflect.api.TypeTags.TypeTag<A> ) — This method is from 'SQLContext' class.
decimalToLiteral ( scala.math.BigDecimal ) — This method is from 'SQLContext' class.
doubleToLiteral ( double ) — This method is from 'SQLContext' class.
DslAttribute ( catalyst.expressions.AttributeReference ) — This method is from 'SQLContext' class.
DslExpression ( catalyst.expressions.Expression ) — This method is from 'SQLContext' class.
DslString ( java.lang.String ) — This method is from 'SQLContext' class.
DslSymbol ( scala.Symbol ) — This method is from 'SQLContext' class.
executePlan ( catalyst.plans.logical.LogicalPlan ) — This method is from 'SQLContext' class.
executeSql ( java.lang.String ) — This method is from 'SQLContext' class.
floatToLiteral ( float ) — This method is from 'SQLContext' class.
intToLiteral ( int ) — This method is from 'SQLContext' class.
logicalPlanToSparkQuery ( catalyst.plans.logical.LogicalPlan ) — This method is from 'SQLContext' class.
longToLiteral ( long ) — This method is from 'SQLContext' class.
optimizer ( ) — This method is from 'SQLContext' class.
parquetFile ( java.lang.String ) — This method is from 'SQLContext' class.
parser ( ) — This method is from 'SQLContext' class.
parseSql ( java.lang.String ) — This method is from 'SQLContext' class.
planner ( ) — This method is from 'SQLContext' class.
prepareForExecution ( ) — This method is from 'SQLContext' class.
registerRDDAsTable ( SchemaRDD, java.lang.String ) — This method is from 'SQLContext' class.
shortToLiteral ( short ) — This method is from 'SQLContext' class.
sparkContext ( ) — This method is from 'SQLContext' class.
sql ( java.lang.String ) — This method is from 'SQLContext' class.
SQLContext ( org.apache.spark.SparkContext ) — This constructor is from 'SQLContext' class.
stringToLiteral ( java.lang.String ) — This method is from 'SQLContext' class.
symbolToUnresolvedAttribute ( scala.Symbol ) — This method is from 'SQLContext' class.
table ( java.lang.String ) — This method is from 'SQLContext' class.
timestampToLiteral ( java.sql.Timestamp ) — This method is from 'SQLContext' class.
uncacheTable ( java.lang.String ) — This method is from 'SQLContext' class.
package org.apache.spark.sql.api.java
[+] ArrayType (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (2)
getElementType ( ) — This method is from 'ArrayType' class.
isContainsNull ( ) — This method is from 'ArrayType' class.
[+] DataType (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (8)
createArrayType ( DataType ) — This method is from 'DataType' abstract class.
createArrayType ( DataType, boolean ) — This method is from 'DataType' abstract class.
createMapType ( DataType, DataType ) — This method is from 'DataType' abstract class.
createMapType ( DataType, DataType, boolean ) — This method is from 'DataType' abstract class.
createStructField ( java.lang.String, DataType, boolean ) — This method is from 'DataType' abstract class.
createStructType ( java.util.List<StructField> ) — This method is from 'DataType' abstract class.
createStructType ( StructField[ ] ) — This method is from 'DataType' abstract class.
DataType ( ) — This constructor is from 'DataType' abstract class.
[+] JavaSQLContext (1)
| Change | Effect |
---|
1 | Removed super-interface UDFRegistration. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (9)
applySchema ( org.apache.spark.api.java.JavaRDD<?>, java.lang.Class<?> ) — This method is from 'JavaSQLContext' class.
createParquetFile ( java.lang.Class<?>, java.lang.String, boolean, org.apache.hadoop.conf.Configuration ) — This method is from 'JavaSQLContext' class.
getSchema ( java.lang.Class<?> ) — This method is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.api.java.JavaSparkContext ) — This constructor is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.sql.SQLContext ) — This constructor is from 'JavaSQLContext' class.
parquetFile ( java.lang.String ) — This method is from 'JavaSQLContext' class.
registerRDDAsTable ( JavaSchemaRDD, java.lang.String ) — This method is from 'JavaSQLContext' class.
sql ( java.lang.String ) — This method is from 'JavaSQLContext' class.
sqlContext ( ) — This method is from 'JavaSQLContext' class.
[+] MapType (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (3)
getKeyType ( ) — This method is from 'MapType' class.
getValueType ( ) — This method is from 'MapType' class.
isValueContainsNull ( ) — This method is from 'MapType' class.
[+] StructField (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (6)
equals ( java.lang.Object ) — This method is from 'StructField' class.
getDataType ( ) — This method is from 'StructField' class.
getName ( ) — This method is from 'StructField' class.
hashCode ( ) — This method is from 'StructField' class.
isNullable ( ) — This method is from 'StructField' class.
StructField ( java.lang.String, DataType, boolean ) — This constructor is from 'StructField' class.
[+] StructType (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
getFields ( ) — This method is from 'StructType' class.
to the top
Java ARchives (2)
spark-core_2.10-1.1.0.jar
spark-sql_2.10-1.1.0.jar
to the top
Generated on Mon Jun 29 20:38:25 2015 for elasticsearch-spark_2.10-2.1.0.Beta3 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API