Binary compatibility report for the spark-avro_2.10-0.2.0 library between 1.2.0 and 1.3.0 versions (relating to the portability of client application spark-avro_2.10-0.2.0.jar)
Test Info

| Library Name | spark-avro_2.10-0.2.0 |
|---|---|
| Version #1 | 1.2.0 |
| Version #2 | 1.3.0 |
| Java Version | 1.7.0_75 |
Test Results

| Total Java ARchives | 1 |
|---|---|
| Total Methods / Classes | 417 / 405 |
| Verdict | Incompatible (87.1%) |
Problem Summary

| Category | Severity | Count |
|---|---|---|
| Added Methods | - | 54 |
| Removed Methods | High | 323 |
| Problems with Data Types | High | 9 |
| Problems with Data Types | Medium | 1 |
| Problems with Data Types | Low | 0 |
| Problems with Methods | High | 0 |
| Problems with Methods | Medium | 0 |
| Problems with Methods | Low | 0 |
Added Methods (54)
spark-sql_2.10-1.3.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.baseRelationToDataFrame ( sources.BaseRelation baseRelation ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.baseRelationToDataFrame:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.cacheManager ( ) : CacheManager
[mangled: org/apache/spark/sql/SQLContext.cacheManager:()Lorg/apache/spark/sql/CacheManager;]
SQLContext.checkAnalysis ( ) : catalyst.analysis.CheckAnalysis
[mangled: org/apache/spark/sql/SQLContext.checkAnalysis:()Lorg/apache/spark/sql/catalyst/analysis/CheckAnalysis;]
SQLContext.conf ( ) : SQLConf
[mangled: org/apache/spark/sql/SQLContext.conf:()Lorg/apache/spark/sql/SQLConf;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, java.util.List<String> columns ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/util/List;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( scala.collection.Seq<A> data, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lscala/collection/Seq;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.emptyDataFrame ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.emptyDataFrame:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.experimental ( ) : ExperimentalMethods
[mangled: org/apache/spark/sql/SQLContext.experimental:()Lorg/apache/spark/sql/ExperimentalMethods;]
SQLContext.getSchema ( Class<?> beanClass ) : scala.collection.Seq<catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/SQLContext.getSchema:(Ljava/lang/Class;)Lscala/collection/Seq;]
SQLContext.implicits ( ) : SQLContext.implicits.
[mangled: org/apache/spark/sql/SQLContext.implicits:()Lorg/apache/spark/sql/SQLContext$implicits$;]
SQLContext.jdbc ( String url, String table ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String columnName, long lowerBound, long upperBound, int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;JJI)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String[ ] theParts ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parquetFile ( scala.collection.Seq<String> paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parquetFile ( String... paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parseDataType ( String dataTypeString ) : types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/types/DataType;]
SQLContext.registerDataFrameAsTable ( DataFrame df, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerDataFrameAsTable:(Lorg/apache/spark/sql/DataFrame;Ljava/lang/String;)V]
SQLContext.sql ( String sqlText ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.SQLContext ( org.apache.spark.api.java.JavaSparkContext sparkContext )
[mangled: org/apache/spark/sql/SQLContext."<init>":(Lorg/apache/spark/api/java/JavaSparkContext;)V]
SQLContext.sqlParser ( ) : SparkSQLParser
[mangled: org/apache/spark/sql/SQLContext.sqlParser:()Lorg/apache/spark/sql/SparkSQLParser;]
SQLContext.table ( String tableName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.tableNames ( ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:()[Ljava/lang/String;]
SQLContext.tableNames ( String databaseName ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:(Ljava/lang/String;)[Ljava/lang/String;]
SQLContext.tables ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.tables ( String databaseName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.udf ( ) : UDFRegistration
[mangled: org/apache/spark/sql/SQLContext.udf:()Lorg/apache/spark/sql/UDFRegistration;]
to the top
Removed Methods (323)
spark-sql_2.10-1.2.0.jar, JavaSchemaRDD.class
package org.apache.spark.sql.api.java
JavaSchemaRDD.aggregate ( U zeroValue, org.apache.spark.api.java.function.Function2<U,Row,U> seqOp, org.apache.spark.api.java.function.Function2<U,U,U> combOp ) : U
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.aggregate:(Ljava/lang/Object;Lorg/apache/spark/api/java/function/Function2;Lorg/apache/spark/api/java/function/Function2;)Ljava/lang/Object;]
JavaSchemaRDD.baseLogicalPlan ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
JavaSchemaRDD.baseSchemaRDD ( ) : org.apache.spark.sql.SchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.baseSchemaRDD:()Lorg/apache/spark/sql/SchemaRDD;]
JavaSchemaRDD.cache ( ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.cache:()Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.cartesian ( org.apache.spark.api.java.JavaRDDLike<U,?> other ) : org.apache.spark.api.java.JavaPairRDD<Row,U>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.cartesian:(Lorg/apache/spark/api/java/JavaRDDLike;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.checkpoint ( ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.checkpoint:()V]
JavaSchemaRDD.classTag ( ) : scala.reflect.ClassTag<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.classTag:()Lscala/reflect/ClassTag;]
JavaSchemaRDD.coalesce ( int numPartitions, boolean shuffle ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.coalesce:(IZ)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.collect ( ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.collect:()Ljava/util/List;]
JavaSchemaRDD.collectAsync ( ) : org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.collectAsync:()Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.collectPartitions ( int[ ] partitionIds ) : java.util.List<Row>[ ]
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.collectPartitions:([I)[Ljava/util/List;]
JavaSchemaRDD.context ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.context:()Lorg/apache/spark/SparkContext;]
JavaSchemaRDD.count ( ) : long
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.count:()J]
JavaSchemaRDD.countApprox ( long timeout ) : org.apache.spark.partial.PartialResult<org.apache.spark.partial.BoundedDouble>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countApprox:(J)Lorg/apache/spark/partial/PartialResult;]
JavaSchemaRDD.countApprox ( long timeout, double confidence ) : org.apache.spark.partial.PartialResult<org.apache.spark.partial.BoundedDouble>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countApprox:(JD)Lorg/apache/spark/partial/PartialResult;]
JavaSchemaRDD.countApproxDistinct ( double relativeSD ) : long
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countApproxDistinct:(D)J]
JavaSchemaRDD.countAsync ( ) : org.apache.spark.api.java.JavaFutureAction<Long>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countAsync:()Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.countByValue ( ) : java.util.Map<Row,Long>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countByValue:()Ljava/util/Map;]
JavaSchemaRDD.countByValueApprox ( long timeout ) : org.apache.spark.partial.PartialResult<java.util.Map<Row,org.apache.spark.partial.BoundedDouble>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countByValueApprox:(J)Lorg/apache/spark/partial/PartialResult;]
JavaSchemaRDD.countByValueApprox ( long timeout, double confidence ) : org.apache.spark.partial.PartialResult<java.util.Map<Row,org.apache.spark.partial.BoundedDouble>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.countByValueApprox:(JD)Lorg/apache/spark/partial/PartialResult;]
JavaSchemaRDD.distinct ( ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.distinct:()Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.distinct ( int numPartitions ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.distinct:(I)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.filter ( org.apache.spark.api.java.function.Function<Row,Boolean> f ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.filter:(Lorg/apache/spark/api/java/function/Function;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.first ( ) : Object
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.first:()Ljava/lang/Object;]
JavaSchemaRDD.flatMap ( org.apache.spark.api.java.function.FlatMapFunction<Row,U> f ) : org.apache.spark.api.java.JavaRDD<U>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.flatMap:(Lorg/apache/spark/api/java/function/FlatMapFunction;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.flatMapToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<Row> f ) : org.apache.spark.api.java.JavaDoubleRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.flatMapToDouble:(Lorg/apache/spark/api/java/function/DoubleFlatMapFunction;)Lorg/apache/spark/api/java/JavaDoubleRDD;]
JavaSchemaRDD.flatMapToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<Row,K2,V2> f ) : org.apache.spark.api.java.JavaPairRDD<K2,V2>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.flatMapToPair:(Lorg/apache/spark/api/java/function/PairFlatMapFunction;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.fold ( Row zeroValue, org.apache.spark.api.java.function.Function2<Row,Row,Row> f ) : Row
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.fold:(Ljava/lang/Object;Lorg/apache/spark/api/java/function/Function2;)Ljava/lang/Object;]
JavaSchemaRDD.foreach ( org.apache.spark.api.java.function.VoidFunction<Row> f ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreach:(Lorg/apache/spark/api/java/function/VoidFunction;)V]
JavaSchemaRDD.foreachAsync ( org.apache.spark.api.java.function.VoidFunction<Row> f ) : org.apache.spark.api.java.JavaFutureAction<Void>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreachAsync:(Lorg/apache/spark/api/java/function/VoidFunction;)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.foreachPartition ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> f ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreachPartition:(Lorg/apache/spark/api/java/function/VoidFunction;)V]
JavaSchemaRDD.foreachPartitionAsync ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> f ) : org.apache.spark.api.java.JavaFutureAction<Void>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.foreachPartitionAsync:(Lorg/apache/spark/api/java/function/VoidFunction;)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.getCheckpointFile ( ) : com.google.common.base.Optional<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.getCheckpointFile:()Lcom/google/common/base/Optional;]
JavaSchemaRDD.getStorageLevel ( ) : org.apache.spark.storage.StorageLevel
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.getStorageLevel:()Lorg/apache/spark/storage/StorageLevel;]
JavaSchemaRDD.glom ( ) : org.apache.spark.api.java.JavaRDD<java.util.List<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.glom:()Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.groupBy ( org.apache.spark.api.java.function.Function<Row,U> f ) : org.apache.spark.api.java.JavaPairRDD<U,Iterable<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.groupBy:(Lorg/apache/spark/api/java/function/Function;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.groupBy ( org.apache.spark.api.java.function.Function<Row,U> f, int numPartitions ) : org.apache.spark.api.java.JavaPairRDD<U,Iterable<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.groupBy:(Lorg/apache/spark/api/java/function/Function;I)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.id ( ) : int
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.id:()I]
JavaSchemaRDD.insertInto ( String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.insertInto:(Ljava/lang/String;)V]
JavaSchemaRDD.insertInto ( String tableName, boolean overwrite ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.insertInto:(Ljava/lang/String;Z)V]
JavaSchemaRDD.intersection ( JavaSchemaRDD other ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.intersection:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.intersection ( JavaSchemaRDD other, int numPartitions ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.intersection:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;I)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.intersection ( JavaSchemaRDD other, org.apache.spark.Partitioner partitioner ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.intersection:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;Lorg/apache/spark/Partitioner;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.isCheckpointed ( ) : boolean
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.isCheckpointed:()Z]
JavaSchemaRDD.iterator ( org.apache.spark.Partition split, org.apache.spark.TaskContext taskContext ) : java.util.Iterator<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.iterator:(Lorg/apache/spark/Partition;Lorg/apache/spark/TaskContext;)Ljava/util/Iterator;]
JavaSchemaRDD.JavaSchemaRDD ( org.apache.spark.sql.SQLContext sqlContext, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan baseLogicalPlan )
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
JavaSchemaRDD.keyBy ( org.apache.spark.api.java.function.Function<Row,U> f ) : org.apache.spark.api.java.JavaPairRDD<U,Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.keyBy:(Lorg/apache/spark/api/java/function/Function;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.logicalPlan ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.logicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
JavaSchemaRDD.map ( org.apache.spark.api.java.function.Function<Row,R> f ) : org.apache.spark.api.java.JavaRDD<R>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.map:(Lorg/apache/spark/api/java/function/Function;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.mapPartitions ( org.apache.spark.api.java.function.FlatMapFunction<java.util.Iterator<Row>,U> f ) : org.apache.spark.api.java.JavaRDD<U>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitions:(Lorg/apache/spark/api/java/function/FlatMapFunction;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.mapPartitions ( org.apache.spark.api.java.function.FlatMapFunction<java.util.Iterator<Row>,U> f, boolean preservesPartitioning ) : org.apache.spark.api.java.JavaRDD<U>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitions:(Lorg/apache/spark/api/java/function/FlatMapFunction;Z)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.mapPartitionsToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<java.util.Iterator<Row>> f ) : org.apache.spark.api.java.JavaDoubleRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitionsToDouble:(Lorg/apache/spark/api/java/function/DoubleFlatMapFunction;)Lorg/apache/spark/api/java/JavaDoubleRDD;]
JavaSchemaRDD.mapPartitionsToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<java.util.Iterator<Row>> f, boolean preservesPartitioning ) : org.apache.spark.api.java.JavaDoubleRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitionsToDouble:(Lorg/apache/spark/api/java/function/DoubleFlatMapFunction;Z)Lorg/apache/spark/api/java/JavaDoubleRDD;]
JavaSchemaRDD.mapPartitionsToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<java.util.Iterator<Row>,K2,V2> f ) : org.apache.spark.api.java.JavaPairRDD<K2,V2>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitionsToPair:(Lorg/apache/spark/api/java/function/PairFlatMapFunction;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.mapPartitionsToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<java.util.Iterator<Row>,K2,V2> f, boolean preservesPartitioning ) : org.apache.spark.api.java.JavaPairRDD<K2,V2>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitionsToPair:(Lorg/apache/spark/api/java/function/PairFlatMapFunction;Z)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.mapPartitionsWithIndex ( org.apache.spark.api.java.function.Function2<Integer,java.util.Iterator<Row>,java.util.Iterator<R>> f, boolean preservesPartitioning ) : org.apache.spark.api.java.JavaRDD<R>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapPartitionsWithIndex:(Lorg/apache/spark/api/java/function/Function2;Z)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.mapToDouble ( org.apache.spark.api.java.function.DoubleFunction<Row> f ) : org.apache.spark.api.java.JavaDoubleRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapToDouble:(Lorg/apache/spark/api/java/function/DoubleFunction;)Lorg/apache/spark/api/java/JavaDoubleRDD;]
JavaSchemaRDD.mapToPair ( org.apache.spark.api.java.function.PairFunction<Row,K2,V2> f ) : org.apache.spark.api.java.JavaPairRDD<K2,V2>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.mapToPair:(Lorg/apache/spark/api/java/function/PairFunction;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.max ( java.util.Comparator<Row> comp ) : Row
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.max:(Ljava/util/Comparator;)Ljava/lang/Object;]
JavaSchemaRDD.min ( java.util.Comparator<Row> comp ) : Row
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.min:(Ljava/util/Comparator;)Ljava/lang/Object;]
JavaSchemaRDD.name ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.name:()Ljava/lang/String;]
JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike..super.toString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike..super.toString:()Ljava/lang/String;]
JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
JavaSchemaRDD.partitions ( ) : java.util.List<org.apache.spark.Partition>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.partitions:()Ljava/util/List;]
JavaSchemaRDD.persist ( ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.persist:()Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.pipe ( java.util.List<String> command ) : org.apache.spark.api.java.JavaRDD<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.pipe:(Ljava/util/List;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.pipe ( java.util.List<String> command, java.util.Map<String,String> env ) : org.apache.spark.api.java.JavaRDD<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.pipe:(Ljava/util/List;Ljava/util/Map;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.pipe ( String command ) : org.apache.spark.api.java.JavaRDD<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.pipe:(Ljava/lang/String;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.printSchema:()V]
JavaSchemaRDD.queryExecution ( ) : org.apache.spark.sql.SQLContext.QueryExecution
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.queryExecution:()Lorg/apache/spark/sql/SQLContext$QueryExecution;]
JavaSchemaRDD.rdd ( ) : org.apache.spark.rdd.RDD<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.rdd:()Lorg/apache/spark/rdd/RDD;]
JavaSchemaRDD.reduce ( org.apache.spark.api.java.function.Function2<Row,Row,Row> f ) : Row
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.reduce:(Lorg/apache/spark/api/java/function/Function2;)Ljava/lang/Object;]
JavaSchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.registerTempTable:(Ljava/lang/String;)V]
JavaSchemaRDD.repartition ( int numPartitions ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.repartition:(I)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.saveAsObjectFile ( String path ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.saveAsObjectFile:(Ljava/lang/String;)V]
JavaSchemaRDD.saveAsParquetFile ( String path ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.saveAsParquetFile:(Ljava/lang/String;)V]
JavaSchemaRDD.saveAsTable ( String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.saveAsTable:(Ljava/lang/String;)V]
JavaSchemaRDD.saveAsTextFile ( String path ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.saveAsTextFile:(Ljava/lang/String;)V]
JavaSchemaRDD.saveAsTextFile ( String path, Class<? extends org.apache.hadoop.io.compress.CompressionCodec> codec ) : void
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.saveAsTextFile:(Ljava/lang/String;Ljava/lang/Class;)V]
JavaSchemaRDD.schema ( ) : StructType
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schema:()Lorg/apache/spark/sql/api/java/StructType;]
JavaSchemaRDD.schemaRDD ( ) : org.apache.spark.sql.SchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schemaRDD:()Lorg/apache/spark/sql/SchemaRDD;]
JavaSchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.schemaString:()Ljava/lang/String;]
JavaSchemaRDD.setName ( String name ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.setName:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
JavaSchemaRDD.subtract ( JavaSchemaRDD other ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.subtract:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.subtract ( JavaSchemaRDD other, int numPartitions ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.subtract:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;I)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.subtract ( JavaSchemaRDD other, org.apache.spark.Partitioner p ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.subtract:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;Lorg/apache/spark/Partitioner;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.take ( int num ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.take:(I)Ljava/util/List;]
JavaSchemaRDD.takeAsync ( int num ) : org.apache.spark.api.java.JavaFutureAction<java.util.List<Row>>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeAsync:(I)Lorg/apache/spark/api/java/JavaFutureAction;]
JavaSchemaRDD.takeOrdered ( int num ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeOrdered:(I)Ljava/util/List;]
JavaSchemaRDD.takeOrdered ( int num, java.util.Comparator<Row> comp ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeOrdered:(ILjava/util/Comparator;)Ljava/util/List;]
JavaSchemaRDD.takeSample ( boolean withReplacement, int num ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeSample:(ZI)Ljava/util/List;]
JavaSchemaRDD.takeSample ( boolean withReplacement, int num, long seed ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.takeSample:(ZIJ)Ljava/util/List;]
JavaSchemaRDD.toArray ( ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toArray:()Ljava/util/List;]
JavaSchemaRDD.toDebugString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toDebugString:()Ljava/lang/String;]
JavaSchemaRDD.toJSON ( ) : org.apache.spark.api.java.JavaRDD<String>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toJSON:()Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.toLocalIterator ( ) : java.util.Iterator<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toLocalIterator:()Ljava/util/Iterator;]
JavaSchemaRDD.top ( int num ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.top:(I)Ljava/util/List;]
JavaSchemaRDD.top ( int num, java.util.Comparator<Row> comp ) : java.util.List<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.top:(ILjava/util/Comparator;)Ljava/util/List;]
JavaSchemaRDD.toString ( ) : String
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.toString:()Ljava/lang/String;]
JavaSchemaRDD.unpersist ( boolean blocking ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.unpersist:(Z)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSchemaRDD.wrapRDD ( org.apache.spark.rdd.RDD rdd ) : org.apache.spark.api.java.JavaRDDLike
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.wrapRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/api/java/JavaRDDLike;]
JavaSchemaRDD.wrapRDD ( org.apache.spark.rdd.RDD<Row> rdd ) : org.apache.spark.api.java.JavaRDD<Row>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.wrapRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.zip ( org.apache.spark.api.java.JavaRDDLike<U,?> other ) : org.apache.spark.api.java.JavaPairRDD<Row,U>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.zip:(Lorg/apache/spark/api/java/JavaRDDLike;)Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.zipPartitions ( org.apache.spark.api.java.JavaRDDLike<U,?> other, org.apache.spark.api.java.function.FlatMapFunction2<java.util.Iterator<Row>,java.util.Iterator<U>,V> f ) : org.apache.spark.api.java.JavaRDD<V>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.zipPartitions:(Lorg/apache/spark/api/java/JavaRDDLike;Lorg/apache/spark/api/java/function/FlatMapFunction2;)Lorg/apache/spark/api/java/JavaRDD;]
JavaSchemaRDD.zipWithIndex ( ) : org.apache.spark.api.java.JavaPairRDD<Row,Long>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.zipWithIndex:()Lorg/apache/spark/api/java/JavaPairRDD;]
JavaSchemaRDD.zipWithUniqueId ( ) : org.apache.spark.api.java.JavaPairRDD<Row,Long>
[mangled: org/apache/spark/sql/api/java/JavaSchemaRDD.zipWithUniqueId:()Lorg/apache/spark/api/java/JavaPairRDD;]
spark-sql_2.10-1.2.0.jar, JavaSQLContext.class
package org.apache.spark.sql.api.java
JavaSQLContext.applySchema ( org.apache.spark.api.java.JavaRDD<?> rdd, Class<?> beanClass ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.applySchema:(Lorg/apache/spark/api/java/JavaRDD;Ljava/lang/Class;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.applySchema ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.applySchema:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.baseRelationToSchemaRDD ( org.apache.spark.sql.sources.BaseRelation baseRelation ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.baseRelationToSchemaRDD:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.createParquetFile ( Class<?> beanClass, String path, boolean allowExisting, org.apache.hadoop.conf.Configuration conf ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.createParquetFile:(Ljava/lang/Class;Ljava/lang/String;ZLorg/apache/hadoop/conf/Configuration;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.getSchema ( Class<?> beanClass ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.getSchema:(Ljava/lang/Class;)Lscala/collection/Seq;]
JavaSQLContext.JavaSQLContext ( org.apache.spark.api.java.JavaSparkContext sparkContext )
[mangled: org/apache/spark/sql/api/java/JavaSQLContext."<init>":(Lorg/apache/spark/api/java/JavaSparkContext;)V]
JavaSQLContext.JavaSQLContext ( org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/api/java/JavaSQLContext."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
JavaSQLContext.jsonFile ( String path ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonFile ( String path, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, StructType schema ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/api/java/StructType;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.parquetFile ( String path ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.parquetFile:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.registerFunction ( String name, UDF10<?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF10;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF11<?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF11;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF12<?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF12;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF13<?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF13;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF14;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF15;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF16;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF17;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF18;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF19;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF1<?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF1;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF20;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF21;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF22;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF2<?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF2;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF3<?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF3;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF4<?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF4;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF5<?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF5;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF6<?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF6;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF7<?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF7;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF8<?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF8;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerFunction ( String name, UDF9<?,?,?,?,?,?,?,?,?,?> f, DataType dataType ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerFunction:(Ljava/lang/String;Lorg/apache/spark/sql/api/java/UDF9;Lorg/apache/spark/sql/api/java/DataType;)V]
JavaSQLContext.registerRDDAsTable ( JavaSchemaRDD rdd, String tableName ) : void
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.registerRDDAsTable:(Lorg/apache/spark/sql/api/java/JavaSchemaRDD;Ljava/lang/String;)V]
JavaSQLContext.sql ( String sqlText ) : JavaSchemaRDD
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
JavaSQLContext.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/api/java/JavaSQLContext.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
spark-sql_2.10-1.2.0.jar, SchemaRDD.class
package org.apache.spark.sql
SchemaRDD.aggregate ( scala.collection.Seq<catalyst.expressions.Expression> aggregateExprs ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.aggregate:(Lscala/collection/Seq;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.as ( scala.Symbol alias ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.as:(Lscala/Symbol;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.baseLogicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SchemaRDD.baseLogicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SchemaRDD.baseSchemaRDD ( ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.baseSchemaRDD:()Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.cache ( ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.cache:()Lorg/apache/spark/rdd/RDD;]
SchemaRDD.cache ( ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.cache:()Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.coalesce ( int numPartitions, boolean shuffle, scala.math.Ordering ord ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.coalesce:(IZLscala/math/Ordering;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.coalesce ( int numPartitions, boolean shuffle, scala.math.Ordering<catalyst.expressions.Row> ord ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.coalesce:(IZLscala/math/Ordering;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.collect ( ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.collect:()Ljava/lang/Object;]
SchemaRDD.collect ( ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.collect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SchemaRDD.collectToPython ( ) : java.util.List<byte[ ]>
[mangled: org/apache/spark/sql/SchemaRDD.collectToPython:()Ljava/util/List;]
SchemaRDD.compute ( org.apache.spark.Partition split, org.apache.spark.TaskContext context ) : scala.collection.Iterator<catalyst.expressions.Row>
[mangled: org/apache/spark/sql/SchemaRDD.compute:(Lorg/apache/spark/Partition;Lorg/apache/spark/TaskContext;)Lscala/collection/Iterator;]
SchemaRDD.count ( ) : long
[mangled: org/apache/spark/sql/SchemaRDD.count:()J]
SchemaRDD.distinct ( ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.distinct:()Lorg/apache/spark/rdd/RDD;]
SchemaRDD.distinct ( ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.distinct:()Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.distinct ( int numPartitions, scala.math.Ordering ord ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.distinct:(ILscala/math/Ordering;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.distinct ( int numPartitions, scala.math.Ordering<catalyst.expressions.Row> ord ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.distinct:(ILscala/math/Ordering;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.except ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.except:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.filter ( scala.Function1 f ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.filter:(Lscala/Function1;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.filter ( scala.Function1<catalyst.expressions.Row,Object> f ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.filter:(Lscala/Function1;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.generate ( catalyst.expressions.Generator generator, boolean join, boolean outer, scala.Option<String> alias ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.generate:(Lorg/apache/spark/sql/catalyst/expressions/Generator;ZZLscala/Option;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.getDependencies ( ) : scala.collection.Seq<org.apache.spark.Dependency<?>>
[mangled: org/apache/spark/sql/SchemaRDD.getDependencies:()Lscala/collection/Seq;]
SchemaRDD.getPartitions ( ) : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/SchemaRDD.getPartitions:()[Lorg/apache/spark/Partition;]
SchemaRDD.groupBy ( scala.collection.Seq<catalyst.expressions.Expression> groupingExprs, scala.collection.Seq<catalyst.expressions.Expression> aggregateExprs ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.groupBy:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.insertInto ( String tableName ) : void
[mangled: org/apache/spark/sql/SchemaRDD.insertInto:(Ljava/lang/String;)V]
SchemaRDD.insertInto ( String tableName, boolean overwrite ) : void
[mangled: org/apache/spark/sql/SchemaRDD.insertInto:(Ljava/lang/String;Z)V]
SchemaRDD.intersect ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersect:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD other ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD other, int numPartitions ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;I)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD other, org.apache.spark.Partitioner partitioner, scala.math.Ordering ord ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/Partitioner;Lscala/math/Ordering;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other, int numPartitions ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;I)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other, org.apache.spark.Partitioner partitioner, scala.math.Ordering<catalyst.expressions.Row> ord ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.intersection:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/Partitioner;Lscala/math/Ordering;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.javaToPython ( ) : org.apache.spark.api.java.JavaRDD<byte[ ]>
[mangled: org/apache/spark/sql/SchemaRDD.javaToPython:()Lorg/apache/spark/api/java/JavaRDD;]
SchemaRDD.join ( SchemaRDD otherPlan, catalyst.plans.JoinType joinType, scala.Option<catalyst.expressions.Expression> on ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.join:(Lorg/apache/spark/sql/SchemaRDD;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.limit ( int limitNum ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.limit:(I)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.logicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SchemaRDD.logicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SchemaRDD.orderBy ( scala.collection.Seq<catalyst.expressions.SortOrder> sortExprs ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.orderBy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.SchemaRDDLike..super.toString ( ) : String
[mangled: org/apache/spark/sql/SchemaRDD.org.apache.spark.sql.SchemaRDDLike..super.toString:()Ljava/lang/String;]
SchemaRDD.SchemaRDDLike._setter_.logicalPlan_.eq ( catalyst.plans.logical.LogicalPlan p1 ) : void
[mangled: org/apache/spark/sql/SchemaRDD.org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.persist ( org.apache.spark.storage.StorageLevel newLevel ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.printSchema ( ) : void
[mangled: org/apache/spark/sql/SchemaRDD.printSchema:()V]
SchemaRDD.queryExecution ( ) : SQLContext.QueryExecution
[mangled: org/apache/spark/sql/SchemaRDD.queryExecution:()Lorg/apache/spark/sql/SQLContext$QueryExecution;]
SchemaRDD.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SchemaRDD.registerTempTable:(Ljava/lang/String;)V]
SchemaRDD.repartition ( int numPartitions, scala.math.Ordering ord ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.repartition:(ILscala/math/Ordering;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.repartition ( int numPartitions, scala.math.Ordering<catalyst.expressions.Row> ord ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.repartition:(ILscala/math/Ordering;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.sample ( boolean withReplacement, double fraction, long seed ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.sample:(ZDJ)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.sample ( boolean withReplacement, double fraction, long seed ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.sample:(ZDJ)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.saveAsParquetFile ( String path ) : void
[mangled: org/apache/spark/sql/SchemaRDD.saveAsParquetFile:(Ljava/lang/String;)V]
SchemaRDD.saveAsTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SchemaRDD.saveAsTable:(Ljava/lang/String;)V]
SchemaRDD.schema ( ) : catalyst.types.StructType
[mangled: org/apache/spark/sql/SchemaRDD.schema:()Lorg/apache/spark/sql/catalyst/types/StructType;]
SchemaRDD.SchemaRDD ( SQLContext sqlContext, catalyst.plans.logical.LogicalPlan baseLogicalPlan )
[mangled: org/apache/spark/sql/SchemaRDD."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SchemaRDD.schemaString ( ) : String
[mangled: org/apache/spark/sql/SchemaRDD.schemaString:()Ljava/lang/String;]
SchemaRDD.select ( scala.collection.Seq<catalyst.expressions.Expression> exprs ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.select:(Lscala/collection/Seq;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.sqlContext ( ) : SQLContext
[mangled: org/apache/spark/sql/SchemaRDD.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD other ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD other, int numPartitions ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;I)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD other, org.apache.spark.Partitioner p, scala.math.Ordering ord ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/Partitioner;Lscala/math/Ordering;)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other, int numPartitions ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;I)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> other, org.apache.spark.Partitioner p, scala.math.Ordering<catalyst.expressions.Row> ord ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.subtract:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/Partitioner;Lscala/math/Ordering;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.take ( int num ) : Object
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)Ljava/lang/Object;]
SchemaRDD.take ( int num ) : catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/SchemaRDD.take:(I)[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SchemaRDD.toJavaSchemaRDD ( ) : api.java.JavaSchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.toJavaSchemaRDD:()Lorg/apache/spark/sql/api/java/JavaSchemaRDD;]
SchemaRDD.toJSON ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/SchemaRDD.toJSON:()Lorg/apache/spark/rdd/RDD;]
SchemaRDD.toSchemaRDD ( ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.toSchemaRDD:()Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.toString ( ) : String
[mangled: org/apache/spark/sql/SchemaRDD.toString:()Ljava/lang/String;]
SchemaRDD.unionAll ( SchemaRDD otherPlan ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.unionAll:(Lorg/apache/spark/sql/SchemaRDD;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.unpersist ( boolean blocking ) : org.apache.spark.rdd.RDD
[mangled: org/apache/spark/sql/SchemaRDD.unpersist:(Z)Lorg/apache/spark/rdd/RDD;]
SchemaRDD.unpersist ( boolean blocking ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.unpersist:(Z)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.where ( catalyst.expressions.Expression condition ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.where:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.where ( scala.Function1<catalyst.expressions.DynamicRow,Object> dynamicUdf ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.where:(Lscala/Function1;)Lorg/apache/spark/sql/SchemaRDD;]
SchemaRDD.where ( scala.Symbol arg1, scala.Function1<T1,Object> udf ) : SchemaRDD
[mangled: org/apache/spark/sql/SchemaRDD.where:(Lscala/Symbol;Lscala/Function1;)Lorg/apache/spark/sql/SchemaRDD;]
spark-sql_2.10-1.2.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.abs ( catalyst.expressions.Expression e ) : catalyst.expressions.Abs
[mangled: org/apache/spark/sql/SQLContext.abs:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Abs;]
SQLContext.applySchema ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> rowRDD, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchema:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.approxCountDistinct ( catalyst.expressions.Expression e, double rsd ) : catalyst.expressions.ApproxCountDistinct
[mangled: org/apache/spark/sql/SQLContext.approxCountDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;D)Lorg/apache/spark/sql/catalyst/expressions/ApproxCountDistinct;]
SQLContext.autoBroadcastJoinThreshold ( ) : int
[mangled: org/apache/spark/sql/SQLContext.autoBroadcastJoinThreshold:()I]
SQLContext.avg ( catalyst.expressions.Expression e ) : catalyst.expressions.Average
[mangled: org/apache/spark/sql/SQLContext.avg:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Average;]
SQLContext.baseRelationToSchemaRDD ( sources.BaseRelation baseRelation ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.baseRelationToSchemaRDD:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.bigDecimalToLiteral ( scala.math.BigDecimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.bigDecimalToLiteral:(Lscala/math/BigDecimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.binaryToLiteral ( byte[ ] a ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.binaryToLiteral:([B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.booleanToLiteral ( boolean b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.booleanToLiteral:(Z)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.byteToLiteral ( byte b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.byteToLiteral:(B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.cacheQuery ( SchemaRDD query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel ) : void
[mangled: org/apache/spark/sql/SQLContext.cacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Lscala/Option;Lorg/apache/spark/storage/StorageLevel;)V]
SQLContext.clear ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clear:()V]
SQLContext.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.codegenEnabled:()Z]
SQLContext.columnBatchSize ( ) : int
[mangled: org/apache/spark/sql/SQLContext.columnBatchSize:()I]
SQLContext.columnNameOfCorruptRecord ( ) : String
[mangled: org/apache/spark/sql/SQLContext.columnNameOfCorruptRecord:()Ljava/lang/String;]
SQLContext.count ( catalyst.expressions.Expression e ) : catalyst.expressions.Count
[mangled: org/apache/spark/sql/SQLContext.count:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Count;]
SQLContext.countDistinct ( scala.collection.Seq<catalyst.expressions.Expression> e ) : catalyst.expressions.CountDistinct
[mangled: org/apache/spark/sql/SQLContext.countDistinct:(Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/CountDistinct;]
SQLContext.createParquetFile ( String path, boolean allowExisting, org.apache.hadoop.conf.Configuration conf, scala.reflect.api.TypeTags.TypeTag<A> p4 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createParquetFile:(Ljava/lang/String;ZLorg/apache/hadoop/conf/Configuration;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.createSchemaRDD ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createSchemaRDD:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.dateToLiteral ( java.sql.Date d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.dateToLiteral:(Ljava/sql/Date;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.decimalToLiteral ( catalyst.types.decimal.Decimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.decimalToLiteral:(Lorg/apache/spark/sql/catalyst/types/decimal/Decimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.defaultSizeInBytes ( ) : long
[mangled: org/apache/spark/sql/SQLContext.defaultSizeInBytes:()J]
SQLContext.dialect ( ) : String
[mangled: org/apache/spark/sql/SQLContext.dialect:()Ljava/lang/String;]
SQLContext.doubleToLiteral ( double d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.doubleToLiteral:(D)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.DslAttribute ( catalyst.expressions.AttributeReference a ) : catalyst.dsl.package.ExpressionConversions.DslAttribute
[mangled: org/apache/spark/sql/SQLContext.DslAttribute:(Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslAttribute;]
SQLContext.DslExpression ( catalyst.expressions.Expression e ) : catalyst.dsl.package.ExpressionConversions.DslExpression
[mangled: org/apache/spark/sql/SQLContext.DslExpression:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslExpression;]
SQLContext.DslString ( String s ) : catalyst.dsl.package.ExpressionConversions.DslString
[mangled: org/apache/spark/sql/SQLContext.DslString:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslString;]
SQLContext.DslSymbol ( scala.Symbol sym ) : catalyst.dsl.package.ExpressionConversions.DslSymbol
[mangled: org/apache/spark/sql/SQLContext.DslSymbol:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslSymbol;]
SQLContext.externalSortEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.externalSortEnabled:()Z]
SQLContext.extraStrategies ( ) : scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
[mangled: org/apache/spark/sql/SQLContext.extraStrategies:()Lscala/collection/Seq;]
SQLContext.extraStrategies_.eq ( scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>> p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.extraStrategies_.eq:(Lscala/collection/Seq;)V]
SQLContext.first ( catalyst.expressions.Expression e ) : catalyst.expressions.First
[mangled: org/apache/spark/sql/SQLContext.first:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/First;]
SQLContext.floatToLiteral ( float f ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.floatToLiteral:(F)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.inMemoryPartitionPruning ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.inMemoryPartitionPruning:()Z]
SQLContext.intToLiteral ( int i ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.intToLiteral:(I)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.invalidateCache ( catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/SQLContext.invalidateCache:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
SQLContext.isParquetBinaryAsString ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isParquetBinaryAsString:()Z]
SQLContext.jsonFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.last ( catalyst.expressions.Expression e ) : catalyst.expressions.Last
[mangled: org/apache/spark/sql/SQLContext.last:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Last;]
SQLContext.logicalPlanToSparkQuery ( catalyst.plans.logical.LogicalPlan plan ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.logicalPlanToSparkQuery:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.longToLiteral ( long l ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.longToLiteral:(J)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.lookupCachedData ( catalyst.plans.logical.LogicalPlan plan ) : scala.Option<CachedData>
[mangled: org/apache/spark/sql/SQLContext.lookupCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lscala/Option;]
SQLContext.lookupCachedData ( SchemaRDD query ) : scala.Option<CachedData>
[mangled: org/apache/spark/sql/SQLContext.lookupCachedData:(Lorg/apache/spark/sql/SchemaRDD;)Lscala/Option;]
SQLContext.lower ( catalyst.expressions.Expression e ) : catalyst.expressions.Lower
[mangled: org/apache/spark/sql/SQLContext.lower:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Lower;]
SQLContext.max ( catalyst.expressions.Expression e ) : catalyst.expressions.Max
[mangled: org/apache/spark/sql/SQLContext.max:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Max;]
SQLContext.min ( catalyst.expressions.Expression e ) : catalyst.expressions.Min
[mangled: org/apache/spark/sql/SQLContext.min:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Min;]
SQLContext.numShufflePartitions ( ) : int
[mangled: org/apache/spark/sql/SQLContext.numShufflePartitions:()I]
SQLContext.CacheManager..cachedData ( ) : scala.collection.mutable.ArrayBuffer<CachedData>
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager..cachedData:()Lscala/collection/mutable/ArrayBuffer;]
SQLContext.CacheManager..cacheLock ( ) : java.util.concurrent.locks.ReentrantReadWriteLock
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager..cacheLock:()Ljava/util/concurrent/locks/ReentrantReadWriteLock;]
SQLContext.CacheManager._setter_.CacheManager..cachedData_.eq ( scala.collection.mutable.ArrayBuffer p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager._setter_.org.apache.spark.sql.CacheManager..cachedData_.eq:(Lscala/collection/mutable/ArrayBuffer;)V]
SQLContext.CacheManager._setter_.CacheManager..cacheLock_.eq ( java.util.concurrent.locks.ReentrantReadWriteLock p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.CacheManager._setter_.org.apache.spark.sql.CacheManager..cacheLock_.eq:(Ljava/util/concurrent/locks/ReentrantReadWriteLock;)V]
SQLContext.SQLConf._setter_.settings_.eq ( java.util.Map p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.SQLConf._setter_.settings_.eq:(Ljava/util/Map;)V]
SQLContext.parquetCompressionCodec ( ) : String
[mangled: org/apache/spark/sql/SQLContext.parquetCompressionCodec:()Ljava/lang/String;]
SQLContext.parquetFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.parquetFilterPushDown ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.parquetFilterPushDown:()Z]
SQLContext.parseDataType ( String dataTypeString ) : catalyst.types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/types/DataType;]
SQLContext.registerFunction ( String name, scala.Function10<?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function10;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function11<?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function11;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function12<?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function12;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function13<?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function13;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function14;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function15;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function16;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function17;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function18;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function19;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function1<?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function20;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function21;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function22;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function2<?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function2;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function3<?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function3;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function4<?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function4;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function5<?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function5;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function6<?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function6;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function7<?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function7;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function8<?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function8;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function9<?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function9;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType ) : void
[mangled: org/apache/spark/sql/SQLContext.registerPython:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Ljava/lang/String;)V]
SQLContext.registerRDDAsTable ( SchemaRDD rdd, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerRDDAsTable:(Lorg/apache/spark/sql/SchemaRDD;Ljava/lang/String;)V]
SQLContext.settings ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.settings:()Ljava/util/Map;]
SQLContext.shortToLiteral ( short s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.shortToLiteral:(S)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.sql ( String sqlText ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.sqlParser ( ) : catalyst.SparkSQLParser
[mangled: org/apache/spark/sql/SQLContext.sqlParser:()Lorg/apache/spark/sql/catalyst/SparkSQLParser;]
SQLContext.sqrt ( catalyst.expressions.Expression e ) : catalyst.expressions.Sqrt
[mangled: org/apache/spark/sql/SQLContext.sqrt:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sqrt;]
SQLContext.stringToLiteral ( String s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.stringToLiteral:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.sum ( catalyst.expressions.Expression e ) : catalyst.expressions.Sum
[mangled: org/apache/spark/sql/SQLContext.sum:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sum;]
SQLContext.sumDistinct ( catalyst.expressions.Expression e ) : catalyst.expressions.SumDistinct
[mangled: org/apache/spark/sql/SQLContext.sumDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/SumDistinct;]
SQLContext.symbolToUnresolvedAttribute ( scala.Symbol s ) : catalyst.analysis.UnresolvedAttribute
[mangled: org/apache/spark/sql/SQLContext.symbolToUnresolvedAttribute:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/analysis/UnresolvedAttribute;]
SQLContext.table ( String tableName ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.timestampToLiteral ( java.sql.Timestamp t ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.timestampToLiteral:(Ljava/sql/Timestamp;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.tryUncacheQuery ( SchemaRDD query, boolean blocking ) : boolean
[mangled: org/apache/spark/sql/SQLContext.tryUncacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Z)Z]
SQLContext.uncacheQuery ( SchemaRDD query, boolean blocking ) : void
[mangled: org/apache/spark/sql/SQLContext.uncacheQuery:(Lorg/apache/spark/sql/SchemaRDD;Z)V]
SQLContext.upper ( catalyst.expressions.Expression e ) : catalyst.expressions.Upper
[mangled: org/apache/spark/sql/SQLContext.upper:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Upper;]
SQLContext.useCachedData ( catalyst.plans.logical.LogicalPlan plan ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/SQLContext.useCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
SQLContext.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.useCompression:()Z]
spark-sql_2.10-1.2.0.jar, TableScan.class
package org.apache.spark.sql.sources
TableScan.TableScan ( )
[mangled: org/apache/spark/sql/sources/TableScan."<init>":()V]
to the top
Problems with Data Types, High Severity (9)
spark-sql_2.10-1.2.0.jar
package org.apache.spark.sql
[+] SchemaRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (74)
aggregate ( scala.collection.Seq<catalyst.expressions.Expression> )This method is from 'SchemaRDD' class.
as ( scala.Symbol )This method is from 'SchemaRDD' class.
baseLogicalPlan ( )This method is from 'SchemaRDD' class.
baseSchemaRDD ( )This method is from 'SchemaRDD' class.
cache ( )This method is from 'SchemaRDD' class.
cache ( )This method is from 'SchemaRDD' class.
coalesce ( int, boolean, scala.math.Ordering )This method is from 'SchemaRDD' class.
coalesce ( int, boolean, scala.math.Ordering<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
collect ( )This method is from 'SchemaRDD' class.
collect ( )This method is from 'SchemaRDD' class.
collectToPython ( )This method is from 'SchemaRDD' class.
compute ( org.apache.spark.Partition, org.apache.spark.TaskContext )This method is from 'SchemaRDD' class.
count ( )This method is from 'SchemaRDD' class.
distinct ( )This method is from 'SchemaRDD' class.
distinct ( )This method is from 'SchemaRDD' class.
distinct ( int, scala.math.Ordering )This method is from 'SchemaRDD' class.
distinct ( int, scala.math.Ordering<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
except ( SchemaRDD )This method is from 'SchemaRDD' class.
filter ( scala.Function1 )This method is from 'SchemaRDD' class.
filter ( scala.Function1<catalyst.expressions.Row,java.lang.Object> )This method is from 'SchemaRDD' class.
generate ( catalyst.expressions.Generator, boolean, boolean, scala.Option<java.lang.String> )This method is from 'SchemaRDD' class.
getDependencies ( )This method is from 'SchemaRDD' class.
getPartitions ( )This method is from 'SchemaRDD' class.
groupBy ( scala.collection.Seq<catalyst.expressions.Expression>, scala.collection.Seq<catalyst.expressions.Expression> )This method is from 'SchemaRDD' class.
insertInto ( java.lang.String )This method is from 'SchemaRDD' class.
insertInto ( java.lang.String, boolean )This method is from 'SchemaRDD' class.
intersect ( SchemaRDD )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD, int )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD, org.apache.spark.Partitioner, scala.math.Ordering )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row>, int )This method is from 'SchemaRDD' class.
intersection ( org.apache.spark.rdd.RDD<catalyst.expressions.Row>, org.apache.spark.Partitioner, scala.math.Ordering<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
javaToPython ( )This method is from 'SchemaRDD' class.
join ( SchemaRDD, catalyst.plans.JoinType, scala.Option<catalyst.expressions.Expression> )This method is from 'SchemaRDD' class.
limit ( int )This method is from 'SchemaRDD' class.
logicalPlan ( )This method is from 'SchemaRDD' class.
orderBy ( scala.collection.Seq<catalyst.expressions.SortOrder> )This method is from 'SchemaRDD' class.
SchemaRDDLike..super.toString ( )This method is from 'SchemaRDD' class.
SchemaRDDLike._setter_.logicalPlan_.eq ( catalyst.plans.logical.LogicalPlan )This method is from 'SchemaRDD' class.
persist ( org.apache.spark.storage.StorageLevel )This method is from 'SchemaRDD' class.
persist ( org.apache.spark.storage.StorageLevel )This method is from 'SchemaRDD' class.
printSchema ( )This method is from 'SchemaRDD' class.
queryExecution ( )This method is from 'SchemaRDD' class.
registerTempTable ( java.lang.String )This method is from 'SchemaRDD' class.
repartition ( int, scala.math.Ordering )This method is from 'SchemaRDD' class.
repartition ( int, scala.math.Ordering<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
sample ( boolean, double, long )This method is from 'SchemaRDD' class.
sample ( boolean, double, long )This method is from 'SchemaRDD' class.
saveAsParquetFile ( java.lang.String )This method is from 'SchemaRDD' class.
saveAsTable ( java.lang.String )This method is from 'SchemaRDD' class.
schema ( )This method is from 'SchemaRDD' class.
SchemaRDD ( SQLContext, catalyst.plans.logical.LogicalPlan )This constructor is from 'SchemaRDD' class.
schemaString ( )This method is from 'SchemaRDD' class.
select ( scala.collection.Seq<catalyst.expressions.Expression> )This method is from 'SchemaRDD' class.
sqlContext ( )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD, int )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD, org.apache.spark.Partitioner, scala.math.Ordering )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row>, int )This method is from 'SchemaRDD' class.
subtract ( org.apache.spark.rdd.RDD<catalyst.expressions.Row>, org.apache.spark.Partitioner, scala.math.Ordering<catalyst.expressions.Row> )This method is from 'SchemaRDD' class.
take ( int )This method is from 'SchemaRDD' class.
take ( int )This method is from 'SchemaRDD' class.
toJavaSchemaRDD ( )This method is from 'SchemaRDD' class.
toJSON ( )This method is from 'SchemaRDD' class.
toSchemaRDD ( )This method is from 'SchemaRDD' class.
toString ( )This method is from 'SchemaRDD' class.
unionAll ( SchemaRDD )This method is from 'SchemaRDD' class.
unpersist ( boolean )This method is from 'SchemaRDD' class.
unpersist ( boolean )This method is from 'SchemaRDD' class.
where ( catalyst.expressions.Expression )This method is from 'SchemaRDD' class.
where ( scala.Function1<catalyst.expressions.DynamicRow,java.lang.Object> )This method is from 'SchemaRDD' class.
where ( scala.Symbol, scala.Function1<T1,java.lang.Object> )This method is from 'SchemaRDD' class.
[+] SQLContext (4)
| Change | Effect |
---|
1 | Removed super-interface CacheManager. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Removed super-interface SQLConf. | A client program may be interrupted by a NoSuchMethodError exception. |
3 | Removed super-interface UDFRegistration. | A client program may be interrupted by a NoSuchMethodError exception. |
4 | Removed super-interface catalyst.dsl.package.ExpressionConversions. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (39)
createRelation ( SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String> )1st parameter 'p1' of this abstract method has type 'SQLContext'.
analyzer ( )This method is from 'SQLContext' class.
cacheTable ( java.lang.String )This method is from 'SQLContext' class.
catalog ( )This method is from 'SQLContext' class.
clearCache ( )This method is from 'SQLContext' class.
ddlParser ( )This method is from 'SQLContext' class.
dropTempTable ( java.lang.String )This method is from 'SQLContext' class.
emptyResult ( )This method is from 'SQLContext' class.
executePlan ( catalyst.plans.logical.LogicalPlan )This method is from 'SQLContext' class.
executeSql ( java.lang.String )This method is from 'SQLContext' class.
functionRegistry ( )This method is from 'SQLContext' class.
getAllConfs ( )This method is from 'SQLContext' class.
getConf ( java.lang.String )This method is from 'SQLContext' class.
getConf ( java.lang.String, java.lang.String )This method is from 'SQLContext' class.
isCached ( java.lang.String )This method is from 'SQLContext' class.
isTraceEnabled ( )This method is from 'SQLContext' class.
log ( )This method is from 'SQLContext' class.
logDebug ( scala.Function0<java.lang.String> )This method is from 'SQLContext' class.
logDebug ( scala.Function0<java.lang.String>, java.lang.Throwable )This method is from 'SQLContext' class.
logError ( scala.Function0<java.lang.String> )This method is from 'SQLContext' class.
logError ( scala.Function0<java.lang.String>, java.lang.Throwable )This method is from 'SQLContext' class.
logInfo ( scala.Function0<java.lang.String> )This method is from 'SQLContext' class.
logInfo ( scala.Function0<java.lang.String>, java.lang.Throwable )This method is from 'SQLContext' class.
logName ( )This method is from 'SQLContext' class.
logTrace ( scala.Function0<java.lang.String> )This method is from 'SQLContext' class.
logTrace ( scala.Function0<java.lang.String>, java.lang.Throwable )This method is from 'SQLContext' class.
logWarning ( scala.Function0<java.lang.String> )This method is from 'SQLContext' class.
logWarning ( scala.Function0<java.lang.String>, java.lang.Throwable )This method is from 'SQLContext' class.
optimizer ( )This method is from 'SQLContext' class.
org.apache.spark.Logging..log_ ( )This method is from 'SQLContext' class.
org.apache.spark.Logging..log__.eq ( org.slf4j.Logger )This method is from 'SQLContext' class.
parseSql ( java.lang.String )This method is from 'SQLContext' class.
planner ( )This method is from 'SQLContext' class.
prepareForExecution ( )This method is from 'SQLContext' class.
setConf ( java.lang.String, java.lang.String )This method is from 'SQLContext' class.
setConf ( java.util.Properties )This method is from 'SQLContext' class.
sparkContext ( )This method is from 'SQLContext' class.
SQLContext ( org.apache.spark.SparkContext )This constructor is from 'SQLContext' class.
uncacheTable ( java.lang.String )This method is from 'SQLContext' class.
package org.apache.spark.sql.api.java
[+] JavaSchemaRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (108)
aggregate ( U, org.apache.spark.api.java.function.Function2<U,Row,U>, org.apache.spark.api.java.function.Function2<U,U,U> )This method is from 'JavaSchemaRDD' class.
baseLogicalPlan ( )This method is from 'JavaSchemaRDD' class.
baseSchemaRDD ( )This method is from 'JavaSchemaRDD' class.
cache ( )This method is from 'JavaSchemaRDD' class.
cartesian ( org.apache.spark.api.java.JavaRDDLike<U,?> )This method is from 'JavaSchemaRDD' class.
checkpoint ( )This method is from 'JavaSchemaRDD' class.
classTag ( )This method is from 'JavaSchemaRDD' class.
coalesce ( int, boolean )This method is from 'JavaSchemaRDD' class.
collect ( )This method is from 'JavaSchemaRDD' class.
collectAsync ( )This method is from 'JavaSchemaRDD' class.
collectPartitions ( int[ ] )This method is from 'JavaSchemaRDD' class.
context ( )This method is from 'JavaSchemaRDD' class.
count ( )This method is from 'JavaSchemaRDD' class.
countApprox ( long )This method is from 'JavaSchemaRDD' class.
countApprox ( long, double )This method is from 'JavaSchemaRDD' class.
countApproxDistinct ( double )This method is from 'JavaSchemaRDD' class.
countAsync ( )This method is from 'JavaSchemaRDD' class.
countByValue ( )This method is from 'JavaSchemaRDD' class.
countByValueApprox ( long )This method is from 'JavaSchemaRDD' class.
countByValueApprox ( long, double )This method is from 'JavaSchemaRDD' class.
distinct ( )This method is from 'JavaSchemaRDD' class.
distinct ( int )This method is from 'JavaSchemaRDD' class.
filter ( org.apache.spark.api.java.function.Function<Row,java.lang.Boolean> )This method is from 'JavaSchemaRDD' class.
first ( )This method is from 'JavaSchemaRDD' class.
flatMap ( org.apache.spark.api.java.function.FlatMapFunction<Row,U> )This method is from 'JavaSchemaRDD' class.
flatMapToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<Row> )This method is from 'JavaSchemaRDD' class.
flatMapToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<Row,K2,V2> )This method is from 'JavaSchemaRDD' class.
fold ( Row, org.apache.spark.api.java.function.Function2<Row,Row,Row> )This method is from 'JavaSchemaRDD' class.
foreach ( org.apache.spark.api.java.function.VoidFunction<Row> )This method is from 'JavaSchemaRDD' class.
foreachAsync ( org.apache.spark.api.java.function.VoidFunction<Row> )This method is from 'JavaSchemaRDD' class.
foreachPartition ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> )This method is from 'JavaSchemaRDD' class.
foreachPartitionAsync ( org.apache.spark.api.java.function.VoidFunction<java.util.Iterator<Row>> )This method is from 'JavaSchemaRDD' class.
getCheckpointFile ( )This method is from 'JavaSchemaRDD' class.
getStorageLevel ( )This method is from 'JavaSchemaRDD' class.
glom ( )This method is from 'JavaSchemaRDD' class.
groupBy ( org.apache.spark.api.java.function.Function<Row,U> )This method is from 'JavaSchemaRDD' class.
groupBy ( org.apache.spark.api.java.function.Function<Row,U>, int )This method is from 'JavaSchemaRDD' class.
id ( )This method is from 'JavaSchemaRDD' class.
insertInto ( java.lang.String )This method is from 'JavaSchemaRDD' class.
insertInto ( java.lang.String, boolean )This method is from 'JavaSchemaRDD' class.
intersection ( JavaSchemaRDD )This method is from 'JavaSchemaRDD' class.
intersection ( JavaSchemaRDD, int )This method is from 'JavaSchemaRDD' class.
intersection ( JavaSchemaRDD, org.apache.spark.Partitioner )This method is from 'JavaSchemaRDD' class.
isCheckpointed ( )This method is from 'JavaSchemaRDD' class.
iterator ( org.apache.spark.Partition, org.apache.spark.TaskContext )This method is from 'JavaSchemaRDD' class.
JavaSchemaRDD ( org.apache.spark.sql.SQLContext, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This constructor is from 'JavaSchemaRDD' class.
keyBy ( org.apache.spark.api.java.function.Function<Row,U> )This method is from 'JavaSchemaRDD' class.
logicalPlan ( )This method is from 'JavaSchemaRDD' class.
map ( org.apache.spark.api.java.function.Function<Row,R> )This method is from 'JavaSchemaRDD' class.
mapPartitions ( org.apache.spark.api.java.function.FlatMapFunction<java.util.Iterator<Row>,U> )This method is from 'JavaSchemaRDD' class.
mapPartitions ( org.apache.spark.api.java.function.FlatMapFunction<java.util.Iterator<Row>,U>, boolean )This method is from 'JavaSchemaRDD' class.
mapPartitionsToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<java.util.Iterator<Row>> )This method is from 'JavaSchemaRDD' class.
mapPartitionsToDouble ( org.apache.spark.api.java.function.DoubleFlatMapFunction<java.util.Iterator<Row>>, boolean )This method is from 'JavaSchemaRDD' class.
mapPartitionsToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<java.util.Iterator<Row>,K2,V2> )This method is from 'JavaSchemaRDD' class.
mapPartitionsToPair ( org.apache.spark.api.java.function.PairFlatMapFunction<java.util.Iterator<Row>,K2,V2>, boolean )This method is from 'JavaSchemaRDD' class.
mapPartitionsWithIndex ( org.apache.spark.api.java.function.Function2<java.lang.Integer,java.util.Iterator<Row>,java.util.Iterator<R>>, boolean )This method is from 'JavaSchemaRDD' class.
mapToDouble ( org.apache.spark.api.java.function.DoubleFunction<Row> )This method is from 'JavaSchemaRDD' class.
mapToPair ( org.apache.spark.api.java.function.PairFunction<Row,K2,V2> )This method is from 'JavaSchemaRDD' class.
max ( java.util.Comparator<Row> )This method is from 'JavaSchemaRDD' class.
min ( java.util.Comparator<Row> )This method is from 'JavaSchemaRDD' class.
name ( )This method is from 'JavaSchemaRDD' class.
org.apache.spark.sql.SchemaRDDLike..super.toString ( )This method is from 'JavaSchemaRDD' class.
org.apache.spark.sql.SchemaRDDLike._setter_.logicalPlan_.eq ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This method is from 'JavaSchemaRDD' class.
partitions ( )This method is from 'JavaSchemaRDD' class.
persist ( )This method is from 'JavaSchemaRDD' class.
persist ( org.apache.spark.storage.StorageLevel )This method is from 'JavaSchemaRDD' class.
pipe ( java.lang.String )This method is from 'JavaSchemaRDD' class.
pipe ( java.util.List<java.lang.String> )This method is from 'JavaSchemaRDD' class.
pipe ( java.util.List<java.lang.String>, java.util.Map<java.lang.String,java.lang.String> )This method is from 'JavaSchemaRDD' class.
printSchema ( )This method is from 'JavaSchemaRDD' class.
queryExecution ( )This method is from 'JavaSchemaRDD' class.
rdd ( )This method is from 'JavaSchemaRDD' class.
reduce ( org.apache.spark.api.java.function.Function2<Row,Row,Row> )This method is from 'JavaSchemaRDD' class.
registerTempTable ( java.lang.String )This method is from 'JavaSchemaRDD' class.
repartition ( int )This method is from 'JavaSchemaRDD' class.
saveAsObjectFile ( java.lang.String )This method is from 'JavaSchemaRDD' class.
saveAsParquetFile ( java.lang.String )This method is from 'JavaSchemaRDD' class.
saveAsTable ( java.lang.String )This method is from 'JavaSchemaRDD' class.
saveAsTextFile ( java.lang.String )This method is from 'JavaSchemaRDD' class.
saveAsTextFile ( java.lang.String, java.lang.Class<? extends org.apache.hadoop.io.compress.CompressionCodec> )This method is from 'JavaSchemaRDD' class.
schema ( )This method is from 'JavaSchemaRDD' class.
schemaRDD ( )This method is from 'JavaSchemaRDD' class.
schemaString ( )This method is from 'JavaSchemaRDD' class.
setName ( java.lang.String )This method is from 'JavaSchemaRDD' class.
sqlContext ( )This method is from 'JavaSchemaRDD' class.
subtract ( JavaSchemaRDD )This method is from 'JavaSchemaRDD' class.
subtract ( JavaSchemaRDD, int )This method is from 'JavaSchemaRDD' class.
subtract ( JavaSchemaRDD, org.apache.spark.Partitioner )This method is from 'JavaSchemaRDD' class.
take ( int )This method is from 'JavaSchemaRDD' class.
takeAsync ( int )This method is from 'JavaSchemaRDD' class.
takeOrdered ( int )This method is from 'JavaSchemaRDD' class.
takeOrdered ( int, java.util.Comparator<Row> )This method is from 'JavaSchemaRDD' class.
takeSample ( boolean, int )This method is from 'JavaSchemaRDD' class.
takeSample ( boolean, int, long )This method is from 'JavaSchemaRDD' class.
toArray ( )This method is from 'JavaSchemaRDD' class.
toDebugString ( )This method is from 'JavaSchemaRDD' class.
toJSON ( )This method is from 'JavaSchemaRDD' class.
toLocalIterator ( )This method is from 'JavaSchemaRDD' class.
top ( int )This method is from 'JavaSchemaRDD' class.
top ( int, java.util.Comparator<Row> )This method is from 'JavaSchemaRDD' class.
toString ( )This method is from 'JavaSchemaRDD' class.
unpersist ( boolean )This method is from 'JavaSchemaRDD' class.
wrapRDD ( org.apache.spark.rdd.RDD )This method is from 'JavaSchemaRDD' class.
wrapRDD ( org.apache.spark.rdd.RDD<Row> )This method is from 'JavaSchemaRDD' class.
zip ( org.apache.spark.api.java.JavaRDDLike<U,?> )This method is from 'JavaSchemaRDD' class.
zipPartitions ( org.apache.spark.api.java.JavaRDDLike<U,?>, org.apache.spark.api.java.function.FlatMapFunction2<java.util.Iterator<Row>,java.util.Iterator<U>,V> )This method is from 'JavaSchemaRDD' class.
zipWithIndex ( )This method is from 'JavaSchemaRDD' class.
zipWithUniqueId ( )This method is from 'JavaSchemaRDD' class.
[+] JavaSQLContext (1)
| Change | Effect |
|---|---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (37)
applySchema ( org.apache.spark.api.java.JavaRDD<?>, java.lang.Class<?> )This method is from 'JavaSQLContext' class.
applySchema ( org.apache.spark.api.java.JavaRDD<Row>, StructType )This method is from 'JavaSQLContext' class.
baseRelationToSchemaRDD ( org.apache.spark.sql.sources.BaseRelation )This method is from 'JavaSQLContext' class.
createParquetFile ( java.lang.Class<?>, java.lang.String, boolean, org.apache.hadoop.conf.Configuration )This method is from 'JavaSQLContext' class.
getSchema ( java.lang.Class<?> )This method is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.api.java.JavaSparkContext )This constructor is from 'JavaSQLContext' class.
JavaSQLContext ( org.apache.spark.sql.SQLContext )This constructor is from 'JavaSQLContext' class.
jsonFile ( java.lang.String )This method is from 'JavaSQLContext' class.
jsonFile ( java.lang.String, StructType )This method is from 'JavaSQLContext' class.
jsonRDD ( org.apache.spark.api.java.JavaRDD<java.lang.String> )This method is from 'JavaSQLContext' class.
jsonRDD ( org.apache.spark.api.java.JavaRDD<java.lang.String>, StructType )This method is from 'JavaSQLContext' class.
parquetFile ( java.lang.String )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF10<?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF11<?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF12<?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF13<?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF1<?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF2<?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF3<?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF4<?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF5<?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF6<?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF7<?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF8<?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerFunction ( java.lang.String, UDF9<?,?,?,?,?,?,?,?,?,?>, DataType )This method is from 'JavaSQLContext' class.
registerRDDAsTable ( JavaSchemaRDD, java.lang.String )This method is from 'JavaSQLContext' class.
sql ( java.lang.String )This method is from 'JavaSQLContext' class.
sqlContext ( )This method is from 'JavaSQLContext' class.
package org.apache.spark.sql.sources
[+] DDLParser (1)
| Change | Effect |
|---|---|
1 | Removed super-interface scala.util.parsing.combinator.PackratParsers. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
ddlParser ( )Return value of this method has type 'DDLParser'.
[+] TableScan (1)
| Change | Effect |
|---|---|
1 | This class became interface. | A client program may be interrupted by IncompatibleClassChangeError or InstantiationError exception dependent on the usage of this class. |
[+] affected methods (1)
buildScan ( )This abstract method is from 'TableScan' abstract class.
to the top
Problems with Data Types, Medium Severity (1)
spark-sql_2.10-1.2.0.jar
package org.apache.spark.sql.sources
[+] DDLParser (1)
| Change | Effect |
|---|---|
1 | Superclass has been changed from scala.util.parsing.combinator.syntactical.StandardTokenParsers to org.apache.spark.sql.catalyst.AbstractSparkSQLParser. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
ddlParser ( )Return value of this method has type 'DDLParser'.
to the top
Java ARchives (1)
spark-sql_2.10-1.2.0.jar
to the top
Generated on Sat Apr 11 01:09:03 2015 for spark-avro_2.10-0.2.0 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API