Binary compatibility report for the succinct-0.1.2 library between versions 1.3.0 and 1.1.0 (relating to the portability of the client application succinct-0.1.2.jar)
Test Info

| Library Name | succinct-0.1.2 |
|---|---|
| Version #1 | 1.3.0 |
| Version #2 | 1.1.0 |
| Java Version | 1.7.0_75 |

Test Results

| Total Java ARchives | 1 |
|---|---|
| Total Methods / Classes | 2397 / 463 |
| Verdict | Incompatible (67%) |
Problem Summary

| | Severity | Count |
|---|---|---|
| Added Methods | - | 163 |
| Removed Methods | High | 1483 |
| Problems with Data Types | High | 119 |
| | Medium | 38 |
| | Low | 17 |
| Problems with Methods | High | 0 |
| | Medium | 0 |
| | Low | 0 |
| Other Changes in Data Types | - | 6 |
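For context on the verdict: the JVM links each call site by its full method descriptor, return type included, so a same-named method whose return type changed between versions still counts as removed. A minimal sketch in hypothetical client code (not part of this report, assuming the 1.3.0 signature `sql(String) : DataFrame`) of how that surfaces:

```scala
import org.apache.spark.sql.SQLContext

object VerdictDemo {
  // Compiled against spark-sql 1.3.0, this call site is recorded as
  // SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;
  // No method with that descriptor exists in 1.1.0 (sql(String) returns a
  // SchemaRDD there; see the Added Methods list below), so running the same
  // bytecode against 1.1.0 throws java.lang.NoSuchMethodError.
  def breaks(ctx: SQLContext): Unit = {
    val df = ctx.sql("SELECT 1")
    println(df.count())
  }
}
```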
Added Methods (163)
spark-sql_2.10-1.1.0.jar, CatalystConverter.class
package org.apache.spark.sql.parquet
CatalystConverter.getCurrentRecord ( ) : org.apache.spark.sql.catalyst.expressions.Row
[mangled: org/apache/spark/sql/parquet/CatalystConverter.getCurrentRecord:()Lorg/apache/spark/sql/catalyst/expressions/Row;]
CatalystConverter.updateString ( int fieldIndex, parquet.io.api.Binary value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateString:(ILparquet/io/api/Binary;)V]
spark-sql_2.10-1.1.0.jar, CatalystGroupConverter.class
package org.apache.spark.sql.parquet
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.catalyst.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/catalyst/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.catalyst.types.StructField[ ] schema, int index, CatalystConverter parent, scala.collection.mutable.ArrayBuffer<Object> current, scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.catalyst.expressions.Row> buffer )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/catalyst/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;Lscala/collection/mutable/ArrayBuffer;Lscala/collection/mutable/ArrayBuffer;)V]
CatalystGroupConverter.getCurrentRecord ( ) : org.apache.spark.sql.catalyst.expressions.Row
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.getCurrentRecord:()Lorg/apache/spark/sql/catalyst/expressions/Row;]
CatalystGroupConverter.schema ( ) : org.apache.spark.sql.catalyst.types.StructField[ ]
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.schema:()[Lorg/apache/spark/sql/catalyst/types/StructField;]
spark-sql_2.10-1.1.0.jar, ColumnBuilder.class
package org.apache.spark.sql.columnar
ColumnBuilder.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnBuilder.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
spark-sql_2.10-1.1.0.jar, CompressionScheme.class
package org.apache.spark.sql.columnar.compression
CompressionScheme.encoder ( ) [abstract] : Encoder<T>
[mangled: org/apache/spark/sql/columnar/compression/CompressionScheme.encoder:()Lorg/apache/spark/sql/columnar/compression/Encoder;]
spark-sql_2.10-1.1.0.jar, DescribeCommand.class
package org.apache.spark.sql.execution
DescribeCommand.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/DescribeCommand.children:()Lscala/collection/immutable/Nil$;]
DescribeCommand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/DescribeCommand.children:()Lscala/collection/Seq;]
DescribeCommand.copy ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.SQLContext context ) : DescribeCommand
[mangled: org/apache/spark/sql/execution/DescribeCommand.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/DescribeCommand;]
DescribeCommand.DescribeCommand ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.SQLContext context )
[mangled: org/apache/spark/sql/execution/DescribeCommand."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)V]
DescribeCommand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.catalyst.expressions.Row>
[mangled: org/apache/spark/sql/execution/DescribeCommand.execute:()Lorg/apache/spark/rdd/RDD;]
DescribeCommand.sideEffectResult ( ) : scala.collection.Seq<scala.Tuple3<String,String,String>>
[mangled: org/apache/spark/sql/execution/DescribeCommand.sideEffectResult:()Lscala/collection/Seq;]
spark-sql_2.10-1.1.0.jar, Encoder<T>.class
package org.apache.spark.sql.columnar.compression
Encoder<T>.compress ( java.nio.ByteBuffer p1, java.nio.ByteBuffer p2, org.apache.spark.sql.columnar.NativeColumnType<T> p3 ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.compress:(Ljava/nio/ByteBuffer;Ljava/nio/ByteBuffer;Lorg/apache/spark/sql/columnar/NativeColumnType;)Ljava/nio/ByteBuffer;]
Encoder<T>.gatherCompressibilityStats ( Object p1, org.apache.spark.sql.columnar.NativeColumnType<T> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.gatherCompressibilityStats:(Ljava/lang/Object;Lorg/apache/spark/sql/columnar/NativeColumnType;)V]
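The `Encoder<T>` signatures above imply a two-pass contract: `gatherCompressibilityStats` observes each value first, then `compress` writes the chosen encoding from one buffer into another. A self-contained sketch of that shape, using simplified stand-in types rather than Spark's internals:

```scala
import java.nio.ByteBuffer

// Illustrative mirror of the two-pass Encoder contract; not Spark's API.
trait SimpleEncoder {
  // Pass 1: observe a value to estimate how compressible the column is.
  def gatherCompressibilityStats(value: Any): Unit
  // Pass 2: copy `from` into `to`, applying the chosen scheme.
  def compress(from: ByteBuffer, to: ByteBuffer): ByteBuffer
}

// Trivial pass-through encoder: gathers no stats, copies byte-for-byte.
class PassThroughEncoder extends SimpleEncoder {
  def gatherCompressibilityStats(value: Any): Unit = ()
  def compress(from: ByteBuffer, to: ByteBuffer): ByteBuffer = {
    to.put(from) // copy all remaining bytes
    to.flip()    // make `to` readable from the start
    to
  }
}
```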
spark-sql_2.10-1.1.0.jar, EvaluatePython.class
package org.apache.spark.sql.execution
EvaluatePython.copy ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child ) : EvaluatePython
[mangled: org/apache/spark/sql/execution/EvaluatePython.copy:(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/execution/EvaluatePython;]
EvaluatePython.curried ( ) [static] : scala.Function1<PythonUDF,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,EvaluatePython>>
[mangled: org/apache/spark/sql/execution/EvaluatePython.curried:()Lscala/Function1;]
EvaluatePython.EvaluatePython ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child )
[mangled: org/apache/spark/sql/execution/EvaluatePython."<init>":(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
EvaluatePython.tupled ( ) [static] : scala.Function1<scala.Tuple2<PythonUDF,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,EvaluatePython>
[mangled: org/apache/spark/sql/execution/EvaluatePython.tupled:()Lscala/Function1;]
spark-sql_2.10-1.1.0.jar, ExplainCommand.class
package org.apache.spark.sql.execution
ExplainCommand.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/ExplainCommand.children:()Lscala/collection/immutable/Nil$;]
ExplainCommand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExplainCommand.children:()Lscala/collection/Seq;]
ExplainCommand.copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended, org.apache.spark.sql.SQLContext context ) : ExplainCommand
[mangled: org/apache/spark/sql/execution/ExplainCommand.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;ZLorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/ExplainCommand;]
ExplainCommand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.catalyst.expressions.Row>
[mangled: org/apache/spark/sql/execution/ExplainCommand.execute:()Lorg/apache/spark/rdd/RDD;]
ExplainCommand.ExplainCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended, org.apache.spark.sql.SQLContext context )
[mangled: org/apache/spark/sql/execution/ExplainCommand."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;ZLorg/apache/spark/sql/SQLContext;)V]
ExplainCommand.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.sql.SQLContext>
[mangled: org/apache/spark/sql/execution/ExplainCommand.otherCopyArgs:()Lscala/collection/immutable/List;]
ExplainCommand.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExplainCommand.otherCopyArgs:()Lscala/collection/Seq;]
ExplainCommand.sideEffectResult ( ) : scala.collection.Seq<String>
[mangled: org/apache/spark/sql/execution/ExplainCommand.sideEffectResult:()Lscala/collection/Seq;]
spark-sql_2.10-1.1.0.jar, InMemoryColumnarTableScan.class
package org.apache.spark.sql.columnar
InMemoryColumnarTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, InMemoryRelation relation ) : InMemoryColumnarTableScan
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.copy:(Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)Lorg/apache/spark/sql/columnar/InMemoryColumnarTableScan;]
InMemoryColumnarTableScan.InMemoryColumnarTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, InMemoryRelation relation )
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)V]
spark-sql_2.10-1.1.0.jar, InMemoryRelation.class
package org.apache.spark.sql.columnar
InMemoryRelation.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.sql.execution.SparkPlan child, org.apache.spark.rdd.RDD<java.nio.ByteBuffer[ ]> _cachedColumnBuffers ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.copy:(Lscala/collection/Seq;ZILorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/columnar/InMemoryRelation;]
InMemoryRelation.InMemoryRelation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.sql.execution.SparkPlan child, org.apache.spark.rdd.RDD<java.nio.ByteBuffer[ ]> _cachedColumnBuffers )
[mangled: org/apache/spark/sql/columnar/InMemoryRelation."<init>":(Lscala/collection/Seq;ZILorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/rdd/RDD;)V]
InMemoryRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.Statistics
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan$Statistics;]
spark-sql_2.10-1.1.0.jar, IntColumnStats.class
package org.apache.spark.sql.columnar
IntColumnStats.ASCENDING ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.ASCENDING:()I]
IntColumnStats.contains ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.contains:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.DESCENDING ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.DESCENDING:()I]
IntColumnStats.gatherStats ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.gatherStats:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
IntColumnStats.initialBounds ( ) : scala.Tuple2<Object,Object>
[mangled: org/apache/spark/sql/columnar/IntColumnStats.initialBounds:()Lscala/Tuple2;]
IntColumnStats.INITIALIZED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.INITIALIZED:()I]
IntColumnStats.isAbove ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isAbove:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.isAscending ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isAscending:()Z]
IntColumnStats.isBelow ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isBelow:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.isDescending ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isDescending:()Z]
IntColumnStats.isOrdered ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isOrdered:()Z]
IntColumnStats.maxDelta ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.maxDelta:()I]
IntColumnStats.UNINITIALIZED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.UNINITIALIZED:()I]
IntColumnStats.UNORDERED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.UNORDERED:()I]
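The `IntColumnStats` constants above (`UNINITIALIZED`, `INITIALIZED`, `ASCENDING`, `DESCENDING`, `UNORDERED`) together with `maxDelta` suggest a small state machine that tracks whether the column's values arrive sorted. A self-contained, purely illustrative sketch of such a machine (an assumption about the intent, not Spark's actual implementation):

```scala
object OrderingStatsSketch {
  val UNINITIALIZED = 0; val INITIALIZED = 1
  val ASCENDING = 2; val DESCENDING = 3; val UNORDERED = 4

  private var state = UNINITIALIZED
  private var last = 0
  private var maxDelta = 0 // largest gap between consecutive values

  def gatherStats(value: Int): Unit = {
    // Only measure the delta once a previous value exists.
    if (state != UNINITIALIZED) maxDelta = maxDelta max (value - last).abs
    state = state match {
      case UNINITIALIZED               => INITIALIZED
      case INITIALIZED if value > last => ASCENDING
      case INITIALIZED if value < last => DESCENDING
      case ASCENDING if value < last   => UNORDERED
      case DESCENDING if value > last  => UNORDERED
      case s                           => s // equal values keep the state
    }
    last = value
  }

  def isOrdered: Boolean = state == ASCENDING || state == DESCENDING
}
```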
spark-sql_2.10-1.1.0.jar, Limit.class
package org.apache.spark.sql.execution
Limit.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/Limit.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
spark-sql_2.10-1.1.0.jar, NativeColumnType<T>.class
package org.apache.spark.sql.columnar
NativeColumnType<T>.dataType ( ) : T
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.dataType:()Lorg/apache/spark/sql/catalyst/types/NativeType;]
NativeColumnType<T>.NativeColumnType ( T dataType, int typeId, int defaultSize ) : public
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.org.apache.spark.sql.columnar.NativeColumnType:(Lorg/apache/spark/sql/catalyst/types/NativeType;II)V]
spark-sql_2.10-1.1.0.jar, NullableColumnBuilder.class
package org.apache.spark.sql.columnar
NullableColumnBuilder.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
NullableColumnBuilder.NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..super.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
spark-sql_2.10-1.1.0.jar, ParquetRelation.class
package org.apache.spark.sql.parquet
ParquetRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.Statistics
[mangled: org/apache/spark/sql/parquet/ParquetRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan$Statistics;]
spark-sql_2.10-1.1.0.jar, ParquetTableScan.class
package org.apache.spark.sql.parquet
ParquetTableScan.normalOutput ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.normalOutput:()Lscala/collection/Seq;]
ParquetTableScan.partOutput ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.partOutput:()Lscala/collection/Seq;]
spark-sql_2.10-1.1.0.jar, PythonUDF.class
package org.apache.spark.sql.execution
PythonUDF.copy ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.catalyst.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children ) : PythonUDF
[mangled: org/apache/spark/sql/execution/PythonUDF.copy:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/catalyst/types/DataType;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/PythonUDF;]
PythonUDF.dataType ( ) : org.apache.spark.sql.catalyst.types.DataType
[mangled: org/apache/spark/sql/execution/PythonUDF.dataType:()Lorg/apache/spark/sql/catalyst/types/DataType;]
PythonUDF.eval ( org.apache.spark.sql.catalyst.expressions.Row input ) : Object
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/catalyst/expressions/Row;)Ljava/lang/Object;]
PythonUDF.eval ( org.apache.spark.sql.catalyst.expressions.Row input ) : scala.runtime.Nothing.
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/catalyst/expressions/Row;)Lscala/runtime/Nothing$;]
PythonUDF.PythonUDF ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.catalyst.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children )
[mangled: org/apache/spark/sql/execution/PythonUDF."<init>":(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/catalyst/types/DataType;Lscala/collection/Seq;)V]
spark-sql_2.10-1.1.0.jar, RowWriteSupport.class
package org.apache.spark.sql.parquet
RowWriteSupport.attributes ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes:()Lscala/collection/Seq;]
RowWriteSupport.attributes_.eq ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p1 ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes_.eq:(Lscala/collection/Seq;)V]
RowWriteSupport.write ( org.apache.spark.sql.catalyst.expressions.Row record ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.write:(Lorg/apache/spark/sql/catalyst/expressions/Row;)V]
RowWriteSupport.writeArray ( org.apache.spark.sql.catalyst.types.ArrayType schema, scala.collection.Seq<Object> array ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeArray:(Lorg/apache/spark/sql/catalyst/types/ArrayType;Lscala/collection/Seq;)V]
RowWriteSupport.writeMap ( org.apache.spark.sql.catalyst.types.MapType schema, scala.collection.immutable.Map<?,Object> map ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeMap:(Lorg/apache/spark/sql/catalyst/types/MapType;Lscala/collection/immutable/Map;)V]
RowWriteSupport.writePrimitive ( org.apache.spark.sql.catalyst.types.PrimitiveType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writePrimitive:(Lorg/apache/spark/sql/catalyst/types/PrimitiveType;Ljava/lang/Object;)V]
RowWriteSupport.writeStruct ( org.apache.spark.sql.catalyst.types.StructType schema, scala.collection.Seq<Object> struct ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeStruct:(Lorg/apache/spark/sql/catalyst/types/StructType;Lscala/collection/Seq;)V]
RowWriteSupport.writeValue ( org.apache.spark.sql.catalyst.types.DataType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeValue:(Lorg/apache/spark/sql/catalyst/types/DataType;Ljava/lang/Object;)V]
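The `RowWriteSupport.write*` methods above form a recursive dispatch: `writeValue` inspects each field's `DataType` and delegates to `writeArray`, `writeMap`, `writeStruct`, or `writePrimitive`, which in turn recurse through `writeValue` for nested elements. A self-contained sketch of that pattern with simplified stand-in types (not the catalyst types; `writeMap` omitted for brevity):

```scala
sealed trait DataType
case object IntType extends DataType
case class ArrayType(element: DataType) extends DataType
case class StructType(fields: Seq[(String, DataType)]) extends DataType

object WriterSketch {
  // Dispatch on the schema node, mirroring writeValue in the listing.
  def writeValue(schema: DataType, value: Any): Unit = schema match {
    case ArrayType(elem)    => writeArray(elem, value.asInstanceOf[Seq[Any]])
    case StructType(fields) => writeStruct(fields, value.asInstanceOf[Seq[Any]])
    case primitive          => writePrimitive(primitive, value)
  }
  def writePrimitive(schema: DataType, value: Any): Unit =
    println(s"$schema = $value") // stand-in for emitting to Parquet
  def writeArray(elem: DataType, values: Seq[Any]): Unit =
    values.foreach(writeValue(elem, _))
  def writeStruct(fields: Seq[(String, DataType)], values: Seq[Any]): Unit =
    fields.zip(values).foreach { case ((_, tpe), v) => writeValue(tpe, v) }
}
```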
spark-sql_2.10-1.1.0.jar, SetCommand.class
package org.apache.spark.sql.execution
SetCommand.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/SetCommand.children:()Lscala/collection/immutable/Nil$;]
SetCommand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/SetCommand.children:()Lscala/collection/Seq;]
SetCommand.copy ( scala.Option<String> key, scala.Option<String> value, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.SQLContext context ) : SetCommand
[mangled: org/apache/spark/sql/execution/SetCommand.copy:(Lscala/Option;Lscala/Option;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/SetCommand;]
SetCommand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.catalyst.expressions.Row>
[mangled: org/apache/spark/sql/execution/SetCommand.execute:()Lorg/apache/spark/rdd/RDD;]
SetCommand.key ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/execution/SetCommand.key:()Lscala/Option;]
SetCommand.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.sql.SQLContext>
[mangled: org/apache/spark/sql/execution/SetCommand.otherCopyArgs:()Lscala/collection/immutable/List;]
SetCommand.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/SetCommand.otherCopyArgs:()Lscala/collection/Seq;]
SetCommand.SetCommand ( scala.Option<String> key, scala.Option<String> value, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.SQLContext context )
[mangled: org/apache/spark/sql/execution/SetCommand."<init>":(Lscala/Option;Lscala/Option;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)V]
SetCommand.sideEffectResult ( ) : scala.collection.Seq<String>
[mangled: org/apache/spark/sql/execution/SetCommand.sideEffectResult:()Lscala/collection/Seq;]
SetCommand.value ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/execution/SetCommand.value:()Lscala/Option;]
spark-sql_2.10-1.1.0.jar, SparkPlan.class
package org.apache.spark.sql.execution
SparkPlan.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
spark-sql_2.10-1.1.0.jar, SparkStrategies.class
package org.apache.spark.sql.execution
SparkStrategies.CommandStrategy ( ) : SparkStrategies.CommandStrategy.
[mangled: org/apache/spark/sql/execution/SparkStrategies.CommandStrategy:()Lorg/apache/spark/sql/execution/SparkStrategies$CommandStrategy$;]
spark-sql_2.10-1.1.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.applySchema ( org.apache.spark.rdd.RDD<catalyst.expressions.Row> rowRDD, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchema:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.approxCountDistinct ( catalyst.expressions.Expression e, double rsd ) : catalyst.expressions.ApproxCountDistinct
[mangled: org/apache/spark/sql/SQLContext.approxCountDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;D)Lorg/apache/spark/sql/catalyst/expressions/ApproxCountDistinct;]
SQLContext.autoBroadcastJoinThreshold ( ) : int
[mangled: org/apache/spark/sql/SQLContext.autoBroadcastJoinThreshold:()I]
SQLContext.avg ( catalyst.expressions.Expression e ) : catalyst.expressions.Average
[mangled: org/apache/spark/sql/SQLContext.avg:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Average;]
SQLContext.binaryToLiteral ( byte[ ] a ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.binaryToLiteral:([B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.booleanToLiteral ( boolean b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.booleanToLiteral:(Z)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.byteToLiteral ( byte b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.byteToLiteral:(B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.clear ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clear:()V]
SQLContext.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.codegenEnabled:()Z]
SQLContext.columnBatchSize ( ) : int
[mangled: org/apache/spark/sql/SQLContext.columnBatchSize:()I]
SQLContext.count ( catalyst.expressions.Expression e ) : catalyst.expressions.Count
[mangled: org/apache/spark/sql/SQLContext.count:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Count;]
SQLContext.countDistinct ( scala.collection.Seq<catalyst.expressions.Expression> e ) : catalyst.expressions.CountDistinct
[mangled: org/apache/spark/sql/SQLContext.countDistinct:(Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/CountDistinct;]
SQLContext.createParquetFile ( String path, boolean allowExisting, org.apache.hadoop.conf.Configuration conf, scala.reflect.api.TypeTags.TypeTag<A> p4 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createParquetFile:(Ljava/lang/String;ZLorg/apache/hadoop/conf/Configuration;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.createSchemaRDD ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createSchemaRDD:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.decimalToLiteral ( scala.math.BigDecimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.decimalToLiteral:(Lscala/math/BigDecimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.defaultSizeInBytes ( ) : long
[mangled: org/apache/spark/sql/SQLContext.defaultSizeInBytes:()J]
SQLContext.dialect ( ) : String
[mangled: org/apache/spark/sql/SQLContext.dialect:()Ljava/lang/String;]
SQLContext.doubleToLiteral ( double d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.doubleToLiteral:(D)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.DslAttribute ( catalyst.expressions.AttributeReference a ) : catalyst.dsl.package.ExpressionConversions.DslAttribute
[mangled: org/apache/spark/sql/SQLContext.DslAttribute:(Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslAttribute;]
SQLContext.DslExpression ( catalyst.expressions.Expression e ) : catalyst.dsl.package.ExpressionConversions.DslExpression
[mangled: org/apache/spark/sql/SQLContext.DslExpression:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslExpression;]
SQLContext.DslString ( String s ) : catalyst.dsl.package.ExpressionConversions.DslString
[mangled: org/apache/spark/sql/SQLContext.DslString:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslString;]
SQLContext.DslSymbol ( scala.Symbol sym ) : catalyst.dsl.package.ExpressionConversions.DslSymbol
[mangled: org/apache/spark/sql/SQLContext.DslSymbol:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslSymbol;]
SQLContext.first ( catalyst.expressions.Expression e ) : catalyst.expressions.First
[mangled: org/apache/spark/sql/SQLContext.first:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/First;]
SQLContext.floatToLiteral ( float f ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.floatToLiteral:(F)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.intToLiteral ( int i ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.intToLiteral:(I)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.isParquetBinaryAsString ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isParquetBinaryAsString:()Z]
SQLContext.jsonFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonFile ( String path, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, catalyst.types.StructType schema ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/types/StructType;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.logicalPlanToSparkQuery ( catalyst.plans.logical.LogicalPlan plan ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.logicalPlanToSparkQuery:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.longToLiteral ( long l ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.longToLiteral:(J)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.lower ( catalyst.expressions.Expression e ) : catalyst.expressions.Lower
[mangled: org/apache/spark/sql/SQLContext.lower:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Lower;]
SQLContext.max ( catalyst.expressions.Expression e ) : catalyst.expressions.Max
[mangled: org/apache/spark/sql/SQLContext.max:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Max;]
SQLContext.min ( catalyst.expressions.Expression e ) : catalyst.expressions.Min
[mangled: org/apache/spark/sql/SQLContext.min:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Min;]
SQLContext.numShufflePartitions ( ) : int
[mangled: org/apache/spark/sql/SQLContext.numShufflePartitions:()I]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer.
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer$;]
SQLContext.SQLConf._setter_.settings_.eq ( java.util.Map p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.sql.SQLConf._setter_.settings_.eq:(Ljava/util/Map;)V]
SQLContext.parquetCompressionCodec ( ) : String
[mangled: org/apache/spark/sql/SQLContext.parquetCompressionCodec:()Ljava/lang/String;]
SQLContext.parquetFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.parseDataType ( String dataTypeString ) : catalyst.types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/types/DataType;]
SQLContext.parser ( ) : catalyst.SqlParser
[mangled: org/apache/spark/sql/SQLContext.parser:()Lorg/apache/spark/sql/catalyst/SqlParser;]
SQLContext.registerFunction ( String name, scala.Function10<?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function10;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function11<?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function11;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function12<?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function12;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function13<?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function13;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function14<?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function14;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function15<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function15;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function16<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function16;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function17<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function17;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function18<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function18;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function19<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function19;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function1<?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function20<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function20;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function21<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function21;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function22<?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function22;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function2<?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function2;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function3<?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function3;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function4<?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function4;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function5<?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function5;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function6<?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function6;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function7<?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function7;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function8<?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function8;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerFunction ( String name, scala.Function9<?,?,?,?,?,?,?,?,?,T> func, scala.reflect.api.TypeTags.TypeTag<T> p3 ) : void
[mangled: org/apache/spark/sql/SQLContext.registerFunction:(Ljava/lang/String;Lscala/Function9;Lscala/reflect/api/TypeTags$TypeTag;)V]
SQLContext.registerPython ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, String stringDataType ) : void
[mangled: org/apache/spark/sql/SQLContext.registerPython:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Lorg/apache/spark/Accumulator;Ljava/lang/String;)V]
SQLContext.registerRDDAsTable ( SchemaRDD rdd, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerRDDAsTable:(Lorg/apache/spark/sql/SchemaRDD;Ljava/lang/String;)V]
SQLContext.settings ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.settings:()Ljava/util/Map;]
SQLContext.shortToLiteral ( short s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.shortToLiteral:(S)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.sql ( String sqlText ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.stringToLiteral ( String s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.stringToLiteral:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.sum ( catalyst.expressions.Expression e ) : catalyst.expressions.Sum
[mangled: org/apache/spark/sql/SQLContext.sum:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Sum;]
SQLContext.sumDistinct ( catalyst.expressions.Expression e ) : catalyst.expressions.SumDistinct
[mangled: org/apache/spark/sql/SQLContext.sumDistinct:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/SumDistinct;]
SQLContext.symbolToUnresolvedAttribute ( scala.Symbol s ) : catalyst.analysis.UnresolvedAttribute
[mangled: org/apache/spark/sql/SQLContext.symbolToUnresolvedAttribute:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/analysis/UnresolvedAttribute;]
SQLContext.table ( String tableName ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.timestampToLiteral ( java.sql.Timestamp t ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.timestampToLiteral:(Ljava/sql/Timestamp;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.upper ( catalyst.expressions.Expression e ) : catalyst.expressions.Upper
[mangled: org/apache/spark/sql/SQLContext.upper:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/expressions/Upper;]
SQLContext.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.useCompression:()Z]
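Taken together, the `SQLContext` entries above describe the 1.1.0-era surface a client compiles against; they appear in this list precisely because these exact descriptors are absent from 1.3.0 (where, for instance, query methods return `DataFrame` rather than `SchemaRDD`). A hedged sketch of typical client code, with a hypothetical input path and UDF:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object SqlContext110Client {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // jsonFile(String) : SchemaRDD, per the listing above.
    val people = sqlContext.jsonFile("people.json") // hypothetical path
    sqlContext.registerRDDAsTable(people, "people")

    // registerFunction(String, Function1, TypeTag) : void, per the listing.
    sqlContext.registerFunction("plusOne", (x: Int) => x + 1)

    // sql(String) : SchemaRDD, per the listing.
    sqlContext.sql("SELECT plusOne(age) FROM people").collect().foreach(println)
    sc.stop()
  }
}
```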
spark-sql_2.10-1.1.0.jar, TakeOrdered.class
package org.apache.spark.sql.execution
TakeOrdered.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/TakeOrdered.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
TakeOrdered.ordering ( ) : org.apache.spark.sql.catalyst.expressions.RowOrdering
[mangled: org/apache/spark/sql/execution/TakeOrdered.ordering:()Lorg/apache/spark/sql/catalyst/expressions/RowOrdering;]
Removed Methods (1483)
spark-sql_2.10-1.3.0.jar, And.class
package org.apache.spark.sql.sources
And.And ( Filter left, Filter right )
[mangled: org/apache/spark/sql/sources/And."<init>":(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)V]
And.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/And.canEqual:(Ljava/lang/Object;)Z]
And.copy ( Filter left, Filter right ) : And
[mangled: org/apache/spark/sql/sources/And.copy:(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/And;]
And.curried ( ) [static] : scala.Function1<Filter,scala.Function1<Filter,And>>
[mangled: org/apache/spark/sql/sources/And.curried:()Lscala/Function1;]
And.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/And.equals:(Ljava/lang/Object;)Z]
And.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/And.hashCode:()I]
And.left ( ) : Filter
[mangled: org/apache/spark/sql/sources/And.left:()Lorg/apache/spark/sql/sources/Filter;]
And.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/And.productArity:()I]
And.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/And.productElement:(I)Ljava/lang/Object;]
And.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/And.productIterator:()Lscala/collection/Iterator;]
And.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/And.productPrefix:()Ljava/lang/String;]
And.right ( ) : Filter
[mangled: org/apache/spark/sql/sources/And.right:()Lorg/apache/spark/sql/sources/Filter;]
And.toString ( ) : String
[mangled: org/apache/spark/sql/sources/And.toString:()Ljava/lang/String;]
And.tupled ( ) [static] : scala.Function1<scala.Tuple2<Filter,Filter>,And>
[mangled: org/apache/spark/sql/sources/And.tupled:()Lscala/Function1;]
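`And` is the first of the 1.3.0 `org.apache.spark.sql.sources` filter case classes that is absent from 1.1.0; the generated members above (`copy`, `curried`, `tupled`, `equals`, `hashCode`) appear in the diff because case classes synthesize them. A hypothetical usage sketch; code like this links against 1.3.0 but fails with `NoClassDefFoundError` under 1.1.0:

```scala
import org.apache.spark.sql.sources.{And, EqualTo, Filter, GreaterThan}

object FilterDemo {
  def main(args: Array[String]): Unit = {
    // And(Filter, Filter), per the constructor in the listing.
    val f: Filter = And(EqualTo("country", "DE"), GreaterThan("age", 21))
    // The synthesized case-class copy(...) from the listing.
    val g = f.asInstanceOf[And].copy(right = EqualTo("age", 42))
    println(Seq(f, g).mkString("\n"))
  }
}
```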
spark-sql_2.10-1.3.0.jar, BaseRelation.class
package org.apache.spark.sql.sources
BaseRelation.BaseRelation ( )
[mangled: org/apache/spark/sql/sources/BaseRelation."<init>":()V]
BaseRelation.schema ( ) [abstract] : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/sources/BaseRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
BaseRelation.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/sources/BaseRelation.sizeInBytes:()J]
BaseRelation.sqlContext ( ) [abstract] : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/sources/BaseRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
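`BaseRelation` is the abstract entry point of the 1.3.0 external data sources API. A minimal sketch of satisfying its two abstract members from the listing (`schema` and `sqlContext`); the schema is hypothetical, and a practical relation would also mix in a scan trait such as `TableScan` to produce rows:

```scala
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

// Implements both abstract members; the inherited sizeInBytes() default
// from the listing is left untouched.
class ConstantRelation(val sqlContext: SQLContext) extends BaseRelation {
  override def schema: StructType = StructType(Seq(
    StructField("id", IntegerType),
    StructField("name", StringType)))
}
```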
spark-sql_2.10-1.3.0.jar, BinaryColumnStats.class
package org.apache.spark.sql.columnar
BinaryColumnStats.BinaryColumnStats ( )
[mangled: org/apache/spark/sql/columnar/BinaryColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, BroadcastHashJoin.class
package org.apache.spark.sql.execution.joins
BroadcastHashJoin.BroadcastHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
BroadcastHashJoin.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastHashJoin.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
BroadcastHashJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.canEqual:(Ljava/lang/Object;)Z]
BroadcastHashJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.children:()Lscala/collection/Seq;]
BroadcastHashJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : BroadcastHashJoin
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/BroadcastHashJoin;]
BroadcastHashJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,BroadcastHashJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.curried:()Lscala/Function1;]
BroadcastHashJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.equals:(Ljava/lang/Object;)Z]
BroadcastHashJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastHashJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.hashCode:()I]
BroadcastHashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
BroadcastHashJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastHashJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.leftKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.output:()Lscala/collection/Seq;]
BroadcastHashJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
BroadcastHashJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productArity:()I]
BroadcastHashJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productElement:(I)Ljava/lang/Object;]
BroadcastHashJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productIterator:()Lscala/collection/Iterator;]
BroadcastHashJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productPrefix:()Ljava/lang/String;]
BroadcastHashJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.UnspecifiedDistribution.>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
BroadcastHashJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.requiredChildDistribution:()Lscala/collection/Seq;]
BroadcastHashJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastHashJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.rightKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamedKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamSideKeyGenerator:()Lscala/Function0;]
BroadcastHashJoin.timeout ( ) : scala.concurrent.duration.Duration
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.timeout:()Lscala/concurrent/duration/Duration;]
BroadcastHashJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,package.BuildSide,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,BroadcastHashJoin>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.tupled:()Lscala/Function1;]
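The `BroadcastHashJoin` members above split the plan into a build side (materialized into a hash table via `buildKeys`/`buildPlan`) and a streamed side that probes it (`streamedKeys`/`streamedPlan`, consumed by `hashJoin`). A self-contained sketch of that build/probe split with simplified stand-in types, not Spark's execution machinery:

```scala
object HashJoinSketch {
  type Row = Seq[Any]

  // Build phase: group the (broadcast) build side by its join key.
  // Probe phase: stream the other side and emit concatenated matches.
  def hashJoin(streamIter: Iterator[Row], buildRows: Seq[Row],
               streamKey: Row => Any, buildKey: Row => Any): Iterator[Row] = {
    val hashed: Map[Any, Seq[Row]] = buildRows.groupBy(buildKey)
    for {
      s <- streamIter
      b <- hashed.getOrElse(streamKey(s), Nil).iterator
    } yield s ++ b
  }

  def main(args: Array[String]): Unit = {
    val stream = Iterator(Seq(1, "a"), Seq(2, "b"))
    val build  = Seq(Seq(1, "x"), Seq(1, "y"))
    // Key 1 matches twice, key 2 not at all.
    hashJoin(stream, build, _.head, _.head).foreach(println)
  }
}
```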
spark-sql_2.10-1.3.0.jar, BroadcastLeftSemiJoinHash.class
package org.apache.spark.sql.execution.joins
BroadcastLeftSemiJoinHash.BroadcastLeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
BroadcastLeftSemiJoinHash.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.buildSide ( ) : package.BuildRight.
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildRight$;]
BroadcastLeftSemiJoinHash.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastLeftSemiJoinHash.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
BroadcastLeftSemiJoinHash.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.canEqual:(Ljava/lang/Object;)Z]
BroadcastLeftSemiJoinHash.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.children:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : BroadcastLeftSemiJoinHash
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash;]
BroadcastLeftSemiJoinHash.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,BroadcastLeftSemiJoinHash>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.curried:()Lscala/Function1;]
BroadcastLeftSemiJoinHash.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.equals:(Ljava/lang/Object;)Z]
BroadcastLeftSemiJoinHash.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastLeftSemiJoinHash.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.hashCode:()I]
BroadcastLeftSemiJoinHash.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
BroadcastLeftSemiJoinHash.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastLeftSemiJoinHash.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.leftKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.output:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productArity:()I]
BroadcastLeftSemiJoinHash.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productElement:(I)Ljava/lang/Object;]
BroadcastLeftSemiJoinHash.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productIterator:()Lscala/collection/Iterator;]
BroadcastLeftSemiJoinHash.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productPrefix:()Ljava/lang/String;]
BroadcastLeftSemiJoinHash.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastLeftSemiJoinHash.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.rightKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamedKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamSideKeyGenerator:()Lscala/Function0;]
BroadcastLeftSemiJoinHash.tupled ( ) [static] : scala.Function1<scala.Tuple4<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,BroadcastLeftSemiJoinHash>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, BroadcastNestedLoopJoin.class
package org.apache.spark.sql.execution.joins
BroadcastNestedLoopJoin.BroadcastNestedLoopJoin ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right, package.BuildSide buildSide, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition )
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;)V]
BroadcastNestedLoopJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastNestedLoopJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.canEqual:(Ljava/lang/Object;)Z]
BroadcastNestedLoopJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.children:()Lscala/collection/Seq;]
BroadcastNestedLoopJoin.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.condition:()Lscala/Option;]
BroadcastNestedLoopJoin.copy ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right, package.BuildSide buildSide, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition ) : BroadcastNestedLoopJoin
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;)Lorg/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin;]
BroadcastNestedLoopJoin.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.catalyst.plans.JoinType,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,BroadcastNestedLoopJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.curried:()Lscala/Function1;]
BroadcastNestedLoopJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.equals:(Ljava/lang/Object;)Z]
BroadcastNestedLoopJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastNestedLoopJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.hashCode:()I]
BroadcastNestedLoopJoin.joinType ( ) : org.apache.spark.sql.catalyst.plans.JoinType
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.joinType:()Lorg/apache/spark/sql/catalyst/plans/JoinType;]
BroadcastNestedLoopJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastNestedLoopJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastNestedLoopJoin.BroadcastNestedLoopJoin..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin..boundCondition:()Lscala/Function1;]
BroadcastNestedLoopJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.output:()Lscala/collection/Seq;]
BroadcastNestedLoopJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
BroadcastNestedLoopJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productArity:()I]
BroadcastNestedLoopJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productElement:(I)Ljava/lang/Object;]
BroadcastNestedLoopJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productIterator:()Lscala/collection/Iterator;]
BroadcastNestedLoopJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productPrefix:()Ljava/lang/String;]
BroadcastNestedLoopJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastNestedLoopJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastNestedLoopJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan,package.BuildSide,org.apache.spark.sql.catalyst.plans.JoinType,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>>,BroadcastNestedLoopJoin>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CachedBatch.class
package org.apache.spark.sql.columnar
CachedBatch.buffers ( ) : byte[ ][ ]
[mangled: org/apache/spark/sql/columnar/CachedBatch.buffers:()[[B]
CachedBatch.CachedBatch ( byte[ ][ ] buffers, org.apache.spark.sql.Row stats )
[mangled: org/apache/spark/sql/columnar/CachedBatch."<init>":([[BLorg/apache/spark/sql/Row;)V]
CachedBatch.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/CachedBatch.canEqual:(Ljava/lang/Object;)Z]
CachedBatch.copy ( byte[ ][ ] buffers, org.apache.spark.sql.Row stats ) : CachedBatch
[mangled: org/apache/spark/sql/columnar/CachedBatch.copy:([[BLorg/apache/spark/sql/Row;)Lorg/apache/spark/sql/columnar/CachedBatch;]
CachedBatch.curried ( ) [static] : scala.Function1<byte[ ][ ],scala.Function1<org.apache.spark.sql.Row,CachedBatch>>
[mangled: org/apache/spark/sql/columnar/CachedBatch.curried:()Lscala/Function1;]
CachedBatch.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/CachedBatch.equals:(Ljava/lang/Object;)Z]
CachedBatch.hashCode ( ) : int
[mangled: org/apache/spark/sql/columnar/CachedBatch.hashCode:()I]
CachedBatch.productArity ( ) : int
[mangled: org/apache/spark/sql/columnar/CachedBatch.productArity:()I]
CachedBatch.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/columnar/CachedBatch.productElement:(I)Ljava/lang/Object;]
CachedBatch.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/columnar/CachedBatch.productIterator:()Lscala/collection/Iterator;]
CachedBatch.productPrefix ( ) : String
[mangled: org/apache/spark/sql/columnar/CachedBatch.productPrefix:()Ljava/lang/String;]
CachedBatch.stats ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/CachedBatch.stats:()Lorg/apache/spark/sql/Row;]
CachedBatch.toString ( ) : String
[mangled: org/apache/spark/sql/columnar/CachedBatch.toString:()Ljava/lang/String;]
CachedBatch.tupled ( ) [static] : scala.Function1<scala.Tuple2<byte[ ][ ],org.apache.spark.sql.Row>,CachedBatch>
[mangled: org/apache/spark/sql/columnar/CachedBatch.tupled:()Lscala/Function1;]
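The curried and tupled entries that recur throughout this listing are the companion-object helpers Scala generates for every case class; CachedBatch, with only two fields, is the simplest place to see them. A minimal sketch of three equivalent construction paths (illustrative only: these columnar internals are not public API, and the Row(3) stats literal is a placeholder for real collected statistics):

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.columnar.CachedBatch

    val buffers = Array(Array[Byte](1, 2, 3))               // one serialized buffer per column
    val batch      = new CachedBatch(buffers, Row(3))       // direct constructor
    val viaTupled  = CachedBatch.tupled((buffers, Row(3)))  // Function1 over a Tuple2
    val viaCurried = CachedBatch.curried(buffers)(Row(3))   // one argument list at a time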
spark-sql_2.10-1.3.0.jar, CachedData.class
package org.apache.spark.sql
CachedData.CachedData ( catalyst.plans.logical.LogicalPlan plan, columnar.InMemoryRelation cachedRepresentation )
[mangled: org/apache/spark/sql/CachedData."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/columnar/InMemoryRelation;)V]
CachedData.cachedRepresentation ( ) : columnar.InMemoryRelation
[mangled: org/apache/spark/sql/CachedData.cachedRepresentation:()Lorg/apache/spark/sql/columnar/InMemoryRelation;]
CachedData.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/CachedData.canEqual:(Ljava/lang/Object;)Z]
CachedData.copy ( catalyst.plans.logical.LogicalPlan plan, columnar.InMemoryRelation cachedRepresentation ) : CachedData
[mangled: org/apache/spark/sql/CachedData.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/columnar/InMemoryRelation;)Lorg/apache/spark/sql/CachedData;]
CachedData.curried ( ) [static] : scala.Function1<catalyst.plans.logical.LogicalPlan,scala.Function1<columnar.InMemoryRelation,CachedData>>
[mangled: org/apache/spark/sql/CachedData.curried:()Lscala/Function1;]
CachedData.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/CachedData.equals:(Ljava/lang/Object;)Z]
CachedData.hashCode ( ) : int
[mangled: org/apache/spark/sql/CachedData.hashCode:()I]
CachedData.plan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/CachedData.plan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CachedData.productArity ( ) : int
[mangled: org/apache/spark/sql/CachedData.productArity:()I]
CachedData.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/CachedData.productElement:(I)Ljava/lang/Object;]
CachedData.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/CachedData.productIterator:()Lscala/collection/Iterator;]
CachedData.productPrefix ( ) : String
[mangled: org/apache/spark/sql/CachedData.productPrefix:()Ljava/lang/String;]
CachedData.toString ( ) : String
[mangled: org/apache/spark/sql/CachedData.toString:()Ljava/lang/String;]
CachedData.tupled ( ) [static] : scala.Function1<scala.Tuple2<catalyst.plans.logical.LogicalPlan,columnar.InMemoryRelation>,CachedData>
[mangled: org/apache/spark/sql/CachedData.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CacheManager.class
package org.apache.spark.sql
CacheManager.CacheManager ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/CacheManager."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
CacheManager.cacheQuery ( DataFrame query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel ) : void
[mangled: org/apache/spark/sql/CacheManager.cacheQuery:(Lorg/apache/spark/sql/DataFrame;Lscala/Option;Lorg/apache/spark/storage/StorageLevel;)V]
CacheManager.cacheTable ( String tableName ) : void
[mangled: org/apache/spark/sql/CacheManager.cacheTable:(Ljava/lang/String;)V]
CacheManager.clearCache ( ) : void
[mangled: org/apache/spark/sql/CacheManager.clearCache:()V]
CacheManager.invalidateCache ( catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/CacheManager.invalidateCache:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CacheManager.isCached ( String tableName ) : boolean
[mangled: org/apache/spark/sql/CacheManager.isCached:(Ljava/lang/String;)Z]
CacheManager.tryUncacheQuery ( DataFrame query, boolean blocking ) : boolean
[mangled: org/apache/spark/sql/CacheManager.tryUncacheQuery:(Lorg/apache/spark/sql/DataFrame;Z)Z]
CacheManager.uncacheTable ( String tableName ) : void
[mangled: org/apache/spark/sql/CacheManager.uncacheTable:(Ljava/lang/String;)V]
CacheManager.useCachedData ( catalyst.plans.logical.LogicalPlan plan ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/CacheManager.useCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
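CacheManager gathers the table-caching operations behind SQLContext's public cacheTable/uncacheTable calls into one object. A hedged sketch of the listed entry points, assuming sqlContext is an existing SQLContext with a registered table named "people" (the class may not be accessible outside org.apache.spark.sql; application code would normally go through SQLContext instead of constructing the manager itself):

    import org.apache.spark.sql.CacheManager

    val manager = new CacheManager(sqlContext)
    manager.cacheTable("people")          // build an in-memory columnar relation for the table
    if (manager.isCached("people")) {
      manager.uncacheTable("people")      // drop this table's cached data only
    }
    manager.clearCache()                  // drop every cached query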
spark-sql_2.10-1.3.0.jar, CacheTableCommand.class
package org.apache.spark.sql.execution
CacheTableCommand.CacheTableCommand ( String tableName, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> plan, boolean isLazy )
[mangled: org/apache/spark/sql/execution/CacheTableCommand."<init>":(Ljava/lang/String;Lscala/Option;Z)V]
CacheTableCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.canEqual:(Ljava/lang/Object;)Z]
CacheTableCommand.copy ( String tableName, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> plan, boolean isLazy ) : CacheTableCommand
[mangled: org/apache/spark/sql/execution/CacheTableCommand.copy:(Ljava/lang/String;Lscala/Option;Z)Lorg/apache/spark/sql/execution/CacheTableCommand;]
CacheTableCommand.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,scala.Function1<Object,CacheTableCommand>>>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.curried:()Lscala/Function1;]
CacheTableCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.equals:(Ljava/lang/Object;)Z]
CacheTableCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/CacheTableCommand.hashCode:()I]
CacheTableCommand.isLazy ( ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.isLazy:()Z]
CacheTableCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.output:()Lscala/collection/Seq;]
CacheTableCommand.plan ( ) : scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.plan:()Lscala/Option;]
CacheTableCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productArity:()I]
CacheTableCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productElement:(I)Ljava/lang/Object;]
CacheTableCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productIterator:()Lscala/collection/Iterator;]
CacheTableCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productPrefix:()Ljava/lang/String;]
CacheTableCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CacheTableCommand.tableName ( ) : String
[mangled: org/apache/spark/sql/execution/CacheTableCommand.tableName:()Ljava/lang/String;]
CacheTableCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<String,scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,Object>,CacheTableCommand>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.tupled:()Lscala/Function1;]
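Like the other *Command classes in this report, CacheTableCommand is a logical node that does its work when run is invoked with a SQLContext. A sketch assuming sqlContext and a DataFrame df already exist; the plan comes from the DataFrame.logicalPlan accessor listed further below:

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.execution.CacheTableCommand

    // Cache df lazily under the name "events" (isLazy = true defers materialization).
    val cmd = new CacheTableCommand("events", Some(df.logicalPlan), true)
    val result: Seq[Row] = cmd.run(sqlContext)   // commands report their output as rows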
spark-sql_2.10-1.3.0.jar, CartesianProduct.class
package org.apache.spark.sql.execution.joins
CartesianProduct.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.canEqual:(Ljava/lang/Object;)Z]
CartesianProduct.CartesianProduct ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
CartesianProduct.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.children:()Lscala/collection/Seq;]
CartesianProduct.copy ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : CartesianProduct
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/CartesianProduct;]
CartesianProduct.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,CartesianProduct>>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.curried:()Lscala/Function1;]
CartesianProduct.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.equals:(Ljava/lang/Object;)Z]
CartesianProduct.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.execute:()Lorg/apache/spark/rdd/RDD;]
CartesianProduct.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.hashCode:()I]
CartesianProduct.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CartesianProduct.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
CartesianProduct.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.output:()Lscala/collection/Seq;]
CartesianProduct.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productArity:()I]
CartesianProduct.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productElement:(I)Ljava/lang/Object;]
CartesianProduct.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productIterator:()Lscala/collection/Iterator;]
CartesianProduct.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productPrefix:()Ljava/lang/String;]
CartesianProduct.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CartesianProduct.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
CartesianProduct.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,CartesianProduct>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.tupled:()Lscala/Function1;]
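CartesianProduct is a binary physical operator: its children are exactly left and right, and execute pairs every row of the left plan with every row of the right. A sketch assuming leftPlan and rightPlan are already-built SparkPlan instances:

    import org.apache.spark.sql.execution.joins.CartesianProduct

    val product = new CartesianProduct(leftPlan, rightPlan)
    product.children                 // Seq(leftPlan, rightPlan)
    val rows = product.execute()     // RDD[Row]; beware the |left| * |right| blow-up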
spark-sql_2.10-1.3.0.jar, CaseInsensitiveMap.class
package org.apache.spark.sql.sources
CaseInsensitiveMap.CaseInsensitiveMap ( scala.collection.immutable.Map<String,String> map )
[mangled: org/apache/spark/sql/sources/CaseInsensitiveMap."<init>":(Lscala/collection/immutable/Map;)V]
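Only the constructor surfaces in this report, but the name indicates a Map[String, String] wrapper used to normalize data-source option keys. A sketch under the assumption that lookups ignore key case and the usual Map operations are inherited:

    import org.apache.spark.sql.sources.CaseInsensitiveMap

    val options = new CaseInsensitiveMap(Map("PATH" -> "/data/events"))
    options.get("path")   // Some("/data/events"), assuming case-insensitive lookup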
spark-sql_2.10-1.3.0.jar, CatalystArrayContainsNullConverter.class
package org.apache.spark.sql.parquet
CatalystArrayContainsNullConverter.CatalystArrayContainsNullConverter ( org.apache.spark.sql.types.DataType elementType, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystArrayContainsNullConverter."<init>":(Lorg/apache/spark/sql/types/DataType;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystArrayConverter.class
package org.apache.spark.sql.parquet
CatalystArrayConverter.CatalystArrayConverter ( org.apache.spark.sql.types.DataType elementType, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystArrayConverter."<init>":(Lorg/apache/spark/sql/types/DataType;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystConverter.class
package org.apache.spark.sql.parquet
CatalystConverter.getCurrentRecord ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/parquet/CatalystConverter.getCurrentRecord:()Lorg/apache/spark/sql/Row;]
CatalystConverter.readDecimal ( org.apache.spark.sql.types.Decimal dest, parquet.io.api.Binary value, org.apache.spark.sql.types.DecimalType ctype ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.readDecimal:(Lorg/apache/spark/sql/types/Decimal;Lparquet/io/api/Binary;Lorg/apache/spark/sql/types/DecimalType;)V]
CatalystConverter.readTimestamp ( parquet.io.api.Binary value ) : java.sql.Timestamp
[mangled: org/apache/spark/sql/parquet/CatalystConverter.readTimestamp:(Lparquet/io/api/Binary;)Ljava/sql/Timestamp;]
CatalystConverter.THRIFT_ARRAY_ELEMENTS_SCHEMA_NAME_SUFFIX ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.THRIFT_ARRAY_ELEMENTS_SCHEMA_NAME_SUFFIX:()Ljava/lang/String;]
CatalystConverter.updateDecimal ( int fieldIndex, parquet.io.api.Binary value, org.apache.spark.sql.types.DecimalType ctype ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateDecimal:(ILparquet/io/api/Binary;Lorg/apache/spark/sql/types/DecimalType;)V]
CatalystConverter.updateString ( int fieldIndex, String value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateString:(ILjava/lang/String;)V]
CatalystConverter.updateTimestamp ( int fieldIndex, parquet.io.api.Binary value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateTimestamp:(ILparquet/io/api/Binary;)V]
spark-sql_2.10-1.3.0.jar, CatalystGroupConverter.class
package org.apache.spark.sql.parquet
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent, scala.collection.mutable.ArrayBuffer<Object> current, scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row> buffer )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;Lscala/collection/mutable/ArrayBuffer;Lscala/collection/mutable/ArrayBuffer;)V]
CatalystGroupConverter.schema ( ) : org.apache.spark.sql.types.StructField[ ]
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.schema:()[Lorg/apache/spark/sql/types/StructField;]
spark-sql_2.10-1.3.0.jar, CatalystMapConverter.class
package org.apache.spark.sql.parquet
CatalystMapConverter.CatalystMapConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystMapConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystNativeArrayConverter.class
package org.apache.spark.sql.parquet
CatalystNativeArrayConverter.CatalystNativeArrayConverter ( org.apache.spark.sql.types.NativeType elementType, int index, CatalystConverter parent, int capacity )
[mangled: org/apache/spark/sql/parquet/CatalystNativeArrayConverter."<init>":(Lorg/apache/spark/sql/types/NativeType;ILorg/apache/spark/sql/parquet/CatalystConverter;I)V]
spark-sql_2.10-1.3.0.jar, CatalystPrimitiveStringConverter.class
package org.apache.spark.sql.parquet
CatalystPrimitiveStringConverter.CatalystPrimitiveStringConverter ( CatalystConverter parent, int fieldIndex )
[mangled: org/apache/spark/sql/parquet/CatalystPrimitiveStringConverter."<init>":(Lorg/apache/spark/sql/parquet/CatalystConverter;I)V]
spark-sql_2.10-1.3.0.jar, CatalystScan.class
package org.apache.spark.sql.sources
CatalystScan.buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p1, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> p2 ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/CatalystScan.buildScan:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/rdd/RDD;]
spark-sql_2.10-1.3.0.jar, CatalystStructConverter.class
package org.apache.spark.sql.parquet
CatalystStructConverter.CatalystStructConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystStructConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, Column.class
package org.apache.spark.sql
Column.and ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.and:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.apply ( catalyst.expressions.Expression p1 ) [static] : Column
[mangled: org/apache/spark/sql/Column.apply:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/Column;]
Column.apply ( String p1 ) [static] : Column
[mangled: org/apache/spark/sql/Column.apply:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.as ( scala.Symbol alias ) : Column
[mangled: org/apache/spark/sql/Column.as:(Lscala/Symbol;)Lorg/apache/spark/sql/Column;]
Column.as ( String alias ) : Column
[mangled: org/apache/spark/sql/Column.as:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.asc ( ) : Column
[mangled: org/apache/spark/sql/Column.asc:()Lorg/apache/spark/sql/Column;]
Column.cast ( types.DataType to ) : Column
[mangled: org/apache/spark/sql/Column.cast:(Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/Column;]
Column.cast ( String to ) : Column
[mangled: org/apache/spark/sql/Column.cast:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.Column ( catalyst.expressions.Expression expr )
[mangled: org/apache/spark/sql/Column."<init>":(Lorg/apache/spark/sql/catalyst/expressions/Expression;)V]
Column.Column ( String name )
[mangled: org/apache/spark/sql/Column."<init>":(Ljava/lang/String;)V]
Column.contains ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.contains:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.desc ( ) : Column
[mangled: org/apache/spark/sql/Column.desc:()Lorg/apache/spark/sql/Column;]
Column.divide ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.divide:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.endsWith ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.endsWith:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.endsWith ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.endsWith:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.eqNullSafe ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.eqNullSafe:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.equalTo ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.equalTo:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.explain ( boolean extended ) : void
[mangled: org/apache/spark/sql/Column.explain:(Z)V]
Column.expr ( ) : catalyst.expressions.Expression
[mangled: org/apache/spark/sql/Column.expr:()Lorg/apache/spark/sql/catalyst/expressions/Expression;]
Column.geq ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.geq:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.getField ( String fieldName ) : Column
[mangled: org/apache/spark/sql/Column.getField:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.getItem ( int ordinal ) : Column
[mangled: org/apache/spark/sql/Column.getItem:(I)Lorg/apache/spark/sql/Column;]
Column.gt ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.gt:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.in ( Column... list ) : Column
[mangled: org/apache/spark/sql/Column.in:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.in ( scala.collection.Seq<Column> list ) : Column
[mangled: org/apache/spark/sql/Column.in:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
Column.isNotNull ( ) : Column
[mangled: org/apache/spark/sql/Column.isNotNull:()Lorg/apache/spark/sql/Column;]
Column.isNull ( ) : Column
[mangled: org/apache/spark/sql/Column.isNull:()Lorg/apache/spark/sql/Column;]
Column.leq ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.leq:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.like ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.like:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.lt ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.lt:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.minus ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.minus:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.mod ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.mod:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.multiply ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.multiply:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.notEqual ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.notEqual:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.or ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.or:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.plus ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.plus:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.rlike ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.rlike:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.startsWith ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.startsWith:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.startsWith ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.startsWith:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.substr ( int startPos, int len ) : Column
[mangled: org/apache/spark/sql/Column.substr:(II)Lorg/apache/spark/sql/Column;]
Column.substr ( Column startPos, Column len ) : Column
[mangled: org/apache/spark/sql/Column.substr:(Lorg/apache/spark/sql/Column;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.toString ( ) : String
[mangled: org/apache/spark/sql/Column.toString:()Ljava/lang/String;]
Column.unapply ( Column p1 ) [static] : scala.Option<catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/Column.unapply:(Lorg/apache/spark/sql/Column;)Lscala/Option;]
Column.unary_.bang ( ) : Column
[mangled: org/apache/spark/sql/Column.unary_.bang:()Lorg/apache/spark/sql/Column;]
Column.unary_.minus ( ) : Column
[mangled: org/apache/spark/sql/Column.unary_.minus:()Lorg/apache/spark/sql/Column;]
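The Column methods above are the named, Java-friendly forms of the 1.3.0 expression DSL; each call returns a new Column, so predicates and projections compose fluently. A small sketch using only methods from this listing:

    import org.apache.spark.sql.Column

    val age     = new Column("age")
    val country = new Column("country")

    val adults  = age.geq(18).and(country.equalTo("US"))      // (age >= 18) AND (country = 'US')
    val nextAge = age.plus(1).cast("string").as("next_age")   // arithmetic, cast, alias
    val newest  = age.desc                                    // sort-order column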
spark-sql_2.10-1.3.0.jar, ColumnBuilder.class
package org.apache.spark.sql.columnar
ColumnBuilder.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnBuilder.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, ColumnName.class
package org.apache.spark.sql
ColumnName.ColumnName ( String name )
[mangled: org/apache/spark/sql/ColumnName."<init>":(Ljava/lang/String;)V]
spark-sql_2.10-1.3.0.jar, ColumnStats.class
package org.apache.spark.sql.columnar
ColumnStats.collectedStatistics ( ) [abstract] : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/ColumnStats.collectedStatistics:()Lorg/apache/spark/sql/Row;]
ColumnStats.count ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/ColumnStats.count:()I]
ColumnStats.count_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.count_.eq:(I)V]
ColumnStats.gatherStats ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.gatherStats:(Lorg/apache/spark/sql/Row;I)V]
ColumnStats.nullCount ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/ColumnStats.nullCount:()I]
ColumnStats.nullCount_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.nullCount_.eq:(I)V]
ColumnStats.sizeInBytes ( ) [abstract] : long
[mangled: org/apache/spark/sql/columnar/ColumnStats.sizeInBytes:()J]
ColumnStats.sizeInBytes_.eq ( long p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.sizeInBytes_.eq:(J)V]
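ColumnStats is the per-column statistics collector used while building cached batches: a builder feeds it one field at a time through gatherStats, and collectedStatistics is later packed into the stats row of a CachedBatch. A minimal implementation sketch, assuming the members listed here are the whole abstract surface (a compatibility listing cannot guarantee that):

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.columnar.ColumnStats

    class CountingColumnStats extends ColumnStats {
      var count = 0            // a var satisfies both count() and count_=(int)
      var nullCount = 0
      var sizeInBytes = 0L

      def gatherStats(row: Row, ordinal: Int): Unit = {
        count += 1
        if (row.isNullAt(ordinal)) nullCount += 1
      }

      def collectedStatistics: Row = Row(count, nullCount, sizeInBytes)
    }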
spark-sql_2.10-1.3.0.jar, CompressionScheme.class
package org.apache.spark.sql.columnar.compression
CompressionScheme.encoder ( org.apache.spark.sql.columnar.NativeColumnType<T> p1 ) [abstract] : Encoder<T>
[mangled: org/apache/spark/sql/columnar/compression/CompressionScheme.encoder:(Lorg/apache/spark/sql/columnar/NativeColumnType;)Lorg/apache/spark/sql/columnar/compression/Encoder;]
spark-sql_2.10-1.3.0.jar, CreatableRelationProvider.class
package org.apache.spark.sql.sources
CreatableRelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, org.apache.spark.sql.SaveMode p2, scala.collection.immutable.Map<String,String> p3, org.apache.spark.sql.DataFrame p4 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/CreatableRelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/sources/BaseRelation;]
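CreatableRelationProvider is the 1.3.0 extension point for data sources that accept writes: the engine passes the context, the SaveMode, the user's options, and the DataFrame to persist, and the provider returns the relation it wrote. A skeletal sketch; the write itself is elided, and the anonymous BaseRelation (which must supply sqlContext and schema) is a stub:

    import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
    import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider}
    import org.apache.spark.sql.types.StructType

    class ExampleProvider extends CreatableRelationProvider {
      override def createRelation(
          sqlContext: SQLContext,
          mode: SaveMode,
          parameters: Map[String, String],
          data: DataFrame): BaseRelation = {
        // A real provider would honor `mode` and write `data` out here.
        val ctx = sqlContext
        new BaseRelation {
          override def sqlContext: SQLContext = ctx
          override def schema: StructType = data.schema
        }
      }
    }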
spark-sql_2.10-1.3.0.jar, CreateTableUsing.class
package org.apache.spark.sql.sources
CreateTableUsing.allowExisting ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.allowExisting:()Z]
CreateTableUsing.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.canEqual:(Ljava/lang/Object;)Z]
CreateTableUsing.copy ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, boolean temporary, scala.collection.immutable.Map<String,String> options, boolean allowExisting, boolean managedIfNoPath ) : CreateTableUsing
[mangled: org/apache/spark/sql/sources/CreateTableUsing.copy:(Ljava/lang/String;Lscala/Option;Ljava/lang/String;ZLscala/collection/immutable/Map;ZZ)Lorg/apache/spark/sql/sources/CreateTableUsing;]
CreateTableUsing.CreateTableUsing ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, boolean temporary, scala.collection.immutable.Map<String,String> options, boolean allowExisting, boolean managedIfNoPath )
[mangled: org/apache/spark/sql/sources/CreateTableUsing."<init>":(Ljava/lang/String;Lscala/Option;Ljava/lang/String;ZLscala/collection/immutable/Map;ZZ)V]
CreateTableUsing.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.types.StructType>,scala.Function1<String,scala.Function1<Object,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<Object,scala.Function1<Object,CreateTableUsing>>>>>>>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.curried:()Lscala/Function1;]
CreateTableUsing.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.equals:(Ljava/lang/Object;)Z]
CreateTableUsing.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsing.hashCode:()I]
CreateTableUsing.managedIfNoPath ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.managedIfNoPath:()Z]
CreateTableUsing.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.options:()Lscala/collection/immutable/Map;]
CreateTableUsing.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productArity:()I]
CreateTableUsing.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productElement:(I)Ljava/lang/Object;]
CreateTableUsing.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productIterator:()Lscala/collection/Iterator;]
CreateTableUsing.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productPrefix:()Ljava/lang/String;]
CreateTableUsing.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.provider:()Ljava/lang/String;]
CreateTableUsing.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.tableName:()Ljava/lang/String;]
CreateTableUsing.temporary ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.temporary:()Z]
CreateTableUsing.tupled ( ) [static] : scala.Function1<scala.Tuple7<String,scala.Option<org.apache.spark.sql.types.StructType>,String,Object,scala.collection.immutable.Map<String,String>,Object,Object>,CreateTableUsing>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.tupled:()Lscala/Function1;]
CreateTableUsing.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.userSpecifiedSchema:()Lscala/Option;]
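CreateTableUsing is a plain case class that carries everything the planner needs for a CREATE TABLE ... USING ... statement; nothing executes until the planning strategy turns it into a command. Constructing one directly, as a sketch (real instances come out of the DDL parser, and the path option here is hypothetical):

    import org.apache.spark.sql.sources.CreateTableUsing

    val create = new CreateTableUsing(
      tableName = "events",
      userSpecifiedSchema = None,              // let the provider infer the schema
      provider = "parquet",
      temporary = false,
      options = Map("path" -> "/data/events"),
      allowExisting = false,
      managedIfNoPath = false)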
spark-sql_2.10-1.3.0.jar, CreateTableUsingAsSelect.class
package org.apache.spark.sql.sources
CreateTableUsingAsSelect.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.canEqual:(Ljava/lang/Object;)Z]
CreateTableUsingAsSelect.child ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.child:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CreateTableUsingAsSelect.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CreateTableUsingAsSelect.copy ( String tableName, String provider, boolean temporary, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child ) : CreateTableUsingAsSelect
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.copy:(Ljava/lang/String;Ljava/lang/String;ZLorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/sources/CreateTableUsingAsSelect;]
CreateTableUsingAsSelect.CreateTableUsingAsSelect ( String tableName, String provider, boolean temporary, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child )
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect."<init>":(Ljava/lang/String;Ljava/lang/String;ZLorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CreateTableUsingAsSelect.curried ( ) [static] : scala.Function1<String,scala.Function1<String,scala.Function1<Object,scala.Function1<org.apache.spark.sql.SaveMode,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,CreateTableUsingAsSelect>>>>>>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.curried:()Lscala/Function1;]
CreateTableUsingAsSelect.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.equals:(Ljava/lang/Object;)Z]
CreateTableUsingAsSelect.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.hashCode:()I]
CreateTableUsingAsSelect.mode ( ) : org.apache.spark.sql.SaveMode
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.mode:()Lorg/apache/spark/sql/SaveMode;]
CreateTableUsingAsSelect.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.options:()Lscala/collection/immutable/Map;]
CreateTableUsingAsSelect.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.output:()Lscala/collection/Seq;]
CreateTableUsingAsSelect.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productArity:()I]
CreateTableUsingAsSelect.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productElement:(I)Ljava/lang/Object;]
CreateTableUsingAsSelect.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productIterator:()Lscala/collection/Iterator;]
CreateTableUsingAsSelect.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productPrefix:()Ljava/lang/String;]
CreateTableUsingAsSelect.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.provider:()Ljava/lang/String;]
CreateTableUsingAsSelect.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.tableName:()Ljava/lang/String;]
CreateTableUsingAsSelect.temporary ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.temporary:()Z]
CreateTableUsingAsSelect.tupled ( ) [static] : scala.Function1<scala.Tuple6<String,String,Object,org.apache.spark.sql.SaveMode,scala.collection.immutable.Map<String,String>,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,CreateTableUsingAsSelect>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.tupled:()Lscala/Function1;]
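Because these plan nodes are case classes, the generated copy above supports cheap field-wise rewrites, which is how planner rules typically produce modified nodes. For example, flipping only the save mode of an assumed existing instance ctas:

    import org.apache.spark.sql.SaveMode

    val overwriting = ctas.copy(mode = SaveMode.Overwrite)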
spark-sql_2.10-1.3.0.jar, CreateTempTableUsing.class
package org.apache.spark.sql.sources
CreateTempTableUsing.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.canEqual:(Ljava/lang/Object;)Z]
CreateTempTableUsing.copy ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options ) : CreateTempTableUsing
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.copy:(Ljava/lang/String;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/CreateTempTableUsing;]
CreateTempTableUsing.CreateTempTableUsing ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options )
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing."<init>":(Ljava/lang/String;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)V]
CreateTempTableUsing.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.types.StructType>,scala.Function1<String,scala.Function1<scala.collection.immutable.Map<String,String>,CreateTempTableUsing>>>>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.curried:()Lscala/Function1;]
CreateTempTableUsing.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.equals:(Ljava/lang/Object;)Z]
CreateTempTableUsing.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.hashCode:()I]
CreateTempTableUsing.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.options:()Lscala/collection/immutable/Map;]
CreateTempTableUsing.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productArity:()I]
CreateTempTableUsing.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productElement:(I)Ljava/lang/Object;]
CreateTempTableUsing.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productIterator:()Lscala/collection/Iterator;]
CreateTempTableUsing.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productPrefix:()Ljava/lang/String;]
CreateTempTableUsing.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.provider:()Ljava/lang/String;]
CreateTempTableUsing.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<scala.runtime.Nothing.>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CreateTempTableUsing.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.tableName:()Ljava/lang/String;]
CreateTempTableUsing.tupled ( ) [static] : scala.Function1<scala.Tuple4<String,scala.Option<org.apache.spark.sql.types.StructType>,String,scala.collection.immutable.Map<String,String>>,CreateTempTableUsing>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.tupled:()Lscala/Function1;]
CreateTempTableUsing.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.userSpecifiedSchema:()Lscala/Option;]
spark-sql_2.10-1.3.0.jar, CreateTempTableUsingAsSelect.class
package org.apache.spark.sql.sources
CreateTempTableUsingAsSelect.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.canEqual:(Ljava/lang/Object;)Z]
CreateTempTableUsingAsSelect.copy ( String tableName, String provider, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query ) : CreateTempTableUsingAsSelect
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.copy:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/sources/CreateTempTableUsingAsSelect;]
CreateTempTableUsingAsSelect.CreateTempTableUsingAsSelect ( String tableName, String provider, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query )
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect."<init>":(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CreateTempTableUsingAsSelect.curried ( ) [static] : scala.Function1<String,scala.Function1<String,scala.Function1<org.apache.spark.sql.SaveMode,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,CreateTempTableUsingAsSelect>>>>>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.curried:()Lscala/Function1;]
CreateTempTableUsingAsSelect.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.equals:(Ljava/lang/Object;)Z]
CreateTempTableUsingAsSelect.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.hashCode:()I]
CreateTempTableUsingAsSelect.mode ( ) : org.apache.spark.sql.SaveMode
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.mode:()Lorg/apache/spark/sql/SaveMode;]
CreateTempTableUsingAsSelect.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.options:()Lscala/collection/immutable/Map;]
CreateTempTableUsingAsSelect.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productArity:()I]
CreateTempTableUsingAsSelect.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productElement:(I)Ljava/lang/Object;]
CreateTempTableUsingAsSelect.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productIterator:()Lscala/collection/Iterator;]
CreateTempTableUsingAsSelect.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productPrefix:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.provider:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.query ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.query:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CreateTempTableUsingAsSelect.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<scala.runtime.Nothing.>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CreateTempTableUsingAsSelect.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.tableName:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.tupled ( ) [static] : scala.Function1<scala.Tuple5<String,String,org.apache.spark.sql.SaveMode,scala.collection.immutable.Map<String,String>,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,CreateTempTableUsingAsSelect>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, DataFrame.class
package org.apache.spark.sql
DataFrame.agg ( java.util.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( Column expr, Column... exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lorg/apache/spark/sql/Column;[Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( Column expr, scala.collection.Seq<Column> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lorg/apache/spark/sql/Column;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( scala.collection.immutable.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( scala.Tuple2<String,String> aggExpr, scala.collection.Seq<scala.Tuple2<String,String>> aggExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lscala/Tuple2;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.apply ( String colName ) : Column
[mangled: org/apache/spark/sql/DataFrame.apply:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
DataFrame.as ( scala.Symbol alias ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.as:(Lscala/Symbol;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.as ( String alias ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.as:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.cache ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.cache:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.cache ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.cache:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.col ( String colName ) : Column
[mangled: org/apache/spark/sql/DataFrame.col:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
DataFrame.collect ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.collect:()Ljava/lang/Object;]
DataFrame.collect ( ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.collect:()[Lorg/apache/spark/sql/Row;]
DataFrame.collectAsList ( ) : java.util.List<Row>
[mangled: org/apache/spark/sql/DataFrame.collectAsList:()Ljava/util/List;]
DataFrame.columns ( ) : String[ ]
[mangled: org/apache/spark/sql/DataFrame.columns:()[Ljava/lang/String;]
DataFrame.count ( ) : long
[mangled: org/apache/spark/sql/DataFrame.count:()J]
DataFrame.createJDBCTable ( String url, String table, boolean allowExisting ) : void
[mangled: org/apache/spark/sql/DataFrame.createJDBCTable:(Ljava/lang/String;Ljava/lang/String;Z)V]
DataFrame.DataFrame ( SQLContext sqlContext, catalyst.plans.logical.LogicalPlan logicalPlan )
[mangled: org/apache/spark/sql/DataFrame."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
DataFrame.DataFrame ( SQLContext sqlContext, SQLContext.QueryExecution queryExecution )
[mangled: org/apache/spark/sql/DataFrame."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/SQLContext$QueryExecution;)V]
DataFrame.distinct ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.distinct:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.dtypes ( ) : scala.Tuple2<String,String>[ ]
[mangled: org/apache/spark/sql/DataFrame.dtypes:()[Lscala/Tuple2;]
DataFrame.except ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.except:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.explain ( ) : void
[mangled: org/apache/spark/sql/DataFrame.explain:()V]
DataFrame.explain ( boolean extended ) : void
[mangled: org/apache/spark/sql/DataFrame.explain:(Z)V]
DataFrame.explode ( scala.collection.Seq<Column> input, scala.Function1<Row,scala.collection.TraversableOnce<A>> f, scala.reflect.api.TypeTags.TypeTag<A> p3 ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.explode:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.explode ( String inputColumn, String outputColumn, scala.Function1<A,scala.collection.TraversableOnce<B>> f, scala.reflect.api.TypeTags.TypeTag<B> p4 ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.explode:(Ljava/lang/String;Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.filter ( Column condition ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.filter:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.filter ( String conditionExpr ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.filter:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.first ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.first:()Ljava/lang/Object;]
DataFrame.first ( ) : Row
[mangled: org/apache/spark/sql/DataFrame.first:()Lorg/apache/spark/sql/Row;]
DataFrame.flatMap ( scala.Function1<Row,scala.collection.TraversableOnce<R>> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.flatMap:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.foreach ( scala.Function1<Row,scala.runtime.BoxedUnit> f ) : void
[mangled: org/apache/spark/sql/DataFrame.foreach:(Lscala/Function1;)V]
DataFrame.foreachPartition ( scala.Function1<scala.collection.Iterator<Row>,scala.runtime.BoxedUnit> f ) : void
[mangled: org/apache/spark/sql/DataFrame.foreachPartition:(Lscala/Function1;)V]
DataFrame.groupBy ( Column... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( scala.collection.Seq<Column> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( String col1, scala.collection.Seq<String> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( String col1, String... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.head ( ) : Row
[mangled: org/apache/spark/sql/DataFrame.head:()Lorg/apache/spark/sql/Row;]
DataFrame.head ( int n ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.head:(I)[Lorg/apache/spark/sql/Row;]
DataFrame.insertInto ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.insertInto:(Ljava/lang/String;)V]
DataFrame.insertInto ( String tableName, boolean overwrite ) : void
[mangled: org/apache/spark/sql/DataFrame.insertInto:(Ljava/lang/String;Z)V]
DataFrame.insertIntoJDBC ( String url, String table, boolean overwrite ) : void
[mangled: org/apache/spark/sql/DataFrame.insertIntoJDBC:(Ljava/lang/String;Ljava/lang/String;Z)V]
DataFrame.intersect ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.intersect:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.isLocal ( ) : boolean
[mangled: org/apache/spark/sql/DataFrame.isLocal:()Z]
DataFrame.javaRDD ( ) : org.apache.spark.api.java.JavaRDD<Row>
[mangled: org/apache/spark/sql/DataFrame.javaRDD:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.javaToPython ( ) : org.apache.spark.api.java.JavaRDD<byte[ ]>
[mangled: org/apache/spark/sql/DataFrame.javaToPython:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.join ( DataFrame right ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.join ( DataFrame right, Column joinExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.join ( DataFrame right, Column joinExprs, String joinType ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Lorg/apache/spark/sql/Column;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.limit ( int n ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.limit:(I)Lorg/apache/spark/sql/DataFrame;]
DataFrame.logicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/DataFrame.logicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
DataFrame.map ( scala.Function1<Row,R> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.map:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.mapPartitions ( scala.Function1<scala.collection.Iterator<Row>,scala.collection.Iterator<R>> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.mapPartitions:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.numericColumns ( ) : scala.collection.Seq<catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/DataFrame.numericColumns:()Lscala/collection/Seq;]
DataFrame.orderBy ( Column... sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( scala.collection.Seq<Column> sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( String sortCol, scala.collection.Seq<String> sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( String sortCol, String... sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.persist:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.persist ( org.apache.spark.storage.StorageLevel newLevel ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( org.apache.spark.storage.StorageLevel newLevel ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/RDDApi;]
DataFrame.printSchema ( ) : void
[mangled: org/apache/spark/sql/DataFrame.printSchema:()V]
DataFrame.queryExecution ( ) : SQLContext.QueryExecution
[mangled: org/apache/spark/sql/DataFrame.queryExecution:()Lorg/apache/spark/sql/SQLContext$QueryExecution;]
DataFrame.rdd ( ) : org.apache.spark.rdd.RDD<Row>
[mangled: org/apache/spark/sql/DataFrame.rdd:()Lorg/apache/spark/rdd/RDD;]
DataFrame.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.registerTempTable:(Ljava/lang/String;)V]
DataFrame.repartition ( int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.repartition:(I)Lorg/apache/spark/sql/DataFrame;]
DataFrame.resolve ( String colName ) : catalyst.expressions.NamedExpression
[mangled: org/apache/spark/sql/DataFrame.resolve:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/expressions/NamedExpression;]
DataFrame.sample ( boolean withReplacement, double fraction ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sample:(ZD)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sample ( boolean withReplacement, double fraction, long seed ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sample:(ZDJ)Lorg/apache/spark/sql/DataFrame;]
DataFrame.save ( String path ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;)V]
DataFrame.save ( String path, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.save ( String path, String source ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Ljava/lang/String;)V]
DataFrame.save ( String path, String source, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.save ( String source, SaveMode mode, java.util.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Ljava/util/Map;)V]
DataFrame.save ( String source, SaveMode mode, scala.collection.immutable.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;)V]
DataFrame.saveAsParquetFile ( String path ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsParquetFile:(Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.saveAsTable ( String tableName, String source ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode, java.util.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Ljava/util/Map;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode, scala.collection.immutable.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;)V]
DataFrame.schema ( ) : types.StructType
[mangled: org/apache/spark/sql/DataFrame.schema:()Lorg/apache/spark/sql/types/StructType;]
DataFrame.select ( Column... cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( scala.collection.Seq<Column> cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( String col, scala.collection.Seq<String> cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( String col, String... cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.selectExpr ( scala.collection.Seq<String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.selectExpr:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.selectExpr ( String... exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.selectExpr:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.show ( ) : void
[mangled: org/apache/spark/sql/DataFrame.show:()V]
DataFrame.show ( int numRows ) : void
[mangled: org/apache/spark/sql/DataFrame.show:(I)V]
DataFrame.showString ( int numRows ) : String
[mangled: org/apache/spark/sql/DataFrame.showString:(I)Ljava/lang/String;]
DataFrame.sort ( Column... sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( scala.collection.Seq<Column> sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( String sortCol, scala.collection.Seq<String> sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( String sortCol, String... sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sqlContext ( ) : SQLContext
[mangled: org/apache/spark/sql/DataFrame.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
DataFrame.take ( int n ) : Object
[mangled: org/apache/spark/sql/DataFrame.take:(I)Ljava/lang/Object;]
DataFrame.take ( int n ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.take:(I)[Lorg/apache/spark/sql/Row;]
DataFrame.toDF ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.toDF ( scala.collection.Seq<String> colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.toDF ( String... colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.toJavaRDD ( ) : org.apache.spark.api.java.JavaRDD<Row>
[mangled: org/apache/spark/sql/DataFrame.toJavaRDD:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.toJSON ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/DataFrame.toJSON:()Lorg/apache/spark/rdd/RDD;]
DataFrame.toString ( ) : String
[mangled: org/apache/spark/sql/DataFrame.toString:()Ljava/lang/String;]
DataFrame.unionAll ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unionAll:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unpersist:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.unpersist ( boolean blocking ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unpersist:(Z)Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( boolean blocking ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:(Z)Lorg/apache/spark/sql/RDDApi;]
DataFrame.where ( Column condition ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.where:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.withColumn ( String colName, Column col ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.withColumn:(Ljava/lang/String;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.withColumnRenamed ( String existingName, String newName ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.withColumnRenamed:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
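The DataFrame block above is the core of the incompatibility: the class itself does not exist in 1.1.0 (where SchemaRDD fills this role), so a client built against 1.3.0 hits NoClassDefFoundError or NoSuchMethodError on any of these signatures. A minimal sketch of such client code — sqlContext, parquetFile, and the file paths are illustrative assumptions, not entries from this report:

    import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

    def summarize(sqlContext: SQLContext): Unit = {
      // parquetFile is assumed as the 1.3.0 entry point returning a DataFrame.
      val df: DataFrame = sqlContext.parquetFile("people.parquet")

      df.printSchema()                               // printSchema:()V
      val adults = df.filter("age >= 18")            // filter:(Ljava/lang/String;)...
      adults
        .select("name", "age")                       // select:(Ljava/lang/String;[Ljava/lang/String;)...
        .withColumnRenamed("name", "fullName")       // withColumnRenamed:(...)...
        .save("adults.parquet", SaveMode.Overwrite)  // save:(Ljava/lang/String;...SaveMode;)V
    }

Every call in the body maps to a removed signature listed above, so none of it links against spark-sql_2.10-1.1.0.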
spark-sql_2.10-1.3.0.jar, DataFrameHolder.class
package org.apache.spark.sql
DataFrameHolder.andThen ( scala.Function1<DataFrameHolder,A> p1 ) [static] : scala.Function1<DataFrame,A>
[mangled: org/apache/spark/sql/DataFrameHolder.andThen:(Lscala/Function1;)Lscala/Function1;]
DataFrameHolder.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/DataFrameHolder.canEqual:(Ljava/lang/Object;)Z]
DataFrameHolder.compose ( scala.Function1<A,DataFrame> p1 ) [static] : scala.Function1<A,DataFrameHolder>
[mangled: org/apache/spark/sql/DataFrameHolder.compose:(Lscala/Function1;)Lscala/Function1;]
DataFrameHolder.copy ( DataFrame df ) : DataFrameHolder
[mangled: org/apache/spark/sql/DataFrameHolder.copy:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrameHolder;]
DataFrameHolder.DataFrameHolder ( DataFrame df )
[mangled: org/apache/spark/sql/DataFrameHolder."<init>":(Lorg/apache/spark/sql/DataFrame;)V]
DataFrameHolder.df ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.df:()Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/DataFrameHolder.equals:(Ljava/lang/Object;)Z]
DataFrameHolder.hashCode ( ) : int
[mangled: org/apache/spark/sql/DataFrameHolder.hashCode:()I]
DataFrameHolder.productArity ( ) : int
[mangled: org/apache/spark/sql/DataFrameHolder.productArity:()I]
DataFrameHolder.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/DataFrameHolder.productElement:(I)Ljava/lang/Object;]
DataFrameHolder.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/DataFrameHolder.productIterator:()Lscala/collection/Iterator;]
DataFrameHolder.productPrefix ( ) : String
[mangled: org/apache/spark/sql/DataFrameHolder.productPrefix:()Ljava/lang/String;]
DataFrameHolder.toDF ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.toDF:()Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.toDF ( scala.collection.Seq<String> colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.toDF:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.toString ( ) : String
[mangled: org/apache/spark/sql/DataFrameHolder.toString:()Ljava/lang/String;]
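DataFrameHolder is the helper case class behind the 1.3.0 toDF pattern. A short sketch of how client code reaches it — the Person case class and the sqlContext.implicits._ import are assumptions for illustration:

    import org.apache.spark.SparkContext
    import org.apache.spark.sql.{DataFrame, SQLContext}

    case class Person(name: String, age: Int)

    def toDataFrame(sc: SparkContext, sqlContext: SQLContext): DataFrame = {
      import sqlContext.implicits._  // supplies the RDD -> DataFrameHolder conversion in 1.3.0
      // toDF(colNames) is DataFrameHolder.toDF:(Lscala/collection/Seq;)... listed above.
      sc.parallelize(Seq(Person("Ada", 36))).toDF("name", "age")
    }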
spark-sql_2.10-1.3.0.jar, DateColumnAccessor.class
package org.apache.spark.sql.columnar
DateColumnAccessor.DateColumnAccessor ( java.nio.ByteBuffer buffer )
[mangled: org/apache/spark/sql/columnar/DateColumnAccessor."<init>":(Ljava/nio/ByteBuffer;)V]
spark-sql_2.10-1.3.0.jar, DateColumnBuilder.class
package org.apache.spark.sql.columnar
DateColumnBuilder.DateColumnBuilder ( )
[mangled: org/apache/spark/sql/columnar/DateColumnBuilder."<init>":()V]
spark-sql_2.10-1.3.0.jar, DateColumnStats.class
package org.apache.spark.sql.columnar
DateColumnStats.DateColumnStats ( )
[mangled: org/apache/spark/sql/columnar/DateColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, DDLParser.class
package org.apache.spark.sql.sources
DDLParser.apply ( String input, boolean exceptionOnError ) : scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/sources/DDLParser.apply:(Ljava/lang/String;Z)Lscala/Option;]
DDLParser.DDLParser ( scala.Function1<String,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> parseQuery )
[mangled: org/apache/spark/sql/sources/DDLParser."<init>":(Lscala/Function1;)V]
spark-sql_2.10-1.3.0.jar, Decoder<T>.class
package org.apache.spark.sql.columnar.compression
Decoder<T>.hasNext ( ) [abstract] : boolean
[mangled: org/apache/spark/sql/columnar/compression/Decoder<T>.hasNext:()Z]
Decoder<T>.next ( org.apache.spark.sql.catalyst.expressions.MutableRow p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Decoder<T>.next:(Lorg/apache/spark/sql/catalyst/expressions/MutableRow;I)V]
spark-sql_2.10-1.3.0.jar, DescribeCommand.class
package org.apache.spark.sql.execution
DescribeCommand.copy ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean isExtended ) : DescribeCommand
[mangled: org/apache/spark/sql/execution/DescribeCommand.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Z)Lorg/apache/spark/sql/execution/DescribeCommand;]
DescribeCommand.curried ( ) [static] : scala.Function1<SparkPlan,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<Object,DescribeCommand>>>
[mangled: org/apache/spark/sql/execution/DescribeCommand.curried:()Lscala/Function1;]
DescribeCommand.DescribeCommand ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean isExtended )
[mangled: org/apache/spark/sql/execution/DescribeCommand."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Z)V]
DescribeCommand.isExtended ( ) : boolean
[mangled: org/apache/spark/sql/execution/DescribeCommand.isExtended:()Z]
DescribeCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/DescribeCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
DescribeCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<SparkPlan,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,Object>,DescribeCommand>
[mangled: org/apache/spark/sql/execution/DescribeCommand.tupled:()Lscala/Function1;]
package org.apache.spark.sql.sources
DescribeCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.canEqual:(Ljava/lang/Object;)Z]
DescribeCommand.copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan table, boolean isExtended ) : DescribeCommand
[mangled: org/apache/spark/sql/sources/DescribeCommand.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)Lorg/apache/spark/sql/sources/DescribeCommand;]
DescribeCommand.curried ( ) [static] : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<Object,DescribeCommand>>
[mangled: org/apache/spark/sql/sources/DescribeCommand.curried:()Lscala/Function1;]
DescribeCommand.DescribeCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan table, boolean isExtended )
[mangled: org/apache/spark/sql/sources/DescribeCommand."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)V]
DescribeCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.equals:(Ljava/lang/Object;)Z]
DescribeCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/DescribeCommand.hashCode:()I]
DescribeCommand.isExtended ( ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.isExtended:()Z]
DescribeCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/DescribeCommand.output:()Lscala/collection/Seq;]
DescribeCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/DescribeCommand.productArity:()I]
DescribeCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/DescribeCommand.productElement:(I)Ljava/lang/Object;]
DescribeCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/DescribeCommand.productIterator:()Lscala/collection/Iterator;]
DescribeCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/DescribeCommand.productPrefix:()Ljava/lang/String;]
DescribeCommand.table ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/DescribeCommand.table:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
DescribeCommand.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,Object>,DescribeCommand>
[mangled: org/apache/spark/sql/sources/DescribeCommand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, DriverQuirks.class
package org.apache.spark.sql.jdbc
DriverQuirks.DriverQuirks ( )
[mangled: org/apache/spark/sql/jdbc/DriverQuirks."<init>":()V]
DriverQuirks.get ( String p1 ) [static] : DriverQuirks
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.get:(Ljava/lang/String;)Lorg/apache/spark/sql/jdbc/DriverQuirks;]
DriverQuirks.getCatalystType ( int p1, String p2, int p3, org.apache.spark.sql.types.MetadataBuilder p4 ) [abstract] : org.apache.spark.sql.types.DataType
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.getCatalystType:(ILjava/lang/String;ILorg/apache/spark/sql/types/MetadataBuilder;)Lorg/apache/spark/sql/types/DataType;]
DriverQuirks.getJDBCType ( org.apache.spark.sql.types.DataType p1 ) [abstract] : scala.Tuple2<String,scala.Option<Object>>
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.getJDBCType:(Lorg/apache/spark/sql/types/DataType;)Lscala/Tuple2;]
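DriverQuirks is the 1.3.0 hook for per-driver JDBC type mapping. The erased signatures above do not pin down the Scala element type inside the Option returned by getJDBCType, so the sketch below mirrors the shape with a local trait instead of extending the real class; every name and the null-means-defer convention are assumptions:

    import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType}

    // Local stand-in mirroring DriverQuirks' two abstract members listed above.
    trait QuirksShape {
      def getCatalystType(sqlType: Int, typeName: String, size: Int,
                          md: MetadataBuilder): DataType
      def getJDBCType(dt: DataType): (String, Option[Int])
    }

    object MyVendorQuirks extends QuirksShape {
      // Vendor SQL type -> Catalyst type; returning null is assumed to defer
      // to the built-in mapping (not stated in this report).
      def getCatalystType(sqlType: Int, typeName: String, size: Int,
                          md: MetadataBuilder): DataType =
        if (typeName == "MYTEXT") StringType else null

      // Catalyst type -> (vendor SQL type name, JDBC type constant).
      def getJDBCType(dt: DataType): (String, Option[Int]) =
        if (dt == StringType) ("MYTEXT", Some(java.sql.Types.CLOB)) else (null, None)
    }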
spark-sql_2.10-1.3.0.jar, Encoder<T>.class
package org.apache.spark.sql.columnar.compression
Encoder<T>.compress ( java.nio.ByteBuffer p1, java.nio.ByteBuffer p2 ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.compress:(Ljava/nio/ByteBuffer;Ljava/nio/ByteBuffer;)Ljava/nio/ByteBuffer;]
Encoder<T>.gatherCompressibilityStats ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.gatherCompressibilityStats:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, EqualTo.class
package org.apache.spark.sql.sources
EqualTo.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.attribute:()Ljava/lang/String;]
EqualTo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/EqualTo.canEqual:(Ljava/lang/Object;)Z]
EqualTo.copy ( String attribute, Object value ) : EqualTo
[mangled: org/apache/spark/sql/sources/EqualTo.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/EqualTo;]
EqualTo.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,EqualTo>>
[mangled: org/apache/spark/sql/sources/EqualTo.curried:()Lscala/Function1;]
EqualTo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/EqualTo.equals:(Ljava/lang/Object;)Z]
EqualTo.EqualTo ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/EqualTo."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
EqualTo.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/EqualTo.hashCode:()I]
EqualTo.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/EqualTo.productArity:()I]
EqualTo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/EqualTo.productElement:(I)Ljava/lang/Object;]
EqualTo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/EqualTo.productIterator:()Lscala/collection/Iterator;]
EqualTo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.productPrefix:()Ljava/lang/String;]
EqualTo.toString ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.toString:()Ljava/lang/String;]
EqualTo.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,EqualTo>
[mangled: org/apache/spark/sql/sources/EqualTo.tupled:()Lscala/Function1;]
EqualTo.value ( ) : Object
[mangled: org/apache/spark/sql/sources/EqualTo.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, EvaluatePython.class
package org.apache.spark.sql.execution
EvaluatePython.copy ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute ) : EvaluatePython
[mangled: org/apache/spark/sql/execution/EvaluatePython.copy:(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)Lorg/apache/spark/sql/execution/EvaluatePython;]
EvaluatePython.EvaluatePython ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute )
[mangled: org/apache/spark/sql/execution/EvaluatePython."<init>":(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)V]
EvaluatePython.fromJava ( Object p1, org.apache.spark.sql.types.DataType p2 ) [static] : Object
[mangled: org/apache/spark/sql/execution/EvaluatePython.fromJava:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Ljava/lang/Object;]
EvaluatePython.references ( ) : org.apache.spark.sql.catalyst.expressions.AttributeSet
[mangled: org/apache/spark/sql/execution/EvaluatePython.references:()Lorg/apache/spark/sql/catalyst/expressions/AttributeSet;]
EvaluatePython.rowToArray ( org.apache.spark.sql.Row p1, scala.collection.Seq<org.apache.spark.sql.types.DataType> p2 ) [static] : Object[ ]
[mangled: org/apache/spark/sql/execution/EvaluatePython.rowToArray:(Lorg/apache/spark/sql/Row;Lscala/collection/Seq;)[Ljava/lang/Object;]
EvaluatePython.toJava ( Object p1, org.apache.spark.sql.types.DataType p2 ) [static] : Object
[mangled: org/apache/spark/sql/execution/EvaluatePython.toJava:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, ExamplePoint.class
package org.apache.spark.sql.test
ExamplePoint.ExamplePoint ( double x, double y )
[mangled: org/apache/spark/sql/test/ExamplePoint."<init>":(DD)V]
ExamplePoint.x ( ) : double
[mangled: org/apache/spark/sql/test/ExamplePoint.x:()D]
ExamplePoint.y ( ) : double
[mangled: org/apache/spark/sql/test/ExamplePoint.y:()D]
spark-sql_2.10-1.3.0.jar, Exchange.class
package org.apache.spark.sql.execution
Exchange.Exchange..bypassMergeThreshold ( ) : int
[mangled: org/apache/spark/sql/execution/Exchange.org.apache.spark.sql.execution.Exchange..bypassMergeThreshold:()I]
spark-sql_2.10-1.3.0.jar, ExecutedCommand.class
package org.apache.spark.sql.execution
ExecutedCommand.andThen ( scala.Function1<ExecutedCommand,A> p1 ) [static] : scala.Function1<RunnableCommand,A>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
ExecutedCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExecutedCommand.canEqual:(Ljava/lang/Object;)Z]
ExecutedCommand.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/ExecutedCommand.children:()Lscala/collection/immutable/Nil$;]
ExecutedCommand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExecutedCommand.children:()Lscala/collection/Seq;]
ExecutedCommand.cmd ( ) : RunnableCommand
[mangled: org/apache/spark/sql/execution/ExecutedCommand.cmd:()Lorg/apache/spark/sql/execution/RunnableCommand;]
ExecutedCommand.compose ( scala.Function1<A,RunnableCommand> p1 ) [static] : scala.Function1<A,ExecutedCommand>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.compose:(Lscala/Function1;)Lscala/Function1;]
ExecutedCommand.copy ( RunnableCommand cmd ) : ExecutedCommand
[mangled: org/apache/spark/sql/execution/ExecutedCommand.copy:(Lorg/apache/spark/sql/execution/RunnableCommand;)Lorg/apache/spark/sql/execution/ExecutedCommand;]
ExecutedCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExecutedCommand.equals:(Ljava/lang/Object;)Z]
ExecutedCommand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.execute:()Lorg/apache/spark/rdd/RDD;]
ExecutedCommand.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/ExecutedCommand.executeCollect:()[Lorg/apache/spark/sql/Row;]
ExecutedCommand.ExecutedCommand ( RunnableCommand cmd )
[mangled: org/apache/spark/sql/execution/ExecutedCommand."<init>":(Lorg/apache/spark/sql/execution/RunnableCommand;)V]
ExecutedCommand.executeTake ( int limit ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/ExecutedCommand.executeTake:(I)[Lorg/apache/spark/sql/Row;]
ExecutedCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ExecutedCommand.hashCode:()I]
ExecutedCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.output:()Lscala/collection/Seq;]
ExecutedCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productArity:()I]
ExecutedCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productElement:(I)Ljava/lang/Object;]
ExecutedCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productIterator:()Lscala/collection/Iterator;]
ExecutedCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productPrefix:()Ljava/lang/String;]
ExecutedCommand.sideEffectResult ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.sideEffectResult:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, Expand.class
package org.apache.spark.sql.execution
Expand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Expand.canEqual:(Ljava/lang/Object;)Z]
Expand.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Expand.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Expand.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Expand.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
Expand.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/Expand.children:()Lscala/collection/immutable/List;]
Expand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Expand.children:()Lscala/collection/Seq;]
Expand.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression> projections, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child ) : Expand
[mangled: org/apache/spark/sql/execution/Expand.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Expand;]
Expand.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<SparkPlan,Expand>>>
[mangled: org/apache/spark/sql/execution/Expand.curried:()Lscala/Function1;]
Expand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Expand.equals:(Ljava/lang/Object;)Z]
Expand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/Expand.execute:()Lorg/apache/spark/rdd/RDD;]
Expand.Expand ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression> projections, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child )
[mangled: org/apache/spark/sql/execution/Expand."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
Expand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/Expand.hashCode:()I]
Expand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Expand.output:()Lscala/collection/Seq;]
Expand.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/Expand.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
Expand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/Expand.productArity:()I]
Expand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/Expand.productElement:(I)Ljava/lang/Object;]
Expand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/Expand.productIterator:()Lscala/collection/Iterator;]
Expand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/Expand.productPrefix:()Ljava/lang/String;]
Expand.projections ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>
[mangled: org/apache/spark/sql/execution/Expand.projections:()Lscala/collection/Seq;]
Expand.tupled ( ) [static] : scala.Function1<scala.Tuple3<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SparkPlan>,Expand>
[mangled: org/apache/spark/sql/execution/Expand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ExperimentalMethods.class
package org.apache.spark.sql
ExperimentalMethods.ExperimentalMethods ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/ExperimentalMethods."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
ExperimentalMethods.extraStrategies ( ) : scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
[mangled: org/apache/spark/sql/ExperimentalMethods.extraStrategies:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, ExplainCommand.class
package org.apache.spark.sql.execution
ExplainCommand.copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended ) : ExplainCommand
[mangled: org/apache/spark/sql/execution/ExplainCommand.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;Z)Lorg/apache/spark/sql/execution/ExplainCommand;]
ExplainCommand.curried ( ) [static] : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<Object,ExplainCommand>>>
[mangled: org/apache/spark/sql/execution/ExplainCommand.curried:()Lscala/Function1;]
ExplainCommand.ExplainCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended )
[mangled: org/apache/spark/sql/execution/ExplainCommand."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;Z)V]
ExplainCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExplainCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
ExplainCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,Object>,ExplainCommand>
[mangled: org/apache/spark/sql/execution/ExplainCommand.tupled:()Lscala/Function1;]
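ExplainCommand is, as I recall 1.3.0's implementation (an inference, not something this listing states), the runnable plan behind the DataFrame.explain entries earlier in this section; from client code it is only reached indirectly:

    import org.apache.spark.sql.DataFrame

    def debugPlan(df: DataFrame): Unit = {
      df.explain()      // explain:()V  — physical plan only
      df.explain(true)  // explain:(Z)V — extended: logical and physical plans
    }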
spark-sql_2.10-1.3.0.jar, ExternalSort.class
package org.apache.spark.sql.execution
ExternalSort.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.canEqual:(Ljava/lang/Object;)Z]
ExternalSort.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/ExternalSort.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ExternalSort.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/ExternalSort.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
ExternalSort.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/ExternalSort.children:()Lscala/collection/immutable/List;]
ExternalSort.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExternalSort.children:()Lscala/collection/Seq;]
ExternalSort.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, boolean global, SparkPlan child ) : ExternalSort
[mangled: org/apache/spark/sql/execution/ExternalSort.copy:(Lscala/collection/Seq;ZLorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/ExternalSort;]
ExternalSort.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,scala.Function1<Object,scala.Function1<SparkPlan,ExternalSort>>>
[mangled: org/apache/spark/sql/execution/ExternalSort.curried:()Lscala/Function1;]
ExternalSort.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.equals:(Ljava/lang/Object;)Z]
ExternalSort.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExternalSort.execute:()Lorg/apache/spark/rdd/RDD;]
ExternalSort.ExternalSort ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, boolean global, SparkPlan child )
[mangled: org/apache/spark/sql/execution/ExternalSort."<init>":(Lscala/collection/Seq;ZLorg/apache/spark/sql/execution/SparkPlan;)V]
ExternalSort.global ( ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.global:()Z]
ExternalSort.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ExternalSort.hashCode:()I]
ExternalSort.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/ExternalSort.output:()Lscala/collection/Seq;]
ExternalSort.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/ExternalSort.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
ExternalSort.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ExternalSort.productArity:()I]
ExternalSort.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ExternalSort.productElement:(I)Ljava/lang/Object;]
ExternalSort.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ExternalSort.productIterator:()Lscala/collection/Iterator;]
ExternalSort.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ExternalSort.productPrefix:()Ljava/lang/String;]
ExternalSort.requiredChildDistribution ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution>
[mangled: org/apache/spark/sql/execution/ExternalSort.requiredChildDistribution:()Lscala/collection/Seq;]
ExternalSort.sortOrder ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>
[mangled: org/apache/spark/sql/execution/ExternalSort.sortOrder:()Lscala/collection/Seq;]
ExternalSort.tupled ( ) [static] : scala.Function1<scala.Tuple3<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,Object,SparkPlan>,ExternalSort>
[mangled: org/apache/spark/sql/execution/ExternalSort.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, Filter.class
package org.apache.spark.sql.sources
Filter.Filter ( )
[mangled: org/apache/spark/sql/sources/Filter."<init>":()V]
spark-sql_2.10-1.3.0.jar, GeneralHashedRelation.class
package org.apache.spark.sql.execution.joins
GeneralHashedRelation.GeneralHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>> hashTable )
[mangled: org/apache/spark/sql/execution/joins/GeneralHashedRelation."<init>":(Ljava/util/HashMap;)V]
spark-sql_2.10-1.3.0.jar, GenericColumnStats.class
package org.apache.spark.sql.columnar
GenericColumnStats.GenericColumnStats ( )
[mangled: org/apache/spark/sql/columnar/GenericColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, GreaterThan.class
package org.apache.spark.sql.sources
GreaterThan.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.attribute:()Ljava/lang/String;]
GreaterThan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThan.canEqual:(Ljava/lang/Object;)Z]
GreaterThan.copy ( String attribute, Object value ) : GreaterThan
[mangled: org/apache/spark/sql/sources/GreaterThan.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/GreaterThan;]
GreaterThan.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,GreaterThan>>
[mangled: org/apache/spark/sql/sources/GreaterThan.curried:()Lscala/Function1;]
GreaterThan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThan.equals:(Ljava/lang/Object;)Z]
GreaterThan.GreaterThan ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/GreaterThan."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
GreaterThan.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThan.hashCode:()I]
GreaterThan.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThan.productArity:()I]
GreaterThan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThan.productElement:(I)Ljava/lang/Object;]
GreaterThan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/GreaterThan.productIterator:()Lscala/collection/Iterator;]
GreaterThan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.productPrefix:()Ljava/lang/String;]
GreaterThan.toString ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.toString:()Ljava/lang/String;]
GreaterThan.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,GreaterThan>
[mangled: org/apache/spark/sql/sources/GreaterThan.tupled:()Lscala/Function1;]
GreaterThan.value ( ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThan.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, GreaterThanOrEqual.class
package org.apache.spark.sql.sources
GreaterThanOrEqual.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.attribute:()Ljava/lang/String;]
GreaterThanOrEqual.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.canEqual:(Ljava/lang/Object;)Z]
GreaterThanOrEqual.copy ( String attribute, Object value ) : GreaterThanOrEqual
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/GreaterThanOrEqual;]
GreaterThanOrEqual.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,GreaterThanOrEqual>>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.curried:()Lscala/Function1;]
GreaterThanOrEqual.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.equals:(Ljava/lang/Object;)Z]
GreaterThanOrEqual.GreaterThanOrEqual ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
GreaterThanOrEqual.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.hashCode:()I]
GreaterThanOrEqual.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productArity:()I]
GreaterThanOrEqual.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productElement:(I)Ljava/lang/Object;]
GreaterThanOrEqual.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productIterator:()Lscala/collection/Iterator;]
GreaterThanOrEqual.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productPrefix:()Ljava/lang/String;]
GreaterThanOrEqual.toString ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.toString:()Ljava/lang/String;]
GreaterThanOrEqual.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,GreaterThanOrEqual>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.tupled:()Lscala/Function1;]
GreaterThanOrEqual.value ( ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, GroupedData.class
package org.apache.spark.sql
GroupedData.agg ( java.util.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( Column expr, scala.collection.Seq<Column> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lorg/apache/spark/sql/Column;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( scala.collection.immutable.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( scala.Tuple2<String,String> aggExpr, scala.collection.Seq<scala.Tuple2<String,String>> aggExprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lscala/Tuple2;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.count ( ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.count:()Lorg/apache/spark/sql/DataFrame;]
GroupedData.GroupedData ( DataFrame df, scala.collection.Seq<catalyst.expressions.Expression> groupingExprs )
[mangled: org/apache/spark/sql/GroupedData."<init>":(Lorg/apache/spark/sql/DataFrame;Lscala/collection/Seq;)V]
spark-sql_2.10-1.3.0.jar, HashedRelation.class
package org.apache.spark.sql.execution.joins
HashedRelation.get ( org.apache.spark.sql.Row p1 ) [abstract] : org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashedRelation.get:(Lorg/apache/spark/sql/Row;)Lorg/apache/spark/util/collection/CompactBuffer;]
spark-sql_2.10-1.3.0.jar, HashJoin.class
package org.apache.spark.sql.execution.joins
HashJoin.buildKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildKeys:()Lscala/collection/Seq;]
HashJoin.buildPlan ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.buildSide ( ) [abstract] : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
HashJoin.buildSideKeyGenerator ( ) [abstract] : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
HashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> p1, HashedRelation p2 ) [abstract] : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
HashJoin.left ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.leftKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.leftKeys:()Lscala/collection/Seq;]
HashJoin.output ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.output:()Lscala/collection/Seq;]
HashJoin.right ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.rightKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.rightKeys:()Lscala/collection/Seq;]
HashJoin.streamedKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamedKeys:()Lscala/collection/Seq;]
HashJoin.streamedPlan ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.streamSideKeyGenerator ( ) [abstract] : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamSideKeyGenerator:()Lscala/Function0;]
spark-sql_2.10-1.3.0.jar, HashOuterJoin.class
package org.apache.spark.sql.execution.joins
HashOuterJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.canEqual:(Ljava/lang/Object;)Z]
HashOuterJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.children:()Lscala/collection/Seq;]
HashOuterJoin.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.condition:()Lscala/Option;]
HashOuterJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : HashOuterJoin
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/HashOuterJoin;]
HashOuterJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.catalyst.plans.JoinType,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,HashOuterJoin>>>>>>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.curried:()Lscala/Function1;]
HashOuterJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.equals:(Ljava/lang/Object;)Z]
HashOuterJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.execute:()Lorg/apache/spark/rdd/RDD;]
HashOuterJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.hashCode:()I]
HashOuterJoin.HashOuterJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
HashOuterJoin.joinType ( ) : org.apache.spark.sql.catalyst.plans.JoinType
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.joinType:()Lorg/apache/spark/sql/catalyst/plans/JoinType;]
HashOuterJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
HashOuterJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashOuterJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.leftKeys:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..boundCondition:()Lscala/Function1;]
HashOuterJoin.HashOuterJoin..buildHashTable ( scala.collection.Iterator<org.apache.spark.sql.Row> iter, org.apache.spark.sql.catalyst.expressions.package.Projection keyGenerator ) : java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..buildHashTable:(Lscala/collection/Iterator;Lorg/apache/spark/sql/catalyst/expressions/package$Projection;)Ljava/util/HashMap;]
HashOuterJoin.HashOuterJoin..DUMMY_LIST ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..DUMMY_LIST:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..EMPTY_LIST ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..EMPTY_LIST:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..fullOuterIterator ( org.apache.spark.sql.Row key, scala.collection.Iterable<org.apache.spark.sql.Row> leftIter, scala.collection.Iterable<org.apache.spark.sql.Row> rightIter, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..fullOuterIterator:(Lorg/apache/spark/sql/Row;Lscala/collection/Iterable;Lscala/collection/Iterable;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;)Lscala/collection/Iterator;]
HashOuterJoin.HashOuterJoin..leftNullRow ( ) : org.apache.spark.sql.catalyst.expressions.GenericRow
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..leftNullRow:()Lorg/apache/spark/sql/catalyst/expressions/GenericRow;]
HashOuterJoin.HashOuterJoin..leftOuterIterator ( org.apache.spark.sql.Row key, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow, scala.collection.Iterable<org.apache.spark.sql.Row> rightIter ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..leftOuterIterator:(Lorg/apache/spark/sql/Row;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;Lscala/collection/Iterable;)Lscala/collection/Iterator;]
HashOuterJoin.HashOuterJoin..rightNullRow ( ) : org.apache.spark.sql.catalyst.expressions.GenericRow
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..rightNullRow:()Lorg/apache/spark/sql/catalyst/expressions/GenericRow;]
HashOuterJoin.HashOuterJoin..rightOuterIterator ( org.apache.spark.sql.Row key, scala.collection.Iterable<org.apache.spark.sql.Row> leftIter, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..rightOuterIterator:(Lorg/apache/spark/sql/Row;Lscala/collection/Iterable;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;)Lscala/collection/Iterator;]
HashOuterJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.output:()Lscala/collection/Seq;]
HashOuterJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
HashOuterJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productArity:()I]
HashOuterJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productElement:(I)Ljava/lang/Object;]
HashOuterJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productIterator:()Lscala/collection/Iterator;]
HashOuterJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productPrefix:()Ljava/lang/String;]
HashOuterJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
HashOuterJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.requiredChildDistribution:()Lscala/collection/Seq;]
HashOuterJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
HashOuterJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashOuterJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.rightKeys:()Lscala/collection/Seq;]
HashOuterJoin.tupled ( ) [static] : scala.Function1<scala.Tuple6<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.catalyst.plans.JoinType,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,HashOuterJoin>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.tupled:()Lscala/Function1;]
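The HashOuterJoin entries above describe the hash-based operator that backs outer equi-joins in 1.3.0. A minimal user-level sketch that exercises it (assuming a local Spark 1.3.0 setup; the data and names are invented for illustration):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object HashOuterJoinDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val left  = sc.parallelize(Seq((1, "a"), (2, "b"))).toDF("id", "l")
    val right = sc.parallelize(Seq((2, "x"), (3, "y"))).toDF("id", "r")

    // A full outer equi-join; fullOuterIterator above pads unmatched keys
    // with leftNullRow/rightNullRow.
    val joined = left.join(right, left("id") === right("id"), "outer")
    joined.explain()  // the physical plan is expected to show HashOuterJoin
    joined.show()
    sc.stop()
  }
}
```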
spark-sql_2.10-1.3.0.jar, In.class
package org.apache.spark.sql.sources
In.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/In.attribute:()Ljava/lang/String;]
In.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/In.canEqual:(Ljava/lang/Object;)Z]
In.copy ( String attribute, Object[ ] values ) : In
[mangled: org/apache/spark/sql/sources/In.copy:(Ljava/lang/String;[Ljava/lang/Object;)Lorg/apache/spark/sql/sources/In;]
In.curried ( ) [static] : scala.Function1<String,scala.Function1<Object[ ],In>>
[mangled: org/apache/spark/sql/sources/In.curried:()Lscala/Function1;]
In.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/In.equals:(Ljava/lang/Object;)Z]
In.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/In.hashCode:()I]
In.In ( String attribute, Object[ ] values )
[mangled: org/apache/spark/sql/sources/In."<init>":(Ljava/lang/String;[Ljava/lang/Object;)V]
In.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/In.productArity:()I]
In.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/In.productElement:(I)Ljava/lang/Object;]
In.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/In.productIterator:()Lscala/collection/Iterator;]
In.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/In.productPrefix:()Ljava/lang/String;]
In.toString ( ) : String
[mangled: org/apache/spark/sql/sources/In.toString:()Ljava/lang/String;]
In.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object[ ]>,In>
[mangled: org/apache/spark/sql/sources/In.tupled:()Lscala/Function1;]
In.values ( ) : Object[ ]
[mangled: org/apache/spark/sql/sources/In.values:()[Ljava/lang/Object;]
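In is part of the public filter API under org.apache.spark.sql.sources: a query predicate of the form attr IN (...) reaches a PrunedFilteredScan source through buildScan's filters array. A minimal translation sketch (the toSql helper is invented for illustration):

```scala
import org.apache.spark.sql.sources.{Filter, In}

// In is a case class, so the attribute() and values() accessors above can be
// deconstructed with a pattern match.
def toSql(f: Filter): Option[String] = f match {
  case In(attribute, values) => Some(s"$attribute IN (${values.mkString(", ")})")
  case _                     => None
}

println(toSql(In("age", Array[Any](18, 21, 65))))  // Some(age IN (18, 21, 65))
```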
spark-sql_2.10-1.3.0.jar, InMemoryColumnarTableScan.class
package org.apache.spark.sql.columnar
InMemoryColumnarTableScan.buildFilter ( ) : scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.buildFilter:()Lscala/PartialFunction;]
InMemoryColumnarTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates, InMemoryRelation relation ) : InMemoryColumnarTableScan
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)Lorg/apache/spark/sql/columnar/InMemoryColumnarTableScan;]
InMemoryColumnarTableScan.InMemoryColumnarTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates, InMemoryRelation relation )
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)V]
InMemoryColumnarTableScan.InMemoryColumnarTableScan..inMemoryPartitionPruningEnabled ( ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.org.apache.spark.sql.columnar.InMemoryColumnarTableScan..inMemoryPartitionPruningEnabled:()Z]
InMemoryColumnarTableScan.InMemoryColumnarTableScan..statsFor ( org.apache.spark.sql.catalyst.expressions.Attribute a ) : ColumnStatisticsSchema
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.org.apache.spark.sql.columnar.InMemoryColumnarTableScan..statsFor:(Lorg/apache/spark/sql/catalyst/expressions/Attribute;)Lorg/apache/spark/sql/columnar/ColumnStatisticsSchema;]
InMemoryColumnarTableScan.partitionFilters ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.partitionFilters:()Lscala/collection/Seq;]
InMemoryColumnarTableScan.predicates ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.predicates:()Lscala/collection/Seq;]
InMemoryColumnarTableScan.readBatches ( ) : org.apache.spark.Accumulator<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.readBatches:()Lorg/apache/spark/Accumulator;]
InMemoryColumnarTableScan.readPartitions ( ) : org.apache.spark.Accumulator<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.readPartitions:()Lorg/apache/spark/Accumulator;]
spark-sql_2.10-1.3.0.jar, InMemoryRelation.class
package org.apache.spark.sql.columnar
InMemoryRelation.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.storage.StorageLevel storageLevel, org.apache.spark.sql.execution.SparkPlan child, scala.Option<String> tableName, org.apache.spark.rdd.RDD<CachedBatch> _cachedColumnBuffers, org.apache.spark.sql.catalyst.plans.logical.Statistics _statistics ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.copy:(Lscala/collection/Seq;ZILorg/apache/spark/storage/StorageLevel;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;)Lorg/apache/spark/sql/columnar/InMemoryRelation;]
InMemoryRelation.InMemoryRelation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.storage.StorageLevel storageLevel, org.apache.spark.sql.execution.SparkPlan child, scala.Option<String> tableName, org.apache.spark.rdd.RDD<CachedBatch> _cachedColumnBuffers, org.apache.spark.sql.catalyst.plans.logical.Statistics _statistics )
[mangled: org/apache/spark/sql/columnar/InMemoryRelation."<init>":(Lscala/collection/Seq;ZILorg/apache/spark/storage/StorageLevel;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;)V]
InMemoryRelation.InMemoryRelation..batchStats ( ) : org.apache.spark.Accumulable<scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row>,org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.org.apache.spark.sql.columnar.InMemoryRelation..batchStats:()Lorg/apache/spark/Accumulable;]
InMemoryRelation.otherCopyArgs ( ) : scala.collection.Seq<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.otherCopyArgs:()Lscala/collection/Seq;]
InMemoryRelation.partitionStatistics ( ) : PartitionStatistics
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.partitionStatistics:()Lorg/apache/spark/sql/columnar/PartitionStatistics;]
InMemoryRelation.recache ( ) : void
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.recache:()V]
InMemoryRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
InMemoryRelation.storageLevel ( ) : org.apache.spark.storage.StorageLevel
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.storageLevel:()Lorg/apache/spark/storage/StorageLevel;]
InMemoryRelation.tableName ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.tableName:()Lscala/Option;]
InMemoryRelation.withOutput ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> newOutput ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.withOutput:(Lscala/collection/Seq;)Lorg/apache/spark/sql/columnar/InMemoryRelation;]
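InMemoryRelation and InMemoryColumnarTableScan back DataFrame caching in 1.3.0; the partitionStatistics and statsFor entries above are what allow cached batches to be skipped. A minimal sketch (reusing sc and sqlContext from the earlier join sketch; people.json is an assumed input file):

```scala
// Enable pruning of cached batches via per-batch column statistics.
sqlContext.setConf("spark.sql.inMemoryColumnarStorage.partitionPruning", "true")

val people = sqlContext.jsonFile("people.json")
people.cache()  // materializes the plan as an InMemoryRelation
// The filtered scan is expected to be planned as InMemoryColumnarTableScan,
// with buildFilter() skipping batches whose stats exclude age < 30.
people.filter(people("age") < 30).explain()
```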
spark-sql_2.10-1.3.0.jar, InsertableRelation.class
package org.apache.spark.sql.sources
InsertableRelation.insert ( org.apache.spark.sql.DataFrame p1, boolean p2 ) [abstract] : void
[mangled: org/apache/spark/sql/sources/InsertableRelation.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
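InsertableRelation is the single-method contract new in 1.3.0 for data sources that can accept the rows of a DataFrame. A minimal sketch of an implementation (the class and its text-file target are invented for illustration):

```scala
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, InsertableRelation}
import org.apache.spark.sql.types.StructType

// A toy relation that writes one comma-separated line per Row.
class TextDumpRelation(path: String,
                       val sqlContext: SQLContext,
                       val schema: StructType)
  extends BaseRelation with InsertableRelation {

  override def insert(data: DataFrame, overwrite: Boolean): Unit = {
    // A real source would clear existing data first when overwrite is true.
    data.rdd.map(_.toSeq.mkString(",")).saveAsTextFile(path)
  }
}
```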
spark-sql_2.10-1.3.0.jar, InsertIntoDataSource.class
package org.apache.spark.sql.sources
InsertIntoDataSource.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.canEqual:(Ljava/lang/Object;)Z]
InsertIntoDataSource.copy ( LogicalRelation logicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean overwrite ) : InsertIntoDataSource
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.copy:(Lorg/apache/spark/sql/sources/LogicalRelation;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)Lorg/apache/spark/sql/sources/InsertIntoDataSource;]
InsertIntoDataSource.curried ( ) [static] : scala.Function1<LogicalRelation,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<Object,InsertIntoDataSource>>>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.curried:()Lscala/Function1;]
InsertIntoDataSource.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.equals:(Ljava/lang/Object;)Z]
InsertIntoDataSource.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.hashCode:()I]
InsertIntoDataSource.InsertIntoDataSource ( LogicalRelation logicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean overwrite )
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource."<init>":(Lorg/apache/spark/sql/sources/LogicalRelation;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)V]
InsertIntoDataSource.logicalRelation ( ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.logicalRelation:()Lorg/apache/spark/sql/sources/LogicalRelation;]
InsertIntoDataSource.overwrite ( ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.overwrite:()Z]
InsertIntoDataSource.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productArity:()I]
InsertIntoDataSource.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productElement:(I)Ljava/lang/Object;]
InsertIntoDataSource.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productIterator:()Lscala/collection/Iterator;]
InsertIntoDataSource.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productPrefix:()Ljava/lang/String;]
InsertIntoDataSource.query ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.query:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
InsertIntoDataSource.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
InsertIntoDataSource.tupled ( ) [static] : scala.Function1<scala.Tuple3<LogicalRelation,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,Object>,InsertIntoDataSource>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.tupled:()Lscala/Function1;]
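InsertIntoDataSource is the runnable command the analyzer produces when an INSERT statement targets a LogicalRelation over an InsertableRelation; its run() delegates to that relation's insert. A minimal SQL-level sketch (assumes jsonTable and jt are registered temp tables, with jsonTable backed by the JSON source):

```scala
// Rewritten by the analyzer into InsertIntoDataSource with overwrite = true.
sqlContext.sql("INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt")
```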
spark-sql_2.10-1.3.0.jar, IntColumnStats.class
package org.apache.spark.sql.columnar
IntColumnStats.collectedStatistics ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/IntColumnStats.collectedStatistics:()Lorg/apache/spark/sql/Row;]
IntColumnStats.count ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.count:()I]
IntColumnStats.count_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.count_.eq:(I)V]
IntColumnStats.gatherStats ( org.apache.spark.sql.Row row, int ordinal ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.gatherStats:(Lorg/apache/spark/sql/Row;I)V]
IntColumnStats.lower ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.lower:()I]
IntColumnStats.lower_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.lower_.eq:(I)V]
IntColumnStats.nullCount ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.nullCount:()I]
IntColumnStats.nullCount_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.nullCount_.eq:(I)V]
IntColumnStats.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/columnar/IntColumnStats.sizeInBytes:()J]
IntColumnStats.sizeInBytes_.eq ( long p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.sizeInBytes_.eq:(J)V]
IntColumnStats.upper ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.upper:()I]
IntColumnStats.upper_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.upper_.eq:(I)V]
spark-sql_2.10-1.3.0.jar, IsNotNull.class
package org.apache.spark.sql.sources
IsNotNull.andThen ( scala.Function1<IsNotNull,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/sources/IsNotNull.andThen:(Lscala/Function1;)Lscala/Function1;]
IsNotNull.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.attribute:()Ljava/lang/String;]
IsNotNull.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNotNull.canEqual:(Ljava/lang/Object;)Z]
IsNotNull.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,IsNotNull>
[mangled: org/apache/spark/sql/sources/IsNotNull.compose:(Lscala/Function1;)Lscala/Function1;]
IsNotNull.copy ( String attribute ) : IsNotNull
[mangled: org/apache/spark/sql/sources/IsNotNull.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/sources/IsNotNull;]
IsNotNull.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNotNull.equals:(Ljava/lang/Object;)Z]
IsNotNull.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/IsNotNull.hashCode:()I]
IsNotNull.IsNotNull ( String attribute )
[mangled: org/apache/spark/sql/sources/IsNotNull."<init>":(Ljava/lang/String;)V]
IsNotNull.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/IsNotNull.productArity:()I]
IsNotNull.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/IsNotNull.productElement:(I)Ljava/lang/Object;]
IsNotNull.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/IsNotNull.productIterator:()Lscala/collection/Iterator;]
IsNotNull.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.productPrefix:()Ljava/lang/String;]
IsNotNull.toString ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, IsNull.class
package org.apache.spark.sql.sources
IsNull.andThen ( scala.Function1<IsNull,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/sources/IsNull.andThen:(Lscala/Function1;)Lscala/Function1;]
IsNull.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.attribute:()Ljava/lang/String;]
IsNull.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNull.canEqual:(Ljava/lang/Object;)Z]
IsNull.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,IsNull>
[mangled: org/apache/spark/sql/sources/IsNull.compose:(Lscala/Function1;)Lscala/Function1;]
IsNull.copy ( String attribute ) : IsNull
[mangled: org/apache/spark/sql/sources/IsNull.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/sources/IsNull;]
IsNull.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNull.equals:(Ljava/lang/Object;)Z]
IsNull.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/IsNull.hashCode:()I]
IsNull.IsNull ( String attribute )
[mangled: org/apache/spark/sql/sources/IsNull."<init>":(Ljava/lang/String;)V]
IsNull.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/IsNull.productArity:()I]
IsNull.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/IsNull.productElement:(I)Ljava/lang/Object;]
IsNull.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/IsNull.productIterator:()Lscala/collection/Iterator;]
IsNull.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.productPrefix:()Ljava/lang/String;]
IsNull.toString ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.toString:()Ljava/lang/String;]
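IsNull and IsNotNull are the null-test filters in the same source API; each carries only the attribute name. Extending the earlier toSql sketch:

```scala
import org.apache.spark.sql.sources.{Filter, IsNotNull, IsNull}

def nullTestToSql(f: Filter): Option[String] = f match {
  case IsNull(attr)    => Some(s"$attr IS NULL")
  case IsNotNull(attr) => Some(s"$attr IS NOT NULL")
  case _               => None
}

println(nullTestToSql(IsNotNull("name")))  // Some(name IS NOT NULL)
```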
spark-sql_2.10-1.3.0.jar, JavaBigDecimalSerializer.class
package org.apache.spark.sql.execution
JavaBigDecimalSerializer.JavaBigDecimalSerializer ( )
[mangled: org/apache/spark/sql/execution/JavaBigDecimalSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, JDBCPartition.class
package org.apache.spark.sql.jdbc
JDBCPartition.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.canEqual:(Ljava/lang/Object;)Z]
JDBCPartition.copy ( String whereClause, int idx ) : JDBCPartition
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.copy:(Ljava/lang/String;I)Lorg/apache/spark/sql/jdbc/JDBCPartition;]
JDBCPartition.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,JDBCPartition>>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.curried:()Lscala/Function1;]
JDBCPartition.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.equals:(Ljava/lang/Object;)Z]
JDBCPartition.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.hashCode:()I]
JDBCPartition.idx ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.idx:()I]
JDBCPartition.index ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.index:()I]
JDBCPartition.JDBCPartition ( String whereClause, int idx )
[mangled: org/apache/spark/sql/jdbc/JDBCPartition."<init>":(Ljava/lang/String;I)V]
JDBCPartition.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productArity:()I]
JDBCPartition.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productElement:(I)Ljava/lang/Object;]
JDBCPartition.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productIterator:()Lscala/collection/Iterator;]
JDBCPartition.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productPrefix:()Ljava/lang/String;]
JDBCPartition.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.toString:()Ljava/lang/String;]
JDBCPartition.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,JDBCPartition>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.tupled:()Lscala/Function1;]
JDBCPartition.whereClause ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.whereClause:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, JDBCPartitioningInfo.class
package org.apache.spark.sql.jdbc
JDBCPartitioningInfo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.canEqual:(Ljava/lang/Object;)Z]
JDBCPartitioningInfo.column ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.column:()Ljava/lang/String;]
JDBCPartitioningInfo.copy ( String column, long lowerBound, long upperBound, int numPartitions ) : JDBCPartitioningInfo
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.copy:(Ljava/lang/String;JJI)Lorg/apache/spark/sql/jdbc/JDBCPartitioningInfo;]
JDBCPartitioningInfo.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,scala.Function1<Object,scala.Function1<Object,JDBCPartitioningInfo>>>>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.curried:()Lscala/Function1;]
JDBCPartitioningInfo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.equals:(Ljava/lang/Object;)Z]
JDBCPartitioningInfo.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.hashCode:()I]
JDBCPartitioningInfo.JDBCPartitioningInfo ( String column, long lowerBound, long upperBound, int numPartitions )
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo."<init>":(Ljava/lang/String;JJI)V]
JDBCPartitioningInfo.lowerBound ( ) : long
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.lowerBound:()J]
JDBCPartitioningInfo.numPartitions ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.numPartitions:()I]
JDBCPartitioningInfo.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productArity:()I]
JDBCPartitioningInfo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productElement:(I)Ljava/lang/Object;]
JDBCPartitioningInfo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productIterator:()Lscala/collection/Iterator;]
JDBCPartitioningInfo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productPrefix:()Ljava/lang/String;]
JDBCPartitioningInfo.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.toString:()Ljava/lang/String;]
JDBCPartitioningInfo.tupled ( ) [static] : scala.Function1<scala.Tuple4<String,Object,Object,Object>,JDBCPartitioningInfo>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.tupled:()Lscala/Function1;]
JDBCPartitioningInfo.upperBound ( ) : long
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.upperBound:()J]
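JDBCPartitioningInfo carries the column/lowerBound/upperBound/numPartitions quadruple that JDBCRelation.columnPartition (listed below) stripes into per-partition JDBCPartition WHERE clauses. An illustrative re-implementation of that striping arithmetic (a sketch of the semantics only, not the private Spark code):

```scala
// Stripe [lower, upper) across numPartitions ranges on `column`, leaving the
// first and last stripes open-ended so no value is dropped.
def stripe(column: String, lower: Long, upper: Long, numPartitions: Int): Seq[String] = {
  val stride = (upper - lower) / numPartitions
  (0 until numPartitions).map { i =>
    val lo = if (i == 0) None else Some(s"$column >= ${lower + i * stride}")
    val hi = if (i == numPartitions - 1) None else Some(s"$column < ${lower + (i + 1) * stride}")
    (lo ++ hi).mkString(" AND ")
  }
}

stripe("id", 0L, 100L, 4).foreach(println)
// id < 25
// id >= 25 AND id < 50
// id >= 50 AND id < 75
// id >= 75
```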
spark-sql_2.10-1.3.0.jar, JDBCRDD.class
package org.apache.spark.sql.jdbc
JDBCRDD.BinaryConversion ( ) : JDBCRDD.BinaryConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BinaryConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BinaryConversion$;]
JDBCRDD.BinaryLongConversion ( ) : JDBCRDD.BinaryLongConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BinaryLongConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BinaryLongConversion$;]
JDBCRDD.BooleanConversion ( ) : JDBCRDD.BooleanConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BooleanConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BooleanConversion$;]
JDBCRDD.compute ( org.apache.spark.Partition thePart, org.apache.spark.TaskContext context ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.compute:(Lorg/apache/spark/Partition;Lorg/apache/spark/TaskContext;)Lscala/collection/Iterator;]
JDBCRDD.DateConversion ( ) : JDBCRDD.DateConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DateConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DateConversion$;]
JDBCRDD.DecimalConversion ( ) : JDBCRDD.DecimalConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DecimalConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DecimalConversion$;]
JDBCRDD.DoubleConversion ( ) : JDBCRDD.DoubleConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DoubleConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DoubleConversion$;]
JDBCRDD.FloatConversion ( ) : JDBCRDD.FloatConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.FloatConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$FloatConversion$;]
JDBCRDD.getConnector ( String p1, String p2 ) [static] : scala.Function0<java.sql.Connection>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getConnector:(Ljava/lang/String;Ljava/lang/String;)Lscala/Function0;]
JDBCRDD.getConversions ( org.apache.spark.sql.types.StructType schema ) : JDBCRDD.JDBCConversion[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getConversions:(Lorg/apache/spark/sql/types/StructType;)[Lorg/apache/spark/sql/jdbc/JDBCRDD$JDBCConversion;]
JDBCRDD.getPartitions ( ) : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getPartitions:()[Lorg/apache/spark/Partition;]
JDBCRDD.IntegerConversion ( ) : JDBCRDD.IntegerConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.IntegerConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$IntegerConversion$;]
JDBCRDD.JDBCRDD ( org.apache.spark.SparkContext sc, scala.Function0<java.sql.Connection> getConnection, org.apache.spark.sql.types.StructType schema, String fqTable, String[ ] columns, org.apache.spark.sql.sources.Filter[ ] filters, org.apache.spark.Partition[ ] partitions )
[mangled: org/apache/spark/sql/jdbc/JDBCRDD."<init>":(Lorg/apache/spark/SparkContext;Lscala/Function0;Lorg/apache/spark/sql/types/StructType;Ljava/lang/String;[Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;[Lorg/apache/spark/Partition;)V]
JDBCRDD.LongConversion ( ) : JDBCRDD.LongConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.LongConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$LongConversion$;]
JDBCRDD.JDBCRDD..columnList ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..columnList:()Ljava/lang/String;]
JDBCRDD.JDBCRDD..compileFilter ( org.apache.spark.sql.sources.Filter f ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..compileFilter:(Lorg/apache/spark/sql/sources/Filter;)Ljava/lang/String;]
JDBCRDD.JDBCRDD..getWhereClause ( JDBCPartition part ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..getWhereClause:(Lorg/apache/spark/sql/jdbc/JDBCPartition;)Ljava/lang/String;]
JDBCRDD.resolveTable ( String p1, String p2 ) [static] : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.resolveTable:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/types/StructType;]
JDBCRDD.scanTable ( org.apache.spark.SparkContext p1, org.apache.spark.sql.types.StructType p2, String p3, String p4, String p5, String[ ] p6, org.apache.spark.sql.sources.Filter[ ] p7, org.apache.spark.Partition[ ] p8 ) [static] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.scanTable:(Lorg/apache/spark/SparkContext;Lorg/apache/spark/sql/types/StructType;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;[Lorg/apache/spark/Partition;)Lorg/apache/spark/rdd/RDD;]
JDBCRDD.StringConversion ( ) : JDBCRDD.StringConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.StringConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$StringConversion$;]
JDBCRDD.TimestampConversion ( ) : JDBCRDD.TimestampConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.TimestampConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$TimestampConversion$;]
spark-sql_2.10-1.3.0.jar, JDBCRelation.class
package org.apache.spark.sql.jdbc
JDBCRelation.buildScan ( String[ ] requiredColumns, org.apache.spark.sql.sources.Filter[ ] filters ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.buildScan:([Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/rdd/RDD;]
JDBCRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.canEqual:(Ljava/lang/Object;)Z]
JDBCRelation.columnPartition ( JDBCPartitioningInfo p1 ) [static] : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.columnPartition:(Lorg/apache/spark/sql/jdbc/JDBCPartitioningInfo;)[Lorg/apache/spark/Partition;]
JDBCRelation.copy ( String url, String table, org.apache.spark.Partition[ ] parts, org.apache.spark.sql.SQLContext sqlContext ) : JDBCRelation
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.copy:(Ljava/lang/String;Ljava/lang/String;[Lorg/apache/spark/Partition;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/jdbc/JDBCRelation;]
JDBCRelation.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.equals:(Ljava/lang/Object;)Z]
JDBCRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.hashCode:()I]
JDBCRelation.JDBCRelation ( String url, String table, org.apache.spark.Partition[ ] parts, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/jdbc/JDBCRelation."<init>":(Ljava/lang/String;Ljava/lang/String;[Lorg/apache/spark/Partition;Lorg/apache/spark/sql/SQLContext;)V]
JDBCRelation.parts ( ) : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.parts:()[Lorg/apache/spark/Partition;]
JDBCRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productArity:()I]
JDBCRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productElement:(I)Ljava/lang/Object;]
JDBCRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productIterator:()Lscala/collection/Iterator;]
JDBCRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productPrefix:()Ljava/lang/String;]
JDBCRelation.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
JDBCRelation.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
JDBCRelation.table ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.table:()Ljava/lang/String;]
JDBCRelation.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.toString:()Ljava/lang/String;]
JDBCRelation.url ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.url:()Ljava/lang/String;]
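JDBCRelation is the BaseRelation behind the JDBC data source introduced in 1.3.0; its buildScan delegates to JDBCRDD.scanTable above, pushing column pruning and the compilable filters into the generated SQL. The public entry point is sketched below (the H2 URL and PEOPLE table are assumed to exist, with the driver on the classpath):

```scala
// The last four arguments become a JDBCPartitioningInfo, striped into four
// JDBCPartitions as in the earlier sketch.
val df = sqlContext.jdbc("jdbc:h2:mem:testdb", "PEOPLE", "ID", 0L, 100L, 4)
df.filter(df("ID") < 25).show()  // the predicate can be compiled into the JDBC WHERE clause
```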
spark-sql_2.10-1.3.0.jar, JSONRelation.class
package org.apache.spark.sql.json
JSONRelation.buildScan ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/json/JSONRelation.buildScan:()Lorg/apache/spark/rdd/RDD;]
JSONRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/json/JSONRelation.canEqual:(Ljava/lang/Object;)Z]
JSONRelation.copy ( String path, double samplingRatio, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, org.apache.spark.sql.SQLContext sqlContext ) : JSONRelation
[mangled: org/apache/spark/sql/json/JSONRelation.copy:(Ljava/lang/String;DLscala/Option;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/json/JSONRelation;]
JSONRelation.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/json/JSONRelation.equals:(Ljava/lang/Object;)Z]
JSONRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/json/JSONRelation.hashCode:()I]
JSONRelation.insert ( org.apache.spark.sql.DataFrame data, boolean overwrite ) : void
[mangled: org/apache/spark/sql/json/JSONRelation.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
JSONRelation.JSONRelation ( String path, double samplingRatio, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/json/JSONRelation."<init>":(Ljava/lang/String;DLscala/Option;Lorg/apache/spark/sql/SQLContext;)V]
JSONRelation.JSONRelation..baseRDD ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/json/JSONRelation.org.apache.spark.sql.json.JSONRelation..baseRDD:()Lorg/apache/spark/rdd/RDD;]
JSONRelation.path ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.path:()Ljava/lang/String;]
JSONRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/json/JSONRelation.productArity:()I]
JSONRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/json/JSONRelation.productElement:(I)Ljava/lang/Object;]
JSONRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/json/JSONRelation.productIterator:()Lscala/collection/Iterator;]
JSONRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.productPrefix:()Ljava/lang/String;]
JSONRelation.samplingRatio ( ) : double
[mangled: org/apache/spark/sql/json/JSONRelation.samplingRatio:()D]
JSONRelation.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/json/JSONRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
JSONRelation.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/json/JSONRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
JSONRelation.toString ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.toString:()Ljava/lang/String;]
JSONRelation.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/json/JSONRelation.userSpecifiedSchema:()Lscala/Option;]
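JSONRelation combines schema inference (controlled by samplingRatio) with InsertableRelation-based writes. A minimal read sketch (people.json is an assumed input file):

```scala
// Explicit source-plus-options form; a samplingRatio below 1.0 infers the
// schema from a sample of the input rather than a full pass.
val people = sqlContext.load("json", Map("path" -> "people.json", "samplingRatio" -> "0.5"))
people.printSchema()
```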
spark-sql_2.10-1.3.0.jar, LeftSemiJoinBNL.class
package org.apache.spark.sql.execution.joins
LeftSemiJoinBNL.broadcast ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.broadcast:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.canEqual:(Ljava/lang/Object;)Z]
LeftSemiJoinBNL.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.children:()Lscala/collection/Seq;]
LeftSemiJoinBNL.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.condition:()Lscala/Option;]
LeftSemiJoinBNL.copy ( org.apache.spark.sql.execution.SparkPlan streamed, org.apache.spark.sql.execution.SparkPlan broadcast, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition ) : LeftSemiJoinBNL
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;)Lorg/apache/spark/sql/execution/joins/LeftSemiJoinBNL;]
LeftSemiJoinBNL.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,LeftSemiJoinBNL>>>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.curried:()Lscala/Function1;]
LeftSemiJoinBNL.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.equals:(Ljava/lang/Object;)Z]
LeftSemiJoinBNL.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.execute:()Lorg/apache/spark/rdd/RDD;]
LeftSemiJoinBNL.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.hashCode:()I]
LeftSemiJoinBNL.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinBNL.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.LeftSemiJoinBNL ( org.apache.spark.sql.execution.SparkPlan streamed, org.apache.spark.sql.execution.SparkPlan broadcast, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition )
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;)V]
LeftSemiJoinBNL.LeftSemiJoinBNL..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.org.apache.spark.sql.execution.joins.LeftSemiJoinBNL..boundCondition:()Lscala/Function1;]
LeftSemiJoinBNL.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.output:()Lscala/collection/Seq;]
LeftSemiJoinBNL.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
LeftSemiJoinBNL.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productArity:()I]
LeftSemiJoinBNL.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productElement:(I)Ljava/lang/Object;]
LeftSemiJoinBNL.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productIterator:()Lscala/collection/Iterator;]
LeftSemiJoinBNL.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productPrefix:()Ljava/lang/String;]
LeftSemiJoinBNL.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinBNL.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.streamed ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.streamed:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.tupled ( ) [static] : scala.Function1<scala.Tuple3<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>>,LeftSemiJoinBNL>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, LeftSemiJoinHash.class
package org.apache.spark.sql.execution.joins
LeftSemiJoinHash.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.buildSide ( ) : package.BuildRight.
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildRight$;]
LeftSemiJoinHash.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
LeftSemiJoinHash.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
LeftSemiJoinHash.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.canEqual:(Ljava/lang/Object;)Z]
LeftSemiJoinHash.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.children:()Lscala/collection/Seq;]
LeftSemiJoinHash.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : LeftSemiJoinHash
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/LeftSemiJoinHash;]
LeftSemiJoinHash.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,LeftSemiJoinHash>>>>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.curried:()Lscala/Function1;]
LeftSemiJoinHash.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.equals:(Ljava/lang/Object;)Z]
LeftSemiJoinHash.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.execute:()Lorg/apache/spark/rdd/RDD;]
LeftSemiJoinHash.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.hashCode:()I]
LeftSemiJoinHash.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
LeftSemiJoinHash.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinHash.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.leftKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.LeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
LeftSemiJoinHash.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.output:()Lscala/collection/Seq;]
LeftSemiJoinHash.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productArity:()I]
LeftSemiJoinHash.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productElement:(I)Ljava/lang/Object;]
LeftSemiJoinHash.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productIterator:()Lscala/collection/Iterator;]
LeftSemiJoinHash.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productPrefix:()Ljava/lang/String;]
LeftSemiJoinHash.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.requiredChildDistribution:()Lscala/collection/immutable/List;]
LeftSemiJoinHash.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.requiredChildDistribution:()Lscala/collection/Seq;]
LeftSemiJoinHash.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinHash.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.rightKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamedKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamSideKeyGenerator:()Lscala/Function0;]
LeftSemiJoinHash.tupled ( ) [static] : scala.Function1<scala.Tuple4<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,LeftSemiJoinHash>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.tupled:()Lscala/Function1;]
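LeftSemiJoinHash serves LEFT SEMI JOIN queries that have equi-join keys, while LeftSemiJoinBNL is the broadcast nested-loop fallback for non-equi conditions. A minimal sketch (assumes customers and orders are registered temp tables):

```scala
// With an equality condition the planner is expected to pick LeftSemiJoinHash;
// a non-equi condition would instead fall back to LeftSemiJoinBNL.
val semi = sqlContext.sql(
  "SELECT * FROM customers LEFT SEMI JOIN orders ON customers.id = orders.customer_id")
semi.explain()
```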
spark-sql_2.10-1.3.0.jar, LessThan.class
package org.apache.spark.sql.sources
LessThan.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.attribute:()Ljava/lang/String;]
LessThan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThan.canEqual:(Ljava/lang/Object;)Z]
LessThan.copy ( String attribute, Object value ) : LessThan
[mangled: org/apache/spark/sql/sources/LessThan.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/LessThan;]
LessThan.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,LessThan>>
[mangled: org/apache/spark/sql/sources/LessThan.curried:()Lscala/Function1;]
LessThan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThan.equals:(Ljava/lang/Object;)Z]
LessThan.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LessThan.hashCode:()I]
LessThan.LessThan ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/LessThan."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
LessThan.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LessThan.productArity:()I]
LessThan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LessThan.productElement:(I)Ljava/lang/Object;]
LessThan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LessThan.productIterator:()Lscala/collection/Iterator;]
LessThan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.productPrefix:()Ljava/lang/String;]
LessThan.toString ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.toString:()Ljava/lang/String;]
LessThan.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,LessThan>
[mangled: org/apache/spark/sql/sources/LessThan.tupled:()Lscala/Function1;]
LessThan.value ( ) : Object
[mangled: org/apache/spark/sql/sources/LessThan.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, LessThanOrEqual.class
package org.apache.spark.sql.sources
LessThanOrEqual.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.attribute:()Ljava/lang/String;]
LessThanOrEqual.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.canEqual:(Ljava/lang/Object;)Z]
LessThanOrEqual.copy ( String attribute, Object value ) : LessThanOrEqual
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/LessThanOrEqual;]
LessThanOrEqual.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,LessThanOrEqual>>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.curried:()Lscala/Function1;]
LessThanOrEqual.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.equals:(Ljava/lang/Object;)Z]
LessThanOrEqual.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.hashCode:()I]
LessThanOrEqual.LessThanOrEqual ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/LessThanOrEqual."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
LessThanOrEqual.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productArity:()I]
LessThanOrEqual.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productElement:(I)Ljava/lang/Object;]
LessThanOrEqual.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productIterator:()Lscala/collection/Iterator;]
LessThanOrEqual.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productPrefix:()Ljava/lang/String;]
LessThanOrEqual.toString ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.toString:()Ljava/lang/String;]
LessThanOrEqual.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,LessThanOrEqual>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.tupled:()Lscala/Function1;]
LessThanOrEqual.value ( ) : Object
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.value:()Ljava/lang/Object;]
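LessThan and LessThanOrEqual round out the pushed-down comparison filters: a Catalyst predicate such as df("age") <= 30 over a PrunedFilteredScan source arrives as LessThanOrEqual("age", 30). Extending the translation sketch:

```scala
import org.apache.spark.sql.sources.{Filter, LessThan, LessThanOrEqual}

def cmpToSql(f: Filter): Option[String] = f match {
  case LessThan(attr, v)        => Some(s"$attr < $v")
  case LessThanOrEqual(attr, v) => Some(s"$attr <= $v")
  case _                        => None
}

println(cmpToSql(LessThanOrEqual("age", 30)))  // Some(age <= 30)
```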
spark-sql_2.10-1.3.0.jar, LocalTableScan.class
package org.apache.spark.sql.execution
LocalTableScan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LocalTableScan.canEqual:(Ljava/lang/Object;)Z]
LocalTableScan.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LocalTableScan.children:()Lscala/collection/immutable/Nil$;]
LocalTableScan.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LocalTableScan.children:()Lscala/collection/Seq;]
LocalTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows ) : LocalTableScan
[mangled: org/apache/spark/sql/execution/LocalTableScan.copy:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/LocalTableScan;]
LocalTableScan.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.Row>,LocalTableScan>>
[mangled: org/apache/spark/sql/execution/LocalTableScan.curried:()Lscala/Function1;]
LocalTableScan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LocalTableScan.equals:(Ljava/lang/Object;)Z]
LocalTableScan.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LocalTableScan.execute:()Lorg/apache/spark/rdd/RDD;]
LocalTableScan.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/LocalTableScan.executeCollect:()[Lorg/apache/spark/sql/Row;]
LocalTableScan.executeTake ( int limit ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/LocalTableScan.executeTake:(I)[Lorg/apache/spark/sql/Row;]
LocalTableScan.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LocalTableScan.hashCode:()I]
LocalTableScan.LocalTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows )
[mangled: org/apache/spark/sql/execution/LocalTableScan."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;)V]
LocalTableScan.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LocalTableScan.output:()Lscala/collection/Seq;]
LocalTableScan.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LocalTableScan.productArity:()I]
LocalTableScan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LocalTableScan.productElement:(I)Ljava/lang/Object;]
LocalTableScan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LocalTableScan.productIterator:()Lscala/collection/Iterator;]
LocalTableScan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LocalTableScan.productPrefix:()Ljava/lang/String;]
LocalTableScan.rows ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LocalTableScan.rows:()Lscala/collection/Seq;]
LocalTableScan.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.collection.Seq<org.apache.spark.sql.Row>>,LocalTableScan>
[mangled: org/apache/spark/sql/execution/LocalTableScan.tupled:()Lscala/Function1;]
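LocalTableScan scans rows that already sit in the driver, which is how DataFrames built from local Scala collections execute; executeCollect and executeTake above answer collect()/take() without launching a distributed job. A minimal sketch (reusing sqlContext):

```scala
case class Person(id: Int, name: String)

// A DataFrame over a local Seq is expected to be planned as a LocalTableScan.
val localDF = sqlContext.createDataFrame(Seq(Person(1, "ann"), Person(2, "bo")))
localDF.explain()
localDF.show()
```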
spark-sql_2.10-1.3.0.jar, LogicalLocalTable.class
package org.apache.spark.sql.execution
LogicalLocalTable.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.canEqual:(Ljava/lang/Object;)Z]
LogicalLocalTable.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.children:()Lscala/collection/immutable/Nil$;]
LogicalLocalTable.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.children:()Lscala/collection/Seq;]
LogicalLocalTable.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows, org.apache.spark.sql.SQLContext sqlContext ) : LogicalLocalTable
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/LogicalLocalTable;]
LogicalLocalTable.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.equals:(Ljava/lang/Object;)Z]
LogicalLocalTable.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.hashCode:()I]
LogicalLocalTable.LogicalLocalTable ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/execution/LogicalLocalTable."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)V]
LogicalLocalTable.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalLocalTable.newInstance ( ) : LogicalLocalTable
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.newInstance:()Lorg/apache/spark/sql/execution/LogicalLocalTable;]
LogicalLocalTable.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.output:()Lscala/collection/Seq;]
LogicalLocalTable.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productArity:()I]
LogicalLocalTable.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productElement:(I)Ljava/lang/Object;]
LogicalLocalTable.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productIterator:()Lscala/collection/Iterator;]
LogicalLocalTable.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productPrefix:()Ljava/lang/String;]
LogicalLocalTable.rows ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.rows:()Lscala/collection/Seq;]
LogicalLocalTable.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalLocalTable.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
spark-sql_2.10-1.3.0.jar, LogicalRDD.class
package org.apache.spark.sql.execution
LogicalRDD.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.canEqual:(Ljava/lang/Object;)Z]
LogicalRDD.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LogicalRDD.children:()Lscala/collection/immutable/Nil$;]
LogicalRDD.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LogicalRDD.children:()Lscala/collection/Seq;]
LogicalRDD.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd, org.apache.spark.sql.SQLContext sqlContext ) : LogicalRDD
[mangled: org/apache/spark/sql/execution/LogicalRDD.copy:(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/LogicalRDD;]
LogicalRDD.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.equals:(Ljava/lang/Object;)Z]
LogicalRDD.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalRDD.hashCode:()I]
LogicalRDD.LogicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/execution/LogicalRDD."<init>":(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/SQLContext;)V]
LogicalRDD.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/execution/LogicalRDD.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalRDD.newInstance ( ) : LogicalRDD
[mangled: org/apache/spark/sql/execution/LogicalRDD.newInstance:()Lorg/apache/spark/sql/execution/LogicalRDD;]
LogicalRDD.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LogicalRDD.output:()Lscala/collection/Seq;]
LogicalRDD.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalRDD.productArity:()I]
LogicalRDD.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LogicalRDD.productElement:(I)Ljava/lang/Object;]
LogicalRDD.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LogicalRDD.productIterator:()Lscala/collection/Iterator;]
LogicalRDD.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LogicalRDD.productPrefix:()Ljava/lang/String;]
LogicalRDD.rdd ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LogicalRDD.rdd:()Lorg/apache/spark/rdd/RDD;]
LogicalRDD.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalRDD.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/execution/LogicalRDD.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
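Note: the three-argument <init> above is the flattened form of a curried Scala constructor. A minimal Scala sketch (illustrative only; LogicalRDD is internal to Spark SQL in 1.3.0, so the snippet assumes compilation under the org.apache.spark.sql package, and the attribute name is hypothetical):

    // Illustrative sketch only: LogicalRDD is internal to Spark SQL in 1.3.0,
    // so this file would have to live under the org.apache.spark.sql package.
    package org.apache.spark.sql.execution

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.catalyst.expressions.AttributeReference
    import org.apache.spark.sql.types.IntegerType

    object LogicalRDDSketch {
      def wrap(rows: RDD[Row], sqlContext: SQLContext): LogicalRDD = {
        // Hypothetical single-column schema for the wrapped rows.
        val output = Seq(AttributeReference("id", IntegerType, nullable = false)())
        // Curried Scala constructor; javap flattens it into the three-argument
        // <init> shown in the mangled signature above.
        LogicalRDD(output, rows)(sqlContext)
      }
    }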
spark-sql_2.10-1.3.0.jar, LogicalRelation.class
package org.apache.spark.sql.sources
LogicalRelation.andThen ( scala.Function1<LogicalRelation,A> p1 ) [static] : scala.Function1<BaseRelation,A>
[mangled: org/apache/spark/sql/sources/LogicalRelation.andThen:(Lscala/Function1;)Lscala/Function1;]
LogicalRelation.attributeMap ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/LogicalRelation.attributeMap:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
LogicalRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.canEqual:(Ljava/lang/Object;)Z]
LogicalRelation.compose ( scala.Function1<A,BaseRelation> p1 ) [static] : scala.Function1<A,LogicalRelation>
[mangled: org/apache/spark/sql/sources/LogicalRelation.compose:(Lscala/Function1;)Lscala/Function1;]
LogicalRelation.copy ( BaseRelation relation ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.copy:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/sources/LogicalRelation;]
LogicalRelation.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.equals:(Ljava/lang/Object;)Z]
LogicalRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LogicalRelation.hashCode:()I]
LogicalRelation.LogicalRelation ( BaseRelation relation )
[mangled: org/apache/spark/sql/sources/LogicalRelation."<init>":(Lorg/apache/spark/sql/sources/BaseRelation;)V]
LogicalRelation.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalRelation.newInstance ( ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.newInstance:()Lorg/apache/spark/sql/sources/LogicalRelation;]
LogicalRelation.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/LogicalRelation.output:()Lscala/collection/Seq;]
LogicalRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LogicalRelation.productArity:()I]
LogicalRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LogicalRelation.productElement:(I)Ljava/lang/Object;]
LogicalRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LogicalRelation.productIterator:()Lscala/collection/Iterator;]
LogicalRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LogicalRelation.productPrefix:()Ljava/lang/String;]
LogicalRelation.relation ( ) : BaseRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.relation:()Lorg/apache/spark/sql/sources/BaseRelation;]
LogicalRelation.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan otherPlan ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalRelation.simpleString ( ) : String
[mangled: org/apache/spark/sql/sources/LogicalRelation.simpleString:()Ljava/lang/String;]
LogicalRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/sources/LogicalRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
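Note: LogicalRelation lifts a BaseRelation into the Catalyst plan space. A minimal sketch (illustrative only; the class is internal in 1.3.0, and MyRelation is a hypothetical BaseRelation):

    // Illustrative sketch only: LogicalRelation is internal to Spark SQL in 1.3.0.
    package org.apache.spark.sql.sources

    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.types.{StringType, StructField, StructType}

    object LogicalRelationSketch {
      // Hypothetical minimal BaseRelation.
      class MyRelation(val sqlContext: SQLContext) extends BaseRelation {
        override def schema: StructType =
          StructType(Seq(StructField("word", StringType)))
      }

      // Wrap the relation so the analyzer/optimizer can plan over it.
      def toPlan(sqlContext: SQLContext): LogicalRelation =
        LogicalRelation(new MyRelation(sqlContext))
    }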
spark-sql_2.10-1.3.0.jar, MySQLQuirks.class
package org.apache.spark.sql.jdbc
MySQLQuirks.MySQLQuirks ( )
[mangled: org/apache/spark/sql/jdbc/MySQLQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, NanoTime.class
package org.apache.spark.sql.parquet.timestamp
NanoTime.getJulianDay ( ) : int
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.getJulianDay:()I]
NanoTime.getTimeOfDayNanos ( ) : long
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.getTimeOfDayNanos:()J]
NanoTime.NanoTime ( )
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime."<init>":()V]
NanoTime.set ( int julianDay, long timeOfDayNanos ) : NanoTime
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.set:(IJ)Lorg/apache/spark/sql/parquet/timestamp/NanoTime;]
NanoTime.toBinary ( ) : parquet.io.api.Binary
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.toBinary:()Lparquet/io/api/Binary;]
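Note: these members expose Parquet's INT96 timestamp layout, a Julian day number plus nanoseconds within that day. A minimal sketch (illustrative only; NanoTime is internal to Spark's Parquet support):

    // Illustrative sketch only: NanoTime is internal to Spark's Parquet support.
    package org.apache.spark.sql.parquet.timestamp

    object NanoTimeSketch {
      // Julian day 2440588 is 1970-01-01; zero nanoseconds into the day.
      def epochAsInt96: parquet.io.api.Binary =
        new NanoTime().set(2440588, 0L).toBinary // 12-byte INT96 value
    }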
spark-sql_2.10-1.3.0.jar, NativeColumnType<T>.class
package org.apache.spark.sql.columnar
NativeColumnType<T>.dataType ( ) : T
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.dataType:()Lorg/apache/spark/sql/types/NativeType;]
NativeColumnType<T>.NativeColumnType ( T dataType, int typeId, int defaultSize )
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.org.apache.spark.sql.columnar.NativeColumnType:(Lorg/apache/spark/sql/types/NativeType;II)V]
spark-sql_2.10-1.3.0.jar, NoQuirks.class
package org.apache.spark.sql.jdbc
NoQuirks.NoQuirks ( )
[mangled: org/apache/spark/sql/jdbc/NoQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, Not.class
package org.apache.spark.sql.sources
Not.andThen ( scala.Function1<Not,A> p1 ) [static] : scala.Function1<Filter,A>
[mangled: org/apache/spark/sql/sources/Not.andThen:(Lscala/Function1;)Lscala/Function1;]
Not.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Not.canEqual:(Ljava/lang/Object;)Z]
Not.child ( ) : Filter
[mangled: org/apache/spark/sql/sources/Not.child:()Lorg/apache/spark/sql/sources/Filter;]
Not.compose ( scala.Function1<A,Filter> p1 ) [static] : scala.Function1<A,Not>
[mangled: org/apache/spark/sql/sources/Not.compose:(Lscala/Function1;)Lscala/Function1;]
Not.copy ( Filter child ) : Not
[mangled: org/apache/spark/sql/sources/Not.copy:(Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/Not;]
Not.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Not.equals:(Ljava/lang/Object;)Z]
Not.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/Not.hashCode:()I]
Not.Not ( Filter child )
[mangled: org/apache/spark/sql/sources/Not."<init>":(Lorg/apache/spark/sql/sources/Filter;)V]
Not.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/Not.productArity:()I]
Not.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/Not.productElement:(I)Ljava/lang/Object;]
Not.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/Not.productIterator:()Lscala/collection/Iterator;]
Not.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/Not.productPrefix:()Ljava/lang/String;]
Not.toString ( ) : String
[mangled: org/apache/spark/sql/sources/Not.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, NullableColumnBuilder.class
package org.apache.spark.sql.columnar
NullableColumnBuilder.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
NullableColumnBuilder.NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..super.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, Or.class
package org.apache.spark.sql.sources
Or.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Or.canEqual:(Ljava/lang/Object;)Z]
Or.copy ( Filter left, Filter right ) : Or
[mangled: org/apache/spark/sql/sources/Or.copy:(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/Or;]
Or.curried ( ) [static] : scala.Function1<Filter,scala.Function1<Filter,Or>>
[mangled: org/apache/spark/sql/sources/Or.curried:()Lscala/Function1;]
Or.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Or.equals:(Ljava/lang/Object;)Z]
Or.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/Or.hashCode:()I]
Or.left ( ) : Filter
[mangled: org/apache/spark/sql/sources/Or.left:()Lorg/apache/spark/sql/sources/Filter;]
Or.Or ( Filter left, Filter right )
[mangled: org/apache/spark/sql/sources/Or."<init>":(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)V]
Or.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/Or.productArity:()I]
Or.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/Or.productElement:(I)Ljava/lang/Object;]
Or.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/Or.productIterator:()Lscala/collection/Iterator;]
Or.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/Or.productPrefix:()Ljava/lang/String;]
Or.right ( ) : Filter
[mangled: org/apache/spark/sql/sources/Or.right:()Lorg/apache/spark/sql/sources/Filter;]
Or.toString ( ) : String
[mangled: org/apache/spark/sql/sources/Or.toString:()Ljava/lang/String;]
Or.tupled ( ) [static] : scala.Function1<scala.Tuple2<Filter,Filter>,Or>
[mangled: org/apache/spark/sql/sources/Or.tupled:()Lscala/Function1;]
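Note: Or, like Not above, is a plain case class in the public filter API that data sources receive during predicate pushdown. A minimal sketch, assuming the companion EqualTo filter from the same org.apache.spark.sql.sources package:

    import org.apache.spark.sql.sources.{EqualTo, Not, Or}

    object FilterSketch {
      // "category = 'a' OR category = 'b'", and its negation, as the
      // case-class trees a data source receives in buildScan.
      val either = Or(EqualTo("category", "a"), EqualTo("category", "b"))
      val neither = Not(either)
    }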
spark-sql_2.10-1.3.0.jar, ParquetRelation.class
package org.apache.spark.sql.parquet
ParquetRelation.attributeMap ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetRelation.attributeMap:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
ParquetRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/parquet/ParquetRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
spark-sql_2.10-1.3.0.jar, ParquetRelation2.class
package org.apache.spark.sql.parquet
ParquetRelation2.buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.buildScan:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/rdd/RDD;]
ParquetRelation2.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.canEqual:(Ljava/lang/Object;)Z]
ParquetRelation2.copy ( scala.collection.Seq<String> paths, scala.collection.immutable.Map<String,String> parameters, scala.Option<org.apache.spark.sql.types.StructType> maybeSchema, scala.Option<PartitionSpec> maybePartitionSpec, org.apache.spark.sql.SQLContext sqlContext ) : ParquetRelation2
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.copy:(Lscala/collection/Seq;Lscala/collection/immutable/Map;Lscala/Option;Lscala/Option;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/parquet/ParquetRelation2;]
ParquetRelation2.DEFAULT_PARTITION_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.DEFAULT_PARTITION_NAME:()Ljava/lang/String;]
ParquetRelation2.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.equals:(Ljava/lang/Object;)Z]
ParquetRelation2.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.hashCode:()I]
ParquetRelation2.insert ( org.apache.spark.sql.DataFrame data, boolean overwrite ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
ParquetRelation2.isPartitioned ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.isPartitioned:()Z]
ParquetRelation2.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.isTraceEnabled:()Z]
ParquetRelation2.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.log:()Lorg/slf4j/Logger;]
ParquetRelation2.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logDebug:(Lscala/Function0;)V]
ParquetRelation2.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logError:(Lscala/Function0;)V]
ParquetRelation2.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logInfo:(Lscala/Function0;)V]
ParquetRelation2.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logName ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logName:()Ljava/lang/String;]
ParquetRelation2.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logTrace:(Lscala/Function0;)V]
ParquetRelation2.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logWarning:(Lscala/Function0;)V]
ParquetRelation2.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.maybePartitionSpec ( ) : scala.Option<PartitionSpec>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.maybePartitionSpec:()Lscala/Option;]
ParquetRelation2.maybeSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.maybeSchema:()Lscala/Option;]
ParquetRelation2.MERGE_SCHEMA ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.MERGE_SCHEMA:()Ljava/lang/String;]
ParquetRelation2.newJobContext ( org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.mapreduce.JobID jobId ) : org.apache.hadoop.mapreduce.JobContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newJobContext:(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/mapreduce/JobID;)Lorg/apache/hadoop/mapreduce/JobContext;]
ParquetRelation2.newTaskAttemptContext ( org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID attemptId ) : org.apache.hadoop.mapreduce.TaskAttemptContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newTaskAttemptContext:(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/mapreduce/TaskAttemptID;)Lorg/apache/hadoop/mapreduce/TaskAttemptContext;]
ParquetRelation2.newTaskAttemptID ( String jtIdentifier, int jobId, boolean isMap, int taskId, int attemptId ) : org.apache.hadoop.mapreduce.TaskAttemptID
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newTaskAttemptID:(Ljava/lang/String;IZII)Lorg/apache/hadoop/mapreduce/TaskAttemptID;]
ParquetRelation2.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
ParquetRelation2.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
ParquetRelation2.ParquetRelation2..defaultPartitionName ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..defaultPartitionName:()Ljava/lang/String;]
ParquetRelation2.ParquetRelation2..isSummaryFile ( org.apache.hadoop.fs.Path file ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..isSummaryFile:(Lorg/apache/hadoop/fs/Path;)Z]
ParquetRelation2.ParquetRelation2..maybeMetastoreSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..maybeMetastoreSchema:()Lscala/Option;]
ParquetRelation2.ParquetRelation2..metadataCache ( ) : ParquetRelation2.MetadataCache
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..metadataCache:()Lorg/apache/spark/sql/parquet/ParquetRelation2$MetadataCache;]
ParquetRelation2.ParquetRelation2..shouldMergeSchemas ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..shouldMergeSchemas:()Z]
ParquetRelation2.parameters ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.parameters:()Lscala/collection/immutable/Map;]
ParquetRelation2.ParquetRelation2 ( scala.collection.Seq<String> paths, scala.collection.immutable.Map<String,String> parameters, scala.Option<org.apache.spark.sql.types.StructType> maybeSchema, scala.Option<PartitionSpec> maybePartitionSpec, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/parquet/ParquetRelation2."<init>":(Lscala/collection/Seq;Lscala/collection/immutable/Map;Lscala/Option;Lscala/Option;Lorg/apache/spark/sql/SQLContext;)V]
ParquetRelation2.partitionColumns ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitionColumns:()Lorg/apache/spark/sql/types/StructType;]
ParquetRelation2.partitions ( ) : scala.collection.Seq<Partition>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitions:()Lscala/collection/Seq;]
ParquetRelation2.partitionSpec ( ) : PartitionSpec
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitionSpec:()Lorg/apache/spark/sql/parquet/PartitionSpec;]
ParquetRelation2.paths ( ) : scala.collection.Seq<String>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.paths:()Lscala/collection/Seq;]
ParquetRelation2.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productArity:()I]
ParquetRelation2.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productElement:(I)Ljava/lang/Object;]
ParquetRelation2.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productIterator:()Lscala/collection/Iterator;]
ParquetRelation2.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productPrefix:()Ljava/lang/String;]
ParquetRelation2.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.schema:()Lorg/apache/spark/sql/types/StructType;]
ParquetRelation2.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sizeInBytes:()J]
ParquetRelation2.sparkContext ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sparkContext:()Lorg/apache/spark/SparkContext;]
ParquetRelation2.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
ParquetRelation2.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.toString:()Ljava/lang/String;]
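Note: the five-argument <init> above flattens a curried Scala constructor. A minimal sketch (illustrative only; ParquetRelation2 is internal to Spark SQL in 1.3.0, and the path is hypothetical):

    // Illustrative sketch only: ParquetRelation2 is internal to Spark SQL in
    // 1.3.0; the path below is hypothetical.
    package org.apache.spark.sql.parquet

    import org.apache.spark.sql.SQLContext

    object ParquetRelation2Sketch {
      def open(sqlContext: SQLContext): ParquetRelation2 =
        ParquetRelation2(
          paths = Seq("hdfs://host/warehouse/events"),
          parameters = Map(ParquetRelation2.MERGE_SCHEMA -> "true"),
          maybeSchema = None,
          maybePartitionSpec = None)(sqlContext)
    }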
spark-sql_2.10-1.3.0.jar, ParquetTableScan.class
package org.apache.spark.sql.parquet
ParquetTableScan.requestedPartitionOrdinals ( ) : scala.Tuple2<Object,Object>[ ]
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.requestedPartitionOrdinals:()[Lscala/Tuple2;]
spark-sql_2.10-1.3.0.jar, ParquetTest.class
package org.apache.spark.sql.parquet
ParquetTest.configuration ( ) [abstract] : org.apache.hadoop.conf.Configuration
[mangled: org/apache/spark/sql/parquet/ParquetTest.configuration:()Lorg/apache/hadoop/conf/Configuration;]
ParquetTest.makeParquetFile ( org.apache.spark.sql.DataFrame p1, java.io.File p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.makeParquetFile:(Lorg/apache/spark/sql/DataFrame;Ljava/io/File;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.makeParquetFile ( scala.collection.Seq<T> p1, java.io.File p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.makeParquetFile:(Lscala/collection/Seq;Ljava/io/File;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.makePartitionDir ( java.io.File p1, String p2, scala.collection.Seq<scala.Tuple2<String,Object>> p3 ) [abstract] : java.io.File
[mangled: org/apache/spark/sql/parquet/ParquetTest.makePartitionDir:(Ljava/io/File;Ljava/lang/String;Lscala/collection/Seq;)Ljava/io/File;]
ParquetTest.sqlContext ( ) [abstract] : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/parquet/ParquetTest.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
ParquetTest.withParquetDataFrame ( scala.collection.Seq<T> p1, scala.Function1<org.apache.spark.sql.DataFrame,scala.runtime.BoxedUnit> p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetDataFrame:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withParquetFile ( scala.collection.Seq<T> p1, scala.Function1<String,scala.runtime.BoxedUnit> p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetFile:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withParquetTable ( scala.collection.Seq<T> p1, String p2, scala.Function0<scala.runtime.BoxedUnit> p3, scala.reflect.ClassTag<T> p4, scala.reflect.api.TypeTags.TypeTag<T> p5 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetTable:(Lscala/collection/Seq;Ljava/lang/String;Lscala/Function0;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withSQLConf ( scala.collection.Seq<scala.Tuple2<String,String>> p1, scala.Function0<scala.runtime.BoxedUnit> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withSQLConf:(Lscala/collection/Seq;Lscala/Function0;)V]
ParquetTest.withTempDir ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> p1 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempDir:(Lscala/Function1;)V]
ParquetTest.withTempPath ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> p1 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempPath:(Lscala/Function1;)V]
ParquetTest.withTempTable ( String p1, scala.Function0<scala.runtime.BoxedUnit> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempTable:(Ljava/lang/String;Lscala/Function0;)V]
spark-sql_2.10-1.3.0.jar, ParquetTypeInfo.class
package org.apache.spark.sql.parquet
ParquetTypeInfo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.canEqual:(Ljava/lang/Object;)Z]
ParquetTypeInfo.copy ( parquet.schema.PrimitiveType.PrimitiveTypeName primitiveType, scala.Option<parquet.schema.OriginalType> originalType, scala.Option<parquet.schema.DecimalMetadata> decimalMetadata, scala.Option<Object> length ) : ParquetTypeInfo
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.copy:(Lparquet/schema/PrimitiveType$PrimitiveTypeName;Lscala/Option;Lscala/Option;Lscala/Option;)Lorg/apache/spark/sql/parquet/ParquetTypeInfo;]
ParquetTypeInfo.curried ( ) [static] : scala.Function1<parquet.schema.PrimitiveType.PrimitiveTypeName,scala.Function1<scala.Option<parquet.schema.OriginalType>,scala.Function1<scala.Option<parquet.schema.DecimalMetadata>,scala.Function1<scala.Option<Object>,ParquetTypeInfo>>>>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.curried:()Lscala/Function1;]
ParquetTypeInfo.decimalMetadata ( ) : scala.Option<parquet.schema.DecimalMetadata>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.decimalMetadata:()Lscala/Option;]
ParquetTypeInfo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.equals:(Ljava/lang/Object;)Z]
ParquetTypeInfo.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.hashCode:()I]
ParquetTypeInfo.length ( ) : scala.Option<Object>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.length:()Lscala/Option;]
ParquetTypeInfo.originalType ( ) : scala.Option<parquet.schema.OriginalType>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.originalType:()Lscala/Option;]
ParquetTypeInfo.ParquetTypeInfo ( parquet.schema.PrimitiveType.PrimitiveTypeName primitiveType, scala.Option<parquet.schema.OriginalType> originalType, scala.Option<parquet.schema.DecimalMetadata> decimalMetadata, scala.Option<Object> length )
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo."<init>":(Lparquet/schema/PrimitiveType$PrimitiveTypeName;Lscala/Option;Lscala/Option;Lscala/Option;)V]
ParquetTypeInfo.primitiveType ( ) : parquet.schema.PrimitiveType.PrimitiveTypeName
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.primitiveType:()Lparquet/schema/PrimitiveType$PrimitiveTypeName;]
ParquetTypeInfo.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productArity:()I]
ParquetTypeInfo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productElement:(I)Ljava/lang/Object;]
ParquetTypeInfo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productIterator:()Lscala/collection/Iterator;]
ParquetTypeInfo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productPrefix:()Ljava/lang/String;]
ParquetTypeInfo.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.toString:()Ljava/lang/String;]
ParquetTypeInfo.tupled ( ) [static] : scala.Function1<scala.Tuple4<parquet.schema.PrimitiveType.PrimitiveTypeName,scala.Option<parquet.schema.OriginalType>,scala.Option<parquet.schema.DecimalMetadata>,scala.Option<Object>>,ParquetTypeInfo>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.tupled:()Lscala/Function1;]
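Note: ParquetTypeInfo bundles the components of a Parquet primitive type. A minimal construction sketch (illustrative only; the class is internal to Spark's Parquet support):

    // Illustrative sketch only: ParquetTypeInfo is internal to Spark's
    // Parquet support.
    package org.apache.spark.sql.parquet

    import parquet.schema.PrimitiveType.PrimitiveTypeName

    object ParquetTypeInfoSketch {
      // A fixed-length 12-byte primitive (the shape of an INT96-style value):
      // no original type, no decimal metadata, an explicit length.
      val fixed12 = ParquetTypeInfo(
        PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY, None, None, Some(12))
    }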
spark-sql_2.10-1.3.0.jar, Partition.class
package org.apache.spark.sql.parquet
Partition.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/Partition.canEqual:(Ljava/lang/Object;)Z]
Partition.copy ( org.apache.spark.sql.Row values, String path ) : Partition
[mangled: org/apache/spark/sql/parquet/Partition.copy:(Lorg/apache/spark/sql/Row;Ljava/lang/String;)Lorg/apache/spark/sql/parquet/Partition;]
Partition.curried ( ) [static] : scala.Function1<org.apache.spark.sql.Row,scala.Function1<String,Partition>>
[mangled: org/apache/spark/sql/parquet/Partition.curried:()Lscala/Function1;]
Partition.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/Partition.equals:(Ljava/lang/Object;)Z]
Partition.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/Partition.hashCode:()I]
Partition.Partition ( org.apache.spark.sql.Row values, String path )
[mangled: org/apache/spark/sql/parquet/Partition."<init>":(Lorg/apache/spark/sql/Row;Ljava/lang/String;)V]
Partition.path ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.path:()Ljava/lang/String;]
Partition.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/Partition.productArity:()I]
Partition.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/Partition.productElement:(I)Ljava/lang/Object;]
Partition.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/Partition.productIterator:()Lscala/collection/Iterator;]
Partition.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.productPrefix:()Ljava/lang/String;]
Partition.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.toString:()Ljava/lang/String;]
Partition.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.Row,String>,Partition>
[mangled: org/apache/spark/sql/parquet/Partition.tupled:()Lscala/Function1;]
Partition.values ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/parquet/Partition.values:()Lorg/apache/spark/sql/Row;]
spark-sql_2.10-1.3.0.jar, PartitionSpec.class
package org.apache.spark.sql.parquet
PartitionSpec.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/PartitionSpec.canEqual:(Ljava/lang/Object;)Z]
PartitionSpec.copy ( org.apache.spark.sql.types.StructType partitionColumns, scala.collection.Seq<Partition> partitions ) : PartitionSpec
[mangled: org/apache/spark/sql/parquet/PartitionSpec.copy:(Lorg/apache/spark/sql/types/StructType;Lscala/collection/Seq;)Lorg/apache/spark/sql/parquet/PartitionSpec;]
PartitionSpec.curried ( ) [static] : scala.Function1<org.apache.spark.sql.types.StructType,scala.Function1<scala.collection.Seq<Partition>,PartitionSpec>>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.curried:()Lscala/Function1;]
PartitionSpec.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/PartitionSpec.equals:(Ljava/lang/Object;)Z]
PartitionSpec.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/PartitionSpec.hashCode:()I]
PartitionSpec.partitionColumns ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/PartitionSpec.partitionColumns:()Lorg/apache/spark/sql/types/StructType;]
PartitionSpec.partitions ( ) : scala.collection.Seq<Partition>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.partitions:()Lscala/collection/Seq;]
PartitionSpec.PartitionSpec ( org.apache.spark.sql.types.StructType partitionColumns, scala.collection.Seq<Partition> partitions )
[mangled: org/apache/spark/sql/parquet/PartitionSpec."<init>":(Lorg/apache/spark/sql/types/StructType;Lscala/collection/Seq;)V]
PartitionSpec.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productArity:()I]
PartitionSpec.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productElement:(I)Ljava/lang/Object;]
PartitionSpec.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productIterator:()Lscala/collection/Iterator;]
PartitionSpec.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productPrefix:()Ljava/lang/String;]
PartitionSpec.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/PartitionSpec.toString:()Ljava/lang/String;]
PartitionSpec.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.types.StructType,scala.collection.Seq<Partition>>,PartitionSpec>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.tupled:()Lscala/Function1;]
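Note: PartitionSpec pairs a schema for the partition columns with one Partition (a Row of values plus a path) per discovered directory. A minimal sketch (illustrative only; both classes are internal in 1.3.0, and the path is hypothetical):

    // Illustrative sketch only: Partition and PartitionSpec are internal to
    // Spark SQL in 1.3.0; the path below is hypothetical.
    package org.apache.spark.sql.parquet

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

    object PartitionSpecSketch {
      val partitionColumns = StructType(Seq(StructField("year", IntegerType)))
      val spec = PartitionSpec(
        partitionColumns,
        Seq(Partition(Row(2015), "hdfs://host/table/year=2015")))
    }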
spark-sql_2.10-1.3.0.jar, PartitionStatistics.class
package org.apache.spark.sql.columnar
PartitionStatistics.forAttribute ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<ColumnStatisticsSchema>
[mangled: org/apache/spark/sql/columnar/PartitionStatistics.forAttribute:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
PartitionStatistics.PartitionStatistics ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> tableSchema )
[mangled: org/apache/spark/sql/columnar/PartitionStatistics."<init>":(Lscala/collection/Seq;)V]
PartitionStatistics.schema ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/columnar/PartitionStatistics.schema:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, PhysicalRDD.class
package org.apache.spark.sql.execution
PhysicalRDD.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PhysicalRDD.canEqual:(Ljava/lang/Object;)Z]
PhysicalRDD.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/PhysicalRDD.children:()Lscala/collection/immutable/Nil$;]
PhysicalRDD.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/PhysicalRDD.children:()Lscala/collection/Seq;]
PhysicalRDD.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd ) : PhysicalRDD
[mangled: org/apache/spark/sql/execution/PhysicalRDD.copy:(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/execution/PhysicalRDD;]
PhysicalRDD.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>,PhysicalRDD>>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.curried:()Lscala/Function1;]
PhysicalRDD.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PhysicalRDD.equals:(Ljava/lang/Object;)Z]
PhysicalRDD.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.execute:()Lorg/apache/spark/rdd/RDD;]
PhysicalRDD.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/PhysicalRDD.hashCode:()I]
PhysicalRDD.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.output:()Lscala/collection/Seq;]
PhysicalRDD.PhysicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd )
[mangled: org/apache/spark/sql/execution/PhysicalRDD."<init>":(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;)V]
PhysicalRDD.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productArity:()I]
PhysicalRDD.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productElement:(I)Ljava/lang/Object;]
PhysicalRDD.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productIterator:()Lscala/collection/Iterator;]
PhysicalRDD.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productPrefix:()Ljava/lang/String;]
PhysicalRDD.rdd ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.rdd:()Lorg/apache/spark/rdd/RDD;]
PhysicalRDD.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>>,PhysicalRDD>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, PostgresQuirks.class
package org.apache.spark.sql.jdbc
PostgresQuirks.PostgresQuirks ( )
[mangled: org/apache/spark/sql/jdbc/PostgresQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, PreWriteCheck.class
package org.apache.spark.sql.sources
PreWriteCheck.andThen ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcID.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcID.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcIF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcIF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcII.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcII.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcIJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcIJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVD.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVF.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVI.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVJ.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.apply ( Object v1 ) : Object
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply:(Ljava/lang/Object;)Ljava/lang/Object;]
PreWriteCheck.apply ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
PreWriteCheck.apply.mcDD.sp ( double v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDD.sp:(D)D]
PreWriteCheck.apply.mcDF.sp ( float v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDF.sp:(F)D]
PreWriteCheck.apply.mcDI.sp ( int v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDI.sp:(I)D]
PreWriteCheck.apply.mcDJ.sp ( long v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDJ.sp:(J)D]
PreWriteCheck.apply.mcFD.sp ( double v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFD.sp:(D)F]
PreWriteCheck.apply.mcFF.sp ( float v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFF.sp:(F)F]
PreWriteCheck.apply.mcFI.sp ( int v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFI.sp:(I)F]
PreWriteCheck.apply.mcFJ.sp ( long v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFJ.sp:(J)F]
PreWriteCheck.apply.mcID.sp ( double v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcID.sp:(D)I]
PreWriteCheck.apply.mcIF.sp ( float v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcIF.sp:(F)I]
PreWriteCheck.apply.mcII.sp ( int v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcII.sp:(I)I]
PreWriteCheck.apply.mcIJ.sp ( long v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcIJ.sp:(J)I]
PreWriteCheck.apply.mcJD.sp ( double v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJD.sp:(D)J]
PreWriteCheck.apply.mcJF.sp ( float v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJF.sp:(F)J]
PreWriteCheck.apply.mcJI.sp ( int v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJI.sp:(I)J]
PreWriteCheck.apply.mcJJ.sp ( long v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJJ.sp:(J)J]
PreWriteCheck.apply.mcVD.sp ( double v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVD.sp:(D)V]
PreWriteCheck.apply.mcVF.sp ( float v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVF.sp:(F)V]
PreWriteCheck.apply.mcVI.sp ( int v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVI.sp:(I)V]
PreWriteCheck.apply.mcVJ.sp ( long v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVJ.sp:(J)V]
PreWriteCheck.apply.mcZD.sp ( double v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZD.sp:(D)Z]
PreWriteCheck.apply.mcZF.sp ( float v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZF.sp:(F)Z]
PreWriteCheck.apply.mcZI.sp ( int v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZI.sp:(I)Z]
PreWriteCheck.apply.mcZJ.sp ( long v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZJ.sp:(J)Z]
PreWriteCheck.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.canEqual:(Ljava/lang/Object;)Z]
PreWriteCheck.catalog ( ) : org.apache.spark.sql.catalyst.analysis.Catalog
[mangled: org/apache/spark/sql/sources/PreWriteCheck.catalog:()Lorg/apache/spark/sql/catalyst/analysis/Catalog;]
PreWriteCheck.compose ( scala.Function1<A,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcID.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcID.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcIF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcIF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcII.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcII.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcIJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcIJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.copy ( org.apache.spark.sql.catalyst.analysis.Catalog catalog ) : PreWriteCheck
[mangled: org/apache/spark/sql/sources/PreWriteCheck.copy:(Lorg/apache/spark/sql/catalyst/analysis/Catalog;)Lorg/apache/spark/sql/sources/PreWriteCheck;]
PreWriteCheck.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.equals:(Ljava/lang/Object;)Z]
PreWriteCheck.failAnalysis ( String msg ) : scala.runtime.Nothing.
[mangled: org/apache/spark/sql/sources/PreWriteCheck.failAnalysis:(Ljava/lang/String;)Lscala/runtime/Nothing$;]
PreWriteCheck.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.hashCode:()I]
PreWriteCheck.PreWriteCheck ( org.apache.spark.sql.catalyst.analysis.Catalog catalog )
[mangled: org/apache/spark/sql/sources/PreWriteCheck."<init>":(Lorg/apache/spark/sql/catalyst/analysis/Catalog;)V]
PreWriteCheck.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productArity:()I]
PreWriteCheck.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productElement:(I)Ljava/lang/Object;]
PreWriteCheck.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productIterator:()Lscala/collection/Iterator;]
PreWriteCheck.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productPrefix:()Ljava/lang/String;]
PreWriteCheck.toString ( ) : String
[mangled: org/apache/spark/sql/sources/PreWriteCheck.toString:()Ljava/lang/String;]
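Note: most entries above are compiler-generated Function1 specializations; the substantive API is apply(LogicalPlan), which fails analysis for invalid writes. A minimal sketch (illustrative only; PreWriteCheck is an internal analysis rule):

    // Illustrative sketch only: PreWriteCheck is an internal analysis rule.
    package org.apache.spark.sql.sources

    import org.apache.spark.sql.catalyst.analysis.Catalog
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

    object PreWriteCheckSketch {
      // Runs the rule: fails analysis if `plan` describes an invalid write
      // (for example, inserting into a relation that is also being read).
      def check(catalog: Catalog, plan: LogicalPlan): Unit =
        PreWriteCheck(catalog).apply(plan)
    }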
spark-sql_2.10-1.3.0.jar, PrunedFilteredScan.class
package org.apache.spark.sql.sources
PrunedFilteredScan.buildScan ( String[ ] p1, Filter[ ] p2 ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/PrunedFilteredScan.buildScan:([Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/rdd/RDD;]
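Note: PrunedFilteredScan is the public contract behind column pruning and filter pushdown. A minimal sketch of a relation implementing it (WordsRelation and its data are hypothetical; this toy scan ignores both pruning and pushdown):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.sources.{BaseRelation, Filter, PrunedFilteredScan}
    import org.apache.spark.sql.types.{StringType, StructField, StructType}

    // Hypothetical relation over two hard-coded rows; a real source would
    // translate `filters` into native predicates and prune to `requiredColumns`.
    class WordsRelation(val sqlContext: SQLContext)
        extends BaseRelation with PrunedFilteredScan {

      override def schema: StructType =
        StructType(Seq(StructField("word", StringType)))

      override def buildScan(
          requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] =
        sqlContext.sparkContext.parallelize(Seq(Row("foo"), Row("bar")))
    }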
spark-sql_2.10-1.3.0.jar, PythonUDF.class
package org.apache.spark.sql.execution
PythonUDF.broadcastVars ( ) : java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>
[mangled: org/apache/spark/sql/execution/PythonUDF.broadcastVars:()Ljava/util/List;]
PythonUDF.copy ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children ) : PythonUDF
[mangled: org/apache/spark/sql/execution/PythonUDF.copy:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/PythonUDF;]
PythonUDF.dataType ( ) : org.apache.spark.sql.types.DataType
[mangled: org/apache/spark/sql/execution/PythonUDF.dataType:()Lorg/apache/spark/sql/types/DataType;]
PythonUDF.eval ( org.apache.spark.sql.Row input ) : Object
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/Row;)Ljava/lang/Object;]
PythonUDF.eval ( org.apache.spark.sql.Row input ) : scala.runtime.Nothing.
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/Row;)Lscala/runtime/Nothing$;]
PythonUDF.PythonUDF ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children )
[mangled: org/apache/spark/sql/execution/PythonUDF."<init>":(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;Lscala/collection/Seq;)V]
spark-sql_2.10-1.3.0.jar, RefreshTable.class
package org.apache.spark.sql.sources
RefreshTable.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/RefreshTable.canEqual:(Ljava/lang/Object;)Z]
RefreshTable.copy ( String databaseName, String tableName ) : RefreshTable
[mangled: org/apache/spark/sql/sources/RefreshTable.copy:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/sources/RefreshTable;]
RefreshTable.curried ( ) [static] : scala.Function1<String,scala.Function1<String,RefreshTable>>
[mangled: org/apache/spark/sql/sources/RefreshTable.curried:()Lscala/Function1;]
RefreshTable.databaseName ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.databaseName:()Ljava/lang/String;]
RefreshTable.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/RefreshTable.equals:(Ljava/lang/Object;)Z]
RefreshTable.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/RefreshTable.hashCode:()I]
RefreshTable.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/RefreshTable.productArity:()I]
RefreshTable.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/RefreshTable.productElement:(I)Ljava/lang/Object;]
RefreshTable.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/RefreshTable.productIterator:()Lscala/collection/Iterator;]
RefreshTable.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.productPrefix:()Ljava/lang/String;]
RefreshTable.RefreshTable ( String databaseName, String tableName )
[mangled: org/apache/spark/sql/sources/RefreshTable."<init>":(Ljava/lang/String;Ljava/lang/String;)V]
RefreshTable.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/RefreshTable.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
RefreshTable.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.tableName:()Ljava/lang/String;]
RefreshTable.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,String>,RefreshTable>
[mangled: org/apache/spark/sql/sources/RefreshTable.tupled:()Lscala/Function1;]
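Note: RefreshTable is the runnable command that invalidates cached metadata for a named table. A minimal sketch (illustrative only; the command class is internal, and the table name is hypothetical):

    // Illustrative sketch only: RefreshTable is an internal command class;
    // the table name below is hypothetical.
    package org.apache.spark.sql.sources

    import org.apache.spark.sql.{Row, SQLContext}

    object RefreshTableSketch {
      // Invalidates cached metadata for default.my_table.
      def refresh(sqlContext: SQLContext): Seq[Row] =
        RefreshTable("default", "my_table").run(sqlContext)
    }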
spark-sql_2.10-1.3.0.jar, RelationProvider.class
package org.apache.spark.sql.sources
RelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/RelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/BaseRelation;]
spark-sql_2.10-1.3.0.jar, ResolvedDataSource.class
package org.apache.spark.sql.sources
ResolvedDataSource.apply ( org.apache.spark.sql.SQLContext p1, scala.Option<org.apache.spark.sql.types.StructType> p2, String p3, scala.collection.immutable.Map<String,String> p4 ) [static] : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.apply:(Lorg/apache/spark/sql/SQLContext;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.apply ( org.apache.spark.sql.SQLContext p1, String p2, org.apache.spark.sql.SaveMode p3, scala.collection.immutable.Map<String,String> p4, org.apache.spark.sql.DataFrame p5 ) [static] : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.apply:(Lorg/apache/spark/sql/SQLContext;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.canEqual:(Ljava/lang/Object;)Z]
ResolvedDataSource.copy ( Class<?> provider, BaseRelation relation ) : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.copy:(Ljava/lang/Class;Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.equals:(Ljava/lang/Object;)Z]
ResolvedDataSource.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.hashCode:()I]
ResolvedDataSource.lookupDataSource ( String p1 ) [static] : Class<?>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.lookupDataSource:(Ljava/lang/String;)Ljava/lang/Class;]
ResolvedDataSource.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productArity:()I]
ResolvedDataSource.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productElement:(I)Ljava/lang/Object;]
ResolvedDataSource.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productIterator:()Lscala/collection/Iterator;]
ResolvedDataSource.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productPrefix:()Ljava/lang/String;]
ResolvedDataSource.provider ( ) : Class<?>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.provider:()Ljava/lang/Class;]
ResolvedDataSource.relation ( ) : BaseRelation
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.relation:()Lorg/apache/spark/sql/sources/BaseRelation;]
ResolvedDataSource.ResolvedDataSource ( Class<?> provider, BaseRelation relation )
[mangled: org/apache/spark/sql/sources/ResolvedDataSource."<init>":(Ljava/lang/Class;Lorg/apache/spark/sql/sources/BaseRelation;)V]
ResolvedDataSource.toString ( ) : String
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.toString:()Ljava/lang/String;]
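ResolvedDataSource is the internal pairing of a provider Class with the BaseRelation it creates; lookupDataSource maps a provider name to that class, and the two apply overloads drive loading and saving. Since the object is private[sql] in source, client code reaches it through SQLContext.load (listed further down), which delegates to ResolvedDataSource.apply. A sketch of that public path; the file path is an illustrative assumption:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object LoadSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("load-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  // load(path, source): "parquet" is resolved to its relation provider via
  // ResolvedDataSource.lookupDataSource, then apply(...) builds the relation.
  val df = sqlContext.load("/tmp/people.parquet", "parquet")
  df.printSchema()
  sc.stop()
}
```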
spark-sql_2.10-1.3.0.jar, RowWriteSupport.class
package org.apache.spark.sql.parquet
RowWriteSupport.attributes ( ) : org.apache.spark.sql.catalyst.expressions.Attribute[ ]
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes:()[Lorg/apache/spark/sql/catalyst/expressions/Attribute;]
RowWriteSupport.attributes_.eq ( org.apache.spark.sql.catalyst.expressions.Attribute[ ] p1 ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes_.eq:([Lorg/apache/spark/sql/catalyst/expressions/Attribute;)V]
RowWriteSupport.write ( org.apache.spark.sql.Row record ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.write:(Lorg/apache/spark/sql/Row;)V]
RowWriteSupport.writeArray ( org.apache.spark.sql.types.ArrayType schema, scala.collection.Seq<Object> array ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeArray:(Lorg/apache/spark/sql/types/ArrayType;Lscala/collection/Seq;)V]
RowWriteSupport.writeDecimal ( org.apache.spark.sql.types.Decimal decimal, int precision ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeDecimal:(Lorg/apache/spark/sql/types/Decimal;I)V]
RowWriteSupport.writeMap ( org.apache.spark.sql.types.MapType schema, scala.collection.immutable.Map<?,Object> map ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeMap:(Lorg/apache/spark/sql/types/MapType;Lscala/collection/immutable/Map;)V]
RowWriteSupport.writePrimitive ( org.apache.spark.sql.types.DataType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writePrimitive:(Lorg/apache/spark/sql/types/DataType;Ljava/lang/Object;)V]
RowWriteSupport.writeStruct ( org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.Row struct ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeStruct:(Lorg/apache/spark/sql/types/StructType;Lorg/apache/spark/sql/Row;)V]
RowWriteSupport.writeTimestamp ( java.sql.Timestamp ts ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeTimestamp:(Ljava/sql/Timestamp;)V]
RowWriteSupport.writeValue ( org.apache.spark.sql.types.DataType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeValue:(Lorg/apache/spark/sql/types/DataType;Ljava/lang/Object;)V]
spark-sql_2.10-1.3.0.jar, RunnableCommand.class
package org.apache.spark.sql.execution
RunnableCommand.run ( org.apache.spark.sql.SQLContext p1 ) [abstract] : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/RunnableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, SaveMode.class
package org.apache.spark.sql
SaveMode.valueOf ( String name ) [static] : SaveMode
[mangled: org/apache/spark/sql/SaveMode.valueOf:(Ljava/lang/String;)Lorg/apache/spark/sql/SaveMode;]
SaveMode.values ( ) [static] : SaveMode[ ]
[mangled: org/apache/spark/sql/SaveMode.values:()[Lorg/apache/spark/sql/SaveMode;]
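SaveMode is a Java enum, so values() and valueOf(String) are the compiler-generated members listed above. A self-contained sketch (the constant set in the comment is the 1.3.0 one, to the best of my knowledge):

```scala
import org.apache.spark.sql.SaveMode

object SaveModeSketch extends App {
  // Prints Append, Overwrite, ErrorIfExists, Ignore
  SaveMode.values().foreach(m => println(m.name()))
  // valueOf is case-sensitive and throws IllegalArgumentException on unknown names
  assert(SaveMode.valueOf("Overwrite") == SaveMode.Overwrite)
}
```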
spark-sql_2.10-1.3.0.jar, ScalaBigDecimalSerializer.class
package org.apache.spark.sql.execution
ScalaBigDecimalSerializer.ScalaBigDecimalSerializer ( )
[mangled: org/apache/spark/sql/execution/ScalaBigDecimalSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, SchemaRelationProvider.class
package org.apache.spark.sql.sources
SchemaRelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2, org.apache.spark.sql.types.StructType p3 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/SchemaRelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/sources/BaseRelation;]
spark-sql_2.10-1.3.0.jar, SetCommand.class
package org.apache.spark.sql.execution
SetCommand.copy ( scala.Option<scala.Tuple2<String,scala.Option<String>>> kv, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output ) : SetCommand
[mangled: org/apache/spark/sql/execution/SetCommand.copy:(Lscala/Option;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/SetCommand;]
SetCommand.curried ( ) [static] : scala.Function1<scala.Option<scala.Tuple2<String,scala.Option<String>>>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SetCommand>>
[mangled: org/apache/spark/sql/execution/SetCommand.curried:()Lscala/Function1;]
SetCommand.kv ( ) : scala.Option<scala.Tuple2<String,scala.Option<String>>>
[mangled: org/apache/spark/sql/execution/SetCommand.kv:()Lscala/Option;]
SetCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/SetCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
SetCommand.SetCommand ( scala.Option<scala.Tuple2<String,scala.Option<String>>> kv, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output )
[mangled: org/apache/spark/sql/execution/SetCommand."<init>":(Lscala/Option;Lscala/collection/Seq;)V]
SetCommand.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.Option<scala.Tuple2<String,scala.Option<String>>>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>>,SetCommand>
[mangled: org/apache/spark/sql/execution/SetCommand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ShowTablesCommand.class
package org.apache.spark.sql.execution
ShowTablesCommand.andThen ( scala.Function1<ShowTablesCommand,A> p1 ) [static] : scala.Function1<scala.Option<String>,A>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
ShowTablesCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.canEqual:(Ljava/lang/Object;)Z]
ShowTablesCommand.compose ( scala.Function1<A,scala.Option<String>> p1 ) [static] : scala.Function1<A,ShowTablesCommand>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.compose:(Lscala/Function1;)Lscala/Function1;]
ShowTablesCommand.copy ( scala.Option<String> databaseName ) : ShowTablesCommand
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.copy:(Lscala/Option;)Lorg/apache/spark/sql/execution/ShowTablesCommand;]
ShowTablesCommand.databaseName ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.databaseName:()Lscala/Option;]
ShowTablesCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.equals:(Ljava/lang/Object;)Z]
ShowTablesCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.hashCode:()I]
ShowTablesCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.output:()Lscala/collection/Seq;]
ShowTablesCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productArity:()I]
ShowTablesCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productElement:(I)Ljava/lang/Object;]
ShowTablesCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productIterator:()Lscala/collection/Iterator;]
ShowTablesCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productPrefix:()Ljava/lang/String;]
ShowTablesCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
ShowTablesCommand.ShowTablesCommand ( scala.Option<String> databaseName )
[mangled: org/apache/spark/sql/execution/ShowTablesCommand."<init>":(Lscala/Option;)V]
spark-sql_2.10-1.3.0.jar, ShuffledHashJoin.class
package org.apache.spark.sql.execution.joins
ShuffledHashJoin.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
ShuffledHashJoin.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
ShuffledHashJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.canEqual:(Ljava/lang/Object;)Z]
ShuffledHashJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.children:()Lscala/collection/Seq;]
ShuffledHashJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : ShuffledHashJoin
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/ShuffledHashJoin;]
ShuffledHashJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,ShuffledHashJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.curried:()Lscala/Function1;]
ShuffledHashJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.equals:(Ljava/lang/Object;)Z]
ShuffledHashJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.execute:()Lorg/apache/spark/rdd/RDD;]
ShuffledHashJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.hashCode:()I]
ShuffledHashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
ShuffledHashJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ShuffledHashJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.leftKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.output:()Lscala/collection/Seq;]
ShuffledHashJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
ShuffledHashJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productArity:()I]
ShuffledHashJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productElement:(I)Ljava/lang/Object;]
ShuffledHashJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productIterator:()Lscala/collection/Iterator;]
ShuffledHashJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productPrefix:()Ljava/lang/String;]
ShuffledHashJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
ShuffledHashJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.requiredChildDistribution:()Lscala/collection/Seq;]
ShuffledHashJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ShuffledHashJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.rightKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.ShuffledHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
ShuffledHashJoin.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamedKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamSideKeyGenerator:()Lscala/Function0;]
ShuffledHashJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,package.BuildSide,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,ShuffledHashJoin>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, SparkPlan.class
package org.apache.spark.sql.execution
SparkPlan.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeCollect:()[Lorg/apache/spark/sql/Row;]
SparkPlan.executeTake ( int n ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeTake:(I)[Lorg/apache/spark/sql/Row;]
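executeCollect and executeTake run the physical plan and return rows directly to the driver; to the best of my knowledge, in 1.3.0 DataFrame.collect() bottoms out in executeCollect, and take(n) reaches executeTake(n) through the Limit operator, which scans partitions incrementally instead of materializing everything. A sketch exercising them through those public delegates:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object TakeSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("take-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  val df = sc.parallelize(1 to 1000).map(i => (i, i * i)).toDF("n", "square")
  df.take(5).foreach(println)   // backed by SparkPlan.executeTake(5)
  println(df.collect().length)  // backed by SparkPlan.executeCollect()
  sc.stop()
}
```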
spark-sql_2.10-1.3.0.jar, SparkSQLParser.class
package org.apache.spark.sql
SparkSQLParser.AS ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.AS:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.CACHE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.CACHE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.CLEAR ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.CLEAR:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.IN ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.IN:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.LAZY ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.LAZY:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SparkSQLParser..others ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..others:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..set ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..set:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..SetCommandParser ( ) : SparkSQLParser.SetCommandParser.
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..SetCommandParser:()Lorg/apache/spark/sql/SparkSQLParser$SetCommandParser$;]
SparkSQLParser.SparkSQLParser..show ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..show:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..uncache ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..uncache:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SET ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.SET:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SHOW ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.SHOW:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SparkSQLParser ( scala.Function1<String,catalyst.plans.logical.LogicalPlan> fallback )
[mangled: org/apache/spark/sql/SparkSQLParser."<init>":(Lscala/Function1;)V]
SparkSQLParser.start ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.start:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.TABLE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.TABLE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.TABLES ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.TABLES:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.UNCACHE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.UNCACHE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
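SparkSQLParser implements the small command grammar in front of the full SQL parser; the Keyword accessors above (CACHE, LAZY, UNCACHE, SET, SHOW, TABLES, ...) are its vocabulary, and anything it cannot parse goes to the fallback function supplied to its constructor. The class is private[sql] in source, so the observable surface is SQLContext.sql. A sketch driving the command grammar, with an illustrative temp table:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object CommandGrammarSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("cmd-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  sc.parallelize(Seq(1, 2, 3)).map(Tuple1(_)).toDF("n").registerTempTable("nums")
  sqlContext.sql("CACHE LAZY TABLE nums")                    // CACHE/LAZY/TABLE keywords
  sqlContext.sql("SHOW TABLES").collect().foreach(println)   // ShowTablesCommand
  sqlContext.sql("UNCACHE TABLE nums")                       // UncacheTableCommand
  sc.stop()
}
```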
spark-sql_2.10-1.3.0.jar, SparkStrategies.class
package org.apache.spark.sql.execution
SparkStrategies.DDLStrategy ( ) : SparkStrategies.DDLStrategy.
[mangled: org/apache/spark/sql/execution/SparkStrategies.DDLStrategy:()Lorg/apache/spark/sql/execution/SparkStrategies$DDLStrategy$;]
spark-sql_2.10-1.3.0.jar, SQLConf.class
package org.apache.spark.sql
SQLConf.broadcastTimeout ( ) : int
[mangled: org/apache/spark/sql/SQLConf.broadcastTimeout:()I]
SQLConf.columnNameOfCorruptRecord ( ) : String
[mangled: org/apache/spark/sql/SQLConf.columnNameOfCorruptRecord:()Ljava/lang/String;]
SQLConf.dataFrameEagerAnalysis ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.dataFrameEagerAnalysis:()Z]
SQLConf.defaultDataSourceName ( ) : String
[mangled: org/apache/spark/sql/SQLConf.defaultDataSourceName:()Ljava/lang/String;]
SQLConf.externalSortEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.externalSortEnabled:()Z]
SQLConf.inMemoryPartitionPruning ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.inMemoryPartitionPruning:()Z]
SQLConf.isParquetINT96AsTimestamp ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.isParquetINT96AsTimestamp:()Z]
SQLConf.parquetFilterPushDown ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.parquetFilterPushDown:()Z]
SQLConf.parquetUseDataSourceApi ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.parquetUseDataSourceApi:()Z]
SQLConf.SQLConf ( )
[mangled: org/apache/spark/sql/SQLConf."<init>":()V]
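The SQLConf getters above are typed views over string properties. Although conf() on SQLContext appears in this listing, it is protected[sql] in source, so client code normally goes through setConf/getConf; parquetFilterPushDown, for instance, reads the spark.sql.parquet.filterPushdown key (key name stated to the best of my knowledge). A sketch:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ConfSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("conf-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  // The typed SQLConf getter parquetFilterPushDown reflects this property:
  sqlContext.setConf("spark.sql.parquet.filterPushdown", "true")
  println(sqlContext.getConf("spark.sql.parquet.filterPushdown")) // "true"
  sc.stop()
}
```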
spark-sql_2.10-1.3.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.baseRelationToDataFrame ( sources.BaseRelation baseRelation ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.baseRelationToDataFrame:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.cacheManager ( ) : CacheManager
[mangled: org/apache/spark/sql/SQLContext.cacheManager:()Lorg/apache/spark/sql/CacheManager;]
SQLContext.checkAnalysis ( ) : catalyst.analysis.CheckAnalysis
[mangled: org/apache/spark/sql/SQLContext.checkAnalysis:()Lorg/apache/spark/sql/catalyst/analysis/CheckAnalysis;]
SQLContext.clearCache ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clearCache:()V]
SQLContext.conf ( ) : SQLConf
[mangled: org/apache/spark/sql/SQLContext.conf:()Lorg/apache/spark/sql/SQLConf;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, java.util.List<String> columns ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/util/List;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( scala.collection.Seq<A> data, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lscala/collection/Seq;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.ddlParser ( ) : sources.DDLParser
[mangled: org/apache/spark/sql/SQLContext.ddlParser:()Lorg/apache/spark/sql/sources/DDLParser;]
SQLContext.dropTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.dropTempTable:(Ljava/lang/String;)V]
SQLContext.emptyDataFrame ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.emptyDataFrame:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.experimental ( ) : ExperimentalMethods
[mangled: org/apache/spark/sql/SQLContext.experimental:()Lorg/apache/spark/sql/ExperimentalMethods;]
SQLContext.getSchema ( Class<?> beanClass ) : scala.collection.Seq<catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/SQLContext.getSchema:(Ljava/lang/Class;)Lscala/collection/Seq;]
SQLContext.implicits ( ) : SQLContext.implicits.
[mangled: org/apache/spark/sql/SQLContext.implicits:()Lorg/apache/spark/sql/SQLContext$implicits$;]
SQLContext.jdbc ( String url, String table ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String columnName, long lowerBound, long upperBound, int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;JJI)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String[ ] theParts ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer;]
SQLContext.parquetFile ( scala.collection.Seq<String> paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parquetFile ( String... paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parseDataType ( String dataTypeString ) : types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/types/DataType;]
SQLContext.registerDataFrameAsTable ( DataFrame df, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerDataFrameAsTable:(Lorg/apache/spark/sql/DataFrame;Ljava/lang/String;)V]
SQLContext.sql ( String sqlText ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.SQLContext ( org.apache.spark.api.java.JavaSparkContext sparkContext )
[mangled: org/apache/spark/sql/SQLContext."<init>":(Lorg/apache/spark/api/java/JavaSparkContext;)V]
SQLContext.sqlParser ( ) : SparkSQLParser
[mangled: org/apache/spark/sql/SQLContext.sqlParser:()Lorg/apache/spark/sql/SparkSQLParser;]
SQLContext.table ( String tableName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.tableNames ( ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:()[Ljava/lang/String;]
SQLContext.tableNames ( String databaseName ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:(Ljava/lang/String;)[Ljava/lang/String;]
SQLContext.tables ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.tables ( String databaseName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.udf ( ) : UDFRegistration
[mangled: org/apache/spark/sql/SQLContext.udf:()Lorg/apache/spark/sql/UDFRegistration;]
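A sketch exercising several of the SQLContext additions above: createDataFrame from a local Seq via TypeTag, registerDataFrameAsTable, sql returning a DataFrame, and the tableNames/dropTempTable catalog helpers. The Person case class and table name are illustrative assumptions:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

case class Person(name: String, age: Int)

object SQLContextSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("ctx-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)

  val df = sqlContext.createDataFrame(Seq(Person("alice", 30), Person("bob", 15)))
  sqlContext.registerDataFrameAsTable(df, "people")

  val adults = sqlContext.sql("SELECT name FROM people WHERE age >= 18")
  adults.collect().foreach(println)

  println(sqlContext.tableNames().mkString(", ")) // includes "people"
  sqlContext.dropTempTable("people")
  sc.stop()
}
```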
spark-sql_2.10-1.3.0.jar, TableScan.class
package org.apache.spark.sql.sources
TableScan.buildScan ( ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/TableScan.buildScan:()Lorg/apache/spark/rdd/RDD;]
spark-sql_2.10-1.3.0.jar, TakeOrdered.class
package org.apache.spark.sql.execution
TakeOrdered.ord ( ) : org.apache.spark.sql.catalyst.expressions.RowOrdering
[mangled: org/apache/spark/sql/execution/TakeOrdered.ord:()Lorg/apache/spark/sql/catalyst/expressions/RowOrdering;]
spark-sql_2.10-1.3.0.jar, UDFRegistration.class
package org.apache.spark.sql
UDFRegistration.UDFRegistration ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/UDFRegistration."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
spark-sql_2.10-1.3.0.jar, UncacheTableCommand.class
package org.apache.spark.sql.execution
UncacheTableCommand.andThen ( scala.Function1<UncacheTableCommand,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
UncacheTableCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.canEqual:(Ljava/lang/Object;)Z]
UncacheTableCommand.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,UncacheTableCommand>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.compose:(Lscala/Function1;)Lscala/Function1;]
UncacheTableCommand.copy ( String tableName ) : UncacheTableCommand
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/execution/UncacheTableCommand;]
UncacheTableCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.equals:(Ljava/lang/Object;)Z]
UncacheTableCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.hashCode:()I]
UncacheTableCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.output:()Lscala/collection/Seq;]
UncacheTableCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productArity:()I]
UncacheTableCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productElement:(I)Ljava/lang/Object;]
UncacheTableCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productIterator:()Lscala/collection/Iterator;]
UncacheTableCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productPrefix:()Ljava/lang/String;]
UncacheTableCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
UncacheTableCommand.tableName ( ) : String
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.tableName:()Ljava/lang/String;]
UncacheTableCommand.UncacheTableCommand ( String tableName )
[mangled: org/apache/spark/sql/execution/UncacheTableCommand."<init>":(Ljava/lang/String;)V]
spark-sql_2.10-1.3.0.jar, UniqueKeyHashedRelation.class
package org.apache.spark.sql.execution.joins
UniqueKeyHashedRelation.UniqueKeyHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.sql.Row> hashTable )
[mangled: org/apache/spark/sql/execution/joins/UniqueKeyHashedRelation."<init>":(Ljava/util/HashMap;)V]
spark-sql_2.10-1.3.0.jar, UserDefinedFunction.class
package org.apache.spark.sql
UserDefinedFunction.apply ( scala.collection.Seq<Column> exprs ) : Column
[mangled: org/apache/spark/sql/UserDefinedFunction.apply:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
UserDefinedFunction.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedFunction.canEqual:(Ljava/lang/Object;)Z]
UserDefinedFunction.copy ( Object f, types.DataType dataType ) : UserDefinedFunction
[mangled: org/apache/spark/sql/UserDefinedFunction.copy:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/UserDefinedFunction;]
UserDefinedFunction.curried ( ) [static] : scala.Function1<Object,scala.Function1<types.DataType,UserDefinedFunction>>
[mangled: org/apache/spark/sql/UserDefinedFunction.curried:()Lscala/Function1;]
UserDefinedFunction.dataType ( ) : types.DataType
[mangled: org/apache/spark/sql/UserDefinedFunction.dataType:()Lorg/apache/spark/sql/types/DataType;]
UserDefinedFunction.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedFunction.equals:(Ljava/lang/Object;)Z]
UserDefinedFunction.f ( ) : Object
[mangled: org/apache/spark/sql/UserDefinedFunction.f:()Ljava/lang/Object;]
UserDefinedFunction.hashCode ( ) : int
[mangled: org/apache/spark/sql/UserDefinedFunction.hashCode:()I]
UserDefinedFunction.productArity ( ) : int
[mangled: org/apache/spark/sql/UserDefinedFunction.productArity:()I]
UserDefinedFunction.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/UserDefinedFunction.productElement:(I)Ljava/lang/Object;]
UserDefinedFunction.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/UserDefinedFunction.productIterator:()Lscala/collection/Iterator;]
UserDefinedFunction.productPrefix ( ) : String
[mangled: org/apache/spark/sql/UserDefinedFunction.productPrefix:()Ljava/lang/String;]
UserDefinedFunction.toString ( ) : String
[mangled: org/apache/spark/sql/UserDefinedFunction.toString:()Ljava/lang/String;]
UserDefinedFunction.tupled ( ) [static] : scala.Function1<scala.Tuple2<Object,types.DataType>,UserDefinedFunction>
[mangled: org/apache/spark/sql/UserDefinedFunction.tupled:()Lscala/Function1;]
UserDefinedFunction.UserDefinedFunction ( Object f, types.DataType dataType )
[mangled: org/apache/spark/sql/UserDefinedFunction."<init>":(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)V]
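UserDefinedFunction pairs an untyped function value f with its result DataType, and apply(...) wraps a set of Column expressions into a new Column, per the signatures above. functions.udf is the usual way to obtain one in 1.3.0. A sketch with illustrative data:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.udf

object UdfSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("udf-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  val df = sc.parallelize(Seq("ada", "grace")).map(Tuple1(_)).toDF("name")
  val toUpper = udf((s: String) => s.toUpperCase)   // yields a UserDefinedFunction
  println(toUpper.dataType)                         // StringType, inferred from the closure
  df.select(toUpper(df("name"))).collect().foreach(println)
  sc.stop()
}
```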
spark-sql_2.10-1.3.0.jar, UserDefinedPythonFunction.class
package org.apache.spark.sql
UserDefinedPythonFunction.accumulator ( ) : org.apache.spark.Accumulator<java.util.List<byte[ ]>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.accumulator:()Lorg/apache/spark/Accumulator;]
UserDefinedPythonFunction.apply ( scala.collection.Seq<Column> exprs ) : Column
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.apply:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
UserDefinedPythonFunction.broadcastVars ( ) : java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.broadcastVars:()Ljava/util/List;]
UserDefinedPythonFunction.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.canEqual:(Ljava/lang/Object;)Z]
UserDefinedPythonFunction.command ( ) : byte[ ]
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.command:()[B]
UserDefinedPythonFunction.copy ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, types.DataType dataType ) : UserDefinedPythonFunction
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.copy:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/UserDefinedPythonFunction;]
UserDefinedPythonFunction.curried ( ) [static] : scala.Function1<String,scala.Function1<byte[ ],scala.Function1<java.util.Map<String,String>,scala.Function1<java.util.List<String>,scala.Function1<String,scala.Function1<java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,scala.Function1<org.apache.spark.Accumulator<java.util.List<byte[ ]>>,scala.Function1<types.DataType,UserDefinedPythonFunction>>>>>>>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.curried:()Lscala/Function1;]
UserDefinedPythonFunction.dataType ( ) : types.DataType
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.dataType:()Lorg/apache/spark/sql/types/DataType;]
UserDefinedPythonFunction.envVars ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.envVars:()Ljava/util/Map;]
UserDefinedPythonFunction.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.equals:(Ljava/lang/Object;)Z]
UserDefinedPythonFunction.hashCode ( ) : int
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.hashCode:()I]
UserDefinedPythonFunction.name ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.name:()Ljava/lang/String;]
UserDefinedPythonFunction.productArity ( ) : int
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productArity:()I]
UserDefinedPythonFunction.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productElement:(I)Ljava/lang/Object;]
UserDefinedPythonFunction.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productIterator:()Lscala/collection/Iterator;]
UserDefinedPythonFunction.productPrefix ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productPrefix:()Ljava/lang/String;]
UserDefinedPythonFunction.pythonExec ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.pythonExec:()Ljava/lang/String;]
UserDefinedPythonFunction.pythonIncludes ( ) : java.util.List<String>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.pythonIncludes:()Ljava/util/List;]
UserDefinedPythonFunction.toString ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.toString:()Ljava/lang/String;]
UserDefinedPythonFunction.tupled ( ) [static] : scala.Function1<scala.Tuple8<String,byte[ ],java.util.Map<String,String>,java.util.List<String>,String,java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,org.apache.spark.Accumulator<java.util.List<byte[ ]>>,types.DataType>,UserDefinedPythonFunction>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.tupled:()Lscala/Function1;]
UserDefinedPythonFunction.UserDefinedPythonFunction ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, types.DataType dataType )
[mangled: org/apache/spark/sql/UserDefinedPythonFunction."<init>":(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;)V]
Problems with Data Types, High Severity (119)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql
CachedData (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

Affected methods (14):
CachedData ( catalyst.plans.logical.LogicalPlan, columnar.InMemoryRelation ) This constructor is from 'CachedData' class.
cachedRepresentation ( ) This method is from 'CachedData' class.
canEqual ( java.lang.Object ) This method is from 'CachedData' class.
copy ( catalyst.plans.logical.LogicalPlan, columnar.InMemoryRelation ) This method is from 'CachedData' class.
curried ( ) This method is from 'CachedData' class.
equals ( java.lang.Object ) This method is from 'CachedData' class.
hashCode ( ) This method is from 'CachedData' class.
plan ( ) This method is from 'CachedData' class.
productArity ( ) This method is from 'CachedData' class.
productElement ( int ) This method is from 'CachedData' class.
productIterator ( ) This method is from 'CachedData' class.
productPrefix ( ) This method is from 'CachedData' class.
toString ( ) This method is from 'CachedData' class.
tupled ( ) This method is from 'CachedData' class.
CacheManager (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

Affected methods (9):
CacheManager ( SQLContext ) This constructor is from 'CacheManager' class.
cacheQuery ( DataFrame, scala.Option<java.lang.String>, org.apache.spark.storage.StorageLevel ) This method is from 'CacheManager' class.
cacheTable ( java.lang.String ) This method is from 'CacheManager' class.
clearCache ( ) This method is from 'CacheManager' class.
invalidateCache ( catalyst.plans.logical.LogicalPlan ) This method is from 'CacheManager' class.
isCached ( java.lang.String ) This method is from 'CacheManager' class.
tryUncacheQuery ( DataFrame, boolean ) This method is from 'CacheManager' class.
uncacheTable ( java.lang.String ) This method is from 'CacheManager' class.
useCachedData ( catalyst.plans.logical.LogicalPlan ) This method is from 'CacheManager' class.
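CacheManager centralizes the 1.3.0 cache bookkeeping that SQLContext previously mixed in; in source it sits behind the protected[sql] cacheManager accessor, so the public delegates on SQLContext remain the practical surface. A sketch with an illustrative table name:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object CacheSketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("cache-sketch").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  sc.parallelize(1 to 10).map(Tuple1(_)).toDF("n").registerTempTable("nums")
  sqlContext.cacheTable("nums")          // delegates to CacheManager.cacheTable
  println(sqlContext.isCached("nums"))   // true
  sqlContext.uncacheTable("nums")        // delegates to CacheManager.uncacheTable
  sc.stop()
}
```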
Column (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

Affected methods (45):
and ( Column ) This method is from 'Column' class.
apply ( java.lang.String ) This method is from 'Column' class.
apply ( catalyst.expressions.Expression ) This method is from 'Column' class.
as ( java.lang.String ) This method is from 'Column' class.
as ( scala.Symbol ) This method is from 'Column' class.
asc ( ) This method is from 'Column' class.
cast ( java.lang.String ) This method is from 'Column' class.
cast ( types.DataType ) This method is from 'Column' class.
Column ( java.lang.String ) This constructor is from 'Column' class.
Column ( catalyst.expressions.Expression ) This constructor is from 'Column' class.
contains ( java.lang.Object ) This method is from 'Column' class.
desc ( ) This method is from 'Column' class.
divide ( java.lang.Object ) This method is from 'Column' class.
endsWith ( java.lang.String ) This method is from 'Column' class.
endsWith ( Column ) This method is from 'Column' class.
eqNullSafe ( java.lang.Object ) This method is from 'Column' class.
equalTo ( java.lang.Object ) This method is from 'Column' class.
explain ( boolean ) This method is from 'Column' class.
expr ( ) This method is from 'Column' class.
geq ( java.lang.Object ) This method is from 'Column' class.
getField ( java.lang.String ) This method is from 'Column' class.
getItem ( int ) This method is from 'Column' class.
gt ( java.lang.Object ) This method is from 'Column' class.
in ( Column... ) This method is from 'Column' class.
in ( scala.collection.Seq<Column> ) This method is from 'Column' class.
isNotNull ( ) This method is from 'Column' class.
isNull ( ) This method is from 'Column' class.
leq ( java.lang.Object ) This method is from 'Column' class.
like ( java.lang.String ) This method is from 'Column' class.
lt ( java.lang.Object ) This method is from 'Column' class.
minus ( java.lang.Object ) This method is from 'Column' class.
mod ( java.lang.Object ) This method is from 'Column' class.
multiply ( java.lang.Object ) This method is from 'Column' class.
notEqual ( java.lang.Object ) This method is from 'Column' class.
or ( Column ) This method is from 'Column' class.
plus ( java.lang.Object ) This method is from 'Column' class.
rlike ( java.lang.String ) This method is from 'Column' class.
startsWith ( java.lang.String ) This method is from 'Column' class.
startsWith ( Column ) This method is from 'Column' class.
substr ( int, int ) This method is from 'Column' class.
substr ( Column, Column ) This method is from 'Column' class.
toString ( ) This method is from 'Column' class.
unapply ( Column ) This method is from 'Column' class.
unary_.bang ( ) This method is from 'Column' class.
unary_.minus ( ) This method is from 'Column' class.
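Because Column here (and DataFrame below) exist only on the 1.3.0 side, client code compiled against 1.3.0 fails on a 1.1.0 classpath as soon as a method referencing them is linked or executed. A sketch of the failure mode; the harness around the listed methods is illustrative:

```scala
import org.apache.spark.sql.SQLContext

object RemovalSketch {
  // Compiles against spark-sql_2.10-1.3.0. With 1.1.0 on the runtime classpath,
  // the JVM cannot resolve org/apache/spark/sql/DataFrame or .../Column and
  // raises NoClassDefFoundError when this method is linked or first executed.
  def adultNames(sqlContext: SQLContext): Array[String] = {
    val df = sqlContext.table("people")   // returns DataFrame: 1.3.0-only
    df.filter(df("age").geq(18))          // builds a Column:   1.3.0-only
      .collect()
      .map(_.getString(0))
  }

  def main(args: Array[String]): Unit = {
    val sqlContext: SQLContext = ??? // wire up a real context here
    try println(adultNames(sqlContext).mkString(", "))
    catch {
      case e: NoClassDefFoundError => println(s"incompatible spark-sql on classpath: $e")
    }
  }
}
```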
ColumnName (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

Affected methods (1):
ColumnName ( java.lang.String ) This constructor is from 'ColumnName' class.
DataFrame (1)

| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

Affected methods (114):
agg ( java.util.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
agg ( Column, Column... ) This method is from 'DataFrame' class.
agg ( Column, scala.collection.Seq<Column> ) This method is from 'DataFrame' class.
agg ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
agg ( scala.Tuple2<java.lang.String,java.lang.String>, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> ) This method is from 'DataFrame' class.
apply ( java.lang.String ) This method is from 'DataFrame' class.
as ( java.lang.String ) This method is from 'DataFrame' class.
as ( scala.Symbol ) This method is from 'DataFrame' class.
cache ( ) This method is from 'DataFrame' class.
cache ( ) This method is from 'DataFrame' class.
col ( java.lang.String ) This method is from 'DataFrame' class.
collect ( ) This method is from 'DataFrame' class.
collect ( ) This method is from 'DataFrame' class.
collectAsList ( ) This method is from 'DataFrame' class.
columns ( ) This method is from 'DataFrame' class.
count ( ) This method is from 'DataFrame' class.
createJDBCTable ( java.lang.String, java.lang.String, boolean ) This method is from 'DataFrame' class.
DataFrame ( SQLContext, catalyst.plans.logical.LogicalPlan ) This constructor is from 'DataFrame' class.
DataFrame ( SQLContext, SQLContext.QueryExecution ) This constructor is from 'DataFrame' class.
distinct ( ) This method is from 'DataFrame' class.
dtypes ( ) This method is from 'DataFrame' class.
except ( DataFrame ) This method is from 'DataFrame' class.
explain ( ) This method is from 'DataFrame' class.
explain ( boolean ) This method is from 'DataFrame' class.
explode ( java.lang.String, java.lang.String, scala.Function1<A,scala.collection.TraversableOnce<B>>, scala.reflect.api.TypeTags.TypeTag<B> ) This method is from 'DataFrame' class.
explode ( scala.collection.Seq<Column>, scala.Function1<Row,scala.collection.TraversableOnce<A>>, scala.reflect.api.TypeTags.TypeTag<A> ) This method is from 'DataFrame' class.
filter ( java.lang.String ) This method is from 'DataFrame' class.
filter ( Column ) This method is from 'DataFrame' class.
first ( ) This method is from 'DataFrame' class.
first ( ) This method is from 'DataFrame' class.
flatMap ( scala.Function1<Row,scala.collection.TraversableOnce<R>>, scala.reflect.ClassTag<R> ) This method is from 'DataFrame' class.
foreach ( scala.Function1<Row,scala.runtime.BoxedUnit> ) This method is from 'DataFrame' class.
foreachPartition ( scala.Function1<scala.collection.Iterator<Row>,scala.runtime.BoxedUnit> ) This method is from 'DataFrame' class.
groupBy ( java.lang.String, java.lang.String... ) This method is from 'DataFrame' class.
groupBy ( java.lang.String, scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
groupBy ( Column... ) This method is from 'DataFrame' class.
groupBy ( scala.collection.Seq<Column> ) This method is from 'DataFrame' class.
head ( ) This method is from 'DataFrame' class.
head ( int ) This method is from 'DataFrame' class.
insertInto ( java.lang.String ) This method is from 'DataFrame' class.
insertInto ( java.lang.String, boolean ) This method is from 'DataFrame' class.
insertIntoJDBC ( java.lang.String, java.lang.String, boolean ) This method is from 'DataFrame' class.
intersect ( DataFrame ) This method is from 'DataFrame' class.
isLocal ( ) This method is from 'DataFrame' class.
javaRDD ( ) This method is from 'DataFrame' class.
javaToPython ( ) This method is from 'DataFrame' class.
join ( DataFrame ) This method is from 'DataFrame' class.
join ( DataFrame, Column ) This method is from 'DataFrame' class.
join ( DataFrame, Column, java.lang.String ) This method is from 'DataFrame' class.
limit ( int ) This method is from 'DataFrame' class.
logicalPlan ( ) This method is from 'DataFrame' class.
map ( scala.Function1<Row,R>, scala.reflect.ClassTag<R> ) This method is from 'DataFrame' class.
mapPartitions ( scala.Function1<scala.collection.Iterator<Row>,scala.collection.Iterator<R>>, scala.reflect.ClassTag<R> ) This method is from 'DataFrame' class.
numericColumns ( ) This method is from 'DataFrame' class.
orderBy ( java.lang.String, java.lang.String... ) This method is from 'DataFrame' class.
orderBy ( java.lang.String, scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
orderBy ( Column... ) This method is from 'DataFrame' class.
orderBy ( scala.collection.Seq<Column> ) This method is from 'DataFrame' class.
persist ( ) This method is from 'DataFrame' class.
persist ( ) This method is from 'DataFrame' class.
persist ( org.apache.spark.storage.StorageLevel ) This method is from 'DataFrame' class.
persist ( org.apache.spark.storage.StorageLevel ) This method is from 'DataFrame' class.
printSchema ( ) This method is from 'DataFrame' class.
queryExecution ( ) This method is from 'DataFrame' class.
rdd ( ) This method is from 'DataFrame' class.
registerTempTable ( java.lang.String ) This method is from 'DataFrame' class.
repartition ( int ) This method is from 'DataFrame' class.
resolve ( java.lang.String ) This method is from 'DataFrame' class.
sample ( boolean, double ) This method is from 'DataFrame' class.
sample ( boolean, double, long ) This method is from 'DataFrame' class.
save ( java.lang.String ) This method is from 'DataFrame' class.
save ( java.lang.String, java.lang.String ) This method is from 'DataFrame' class.
save ( java.lang.String, java.lang.String, SaveMode ) This method is from 'DataFrame' class.
save ( java.lang.String, SaveMode ) This method is from 'DataFrame' class.
save ( java.lang.String, SaveMode, java.util.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
save ( java.lang.String, SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
saveAsParquetFile ( java.lang.String ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String, java.lang.String ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String, java.lang.String, SaveMode ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String, java.lang.String, SaveMode, java.util.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String, java.lang.String, SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This method is from 'DataFrame' class.
saveAsTable ( java.lang.String, SaveMode ) This method is from 'DataFrame' class.
schema ( ) This method is from 'DataFrame' class.
select ( java.lang.String, java.lang.String... ) This method is from 'DataFrame' class.
select ( java.lang.String, scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
select ( Column... ) This method is from 'DataFrame' class.
select ( scala.collection.Seq<Column> ) This method is from 'DataFrame' class.
selectExpr ( java.lang.String... ) This method is from 'DataFrame' class.
selectExpr ( scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
show ( ) This method is from 'DataFrame' class.
show ( int ) This method is from 'DataFrame' class.
showString ( int ) This method is from 'DataFrame' class.
sort ( java.lang.String, java.lang.String... ) This method is from 'DataFrame' class.
sort ( java.lang.String, scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
sort ( Column... ) This method is from 'DataFrame' class.
sort ( scala.collection.Seq<Column> ) This method is from 'DataFrame' class.
sqlContext ( ) This method is from 'DataFrame' class.
take ( int ) This method is from 'DataFrame' class.
take ( int ) This method is from 'DataFrame' class.
toDF ( ) This method is from 'DataFrame' class.
toDF ( java.lang.String... ) This method is from 'DataFrame' class.
toDF ( scala.collection.Seq<java.lang.String> ) This method is from 'DataFrame' class.
toJavaRDD ( ) This method is from 'DataFrame' class.
toJSON ( ) This method is from 'DataFrame' class.
toString ( )This method is from 'DataFrame' class.
unionAll ( DataFrame )This method is from 'DataFrame' class.
unpersist ( )This method is from 'DataFrame' class.
unpersist ( )This method is from 'DataFrame' class.
unpersist ( boolean )This method is from 'DataFrame' class.
unpersist ( boolean )This method is from 'DataFrame' class.
where ( Column )This method is from 'DataFrame' class.
withColumn ( java.lang.String, Column )This method is from 'DataFrame' class.
withColumnRenamed ( java.lang.String, java.lang.String )This method is from 'DataFrame' class.
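Every entry above fails for the same reason: the DataFrame class itself is absent from 1.1.0. As a concrete illustration of the effect, here is a minimal sketch of client code that compiles against 1.3.0 but cannot be linked against 1.1.0 (the countUsers helper and the table name are hypothetical; only DataFrame, SQLContext, sql and count come from this report's API surface):

```java
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

public class DataFrameClient {
    // Compiles fine against spark-sql 1.3.0, where DataFrame exists.
    static long countUsers(SQLContext ctx) {
        DataFrame df = ctx.sql("SELECT * FROM users");
        return df.count();
    }
}
```

Run with the 1.1.0 jar on the classpath, the JVM cannot resolve the symbolic reference to org/apache/spark/sql/DataFrame while linking countUsers and throws a NoClassDefFoundError before a single statement executes.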
[+] DataFrameHolder (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (15)
andThen ( scala.Function1<DataFrameHolder,A> ) This method is from the 'DataFrameHolder' class.
canEqual ( java.lang.Object ) This method is from the 'DataFrameHolder' class.
compose ( scala.Function1<A,DataFrame> ) This method is from the 'DataFrameHolder' class.
copy ( DataFrame ) This method is from the 'DataFrameHolder' class.
DataFrameHolder ( DataFrame ) This constructor is from the 'DataFrameHolder' class.
df ( ) This method is from the 'DataFrameHolder' class.
equals ( java.lang.Object ) This method is from the 'DataFrameHolder' class.
hashCode ( ) This method is from the 'DataFrameHolder' class.
productArity ( ) This method is from the 'DataFrameHolder' class.
productElement ( int ) This method is from the 'DataFrameHolder' class.
productIterator ( ) This method is from the 'DataFrameHolder' class.
productPrefix ( ) This method is from the 'DataFrameHolder' class.
toDF ( ) This method is from the 'DataFrameHolder' class.
toDF ( scala.collection.Seq<java.lang.String> ) This method is from the 'DataFrameHolder' class.
toString ( ) This method is from the 'DataFrameHolder' class.
[+] ExperimentalMethods (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
ExperimentalMethods ( SQLContext ) This constructor is from the 'ExperimentalMethods' class.
extraStrategies ( ) This method is from the 'ExperimentalMethods' class.
[+] GroupedData (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (6)
agg ( java.util.Map<java.lang.String,java.lang.String> ) This method is from the 'GroupedData' class.
agg ( Column, scala.collection.Seq<Column> ) This method is from the 'GroupedData' class.
agg ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This method is from the 'GroupedData' class.
agg ( scala.Tuple2<java.lang.String,java.lang.String>, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> ) This method is from the 'GroupedData' class.
count ( ) This method is from the 'GroupedData' class.
GroupedData ( DataFrame, scala.collection.Seq<catalyst.expressions.Expression> ) This constructor is from the 'GroupedData' class.
[+] SaveMode (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
valueOf ( java.lang.String ) This method is from the 'SaveMode' class.
values ( ) This method is from the 'SaveMode' class.
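SaveMode is an enum, but its removal behaves like any other missing class: every signature that mentions it becomes unlinkable. A minimal sketch (the overwrite helper is hypothetical; DataFrame.save ( java.lang.String, SaveMode ) is listed among the removed DataFrame methods above, and SaveMode.Overwrite is one of the 1.3.0 enum constants):

```java
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SaveMode;

public class SaveClient {
    // Resolving either DataFrame or SaveMode fails on 1.1.0, so this
    // method raises a NoClassDefFoundError at link time, not at call time.
    static void overwrite(DataFrame df, String path) {
        df.save(path, SaveMode.Overwrite);
    }
}
```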
[+] SparkSQLParser (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (17)
AS ( ) This method is from the 'SparkSQLParser' class.
CACHE ( ) This method is from the 'SparkSQLParser' class.
CLEAR ( ) This method is from the 'SparkSQLParser' class.
IN ( ) This method is from the 'SparkSQLParser' class.
LAZY ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser..others ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser..set ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser..SetCommandParser ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser..show ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser..uncache ( ) This method is from the 'SparkSQLParser' class.
SET ( ) This method is from the 'SparkSQLParser' class.
SHOW ( ) This method is from the 'SparkSQLParser' class.
SparkSQLParser ( scala.Function1<java.lang.String,catalyst.plans.logical.LogicalPlan> ) This constructor is from the 'SparkSQLParser' class.
start ( ) This method is from the 'SparkSQLParser' class.
TABLE ( ) This method is from the 'SparkSQLParser' class.
TABLES ( ) This method is from the 'SparkSQLParser' class.
UNCACHE ( ) This method is from the 'SparkSQLParser' class.
[+] SQLConf (1)
| Change | Effect |
---|
1 | This class became an interface. | A client program may be interrupted by an IncompatibleClassChangeError or InstantiationError exception, depending on how this class is used. |
[+] affected methods (12)
autoBroadcastJoinThreshold ( ) This method is from the 'SQLConf' class.
codegenEnabled ( ) This method is from the 'SQLConf' class.
defaultSizeInBytes ( ) This method is from the 'SQLConf' class.
dialect ( ) This method is from the 'SQLConf' class.
getAllConfs ( ) This method is from the 'SQLConf' class.
getConf ( java.lang.String ) This method is from the 'SQLConf' class.
getConf ( java.lang.String, java.lang.String ) This method is from the 'SQLConf' class.
isParquetBinaryAsString ( ) This method is from the 'SQLConf' class.
numShufflePartitions ( ) This method is from the 'SQLConf' class.
parquetCompressionCodec ( ) This method is from the 'SQLConf' class.
setConf ( java.lang.String, java.lang.String ) This method is from the 'SQLConf' class.
setConf ( java.util.Properties ) This method is from the 'SQLConf' class.
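The class-to-interface change is subtler than a removal: the name still loads, but bytecode shapes that are only legal against a class stop linking. A minimal sketch, assuming a context in which SQLConf can be instantiated directly (its constructor may not be accessible to real client code; only the SQLConf, setConf and numShufflePartitions names come from this report):

```java
import org.apache.spark.sql.SQLConf;

public class ConfClient {
    public static void main(String[] args) {
        // Against 1.3.0 this compiles to a `new org/apache/spark/sql/SQLConf`
        // instruction plus an invokespecial on its constructor.
        SQLConf conf = new SQLConf();
        conf.setConf("spark.sql.shuffle.partitions", "8");
        System.out.println(conf.numShufflePartitions());
    }
}
```

Against 1.1.0 the `new` instruction targets an interface, so the JVM raises an IncompatibleClassChangeError (or InstantiationError, depending on where linking catches it), and the virtual calls to setConf and numShufflePartitions would likewise need to be re-linked as interface calls.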
[+] SQLContext.QueryExecution (1)
| Change | Effect |
---|
1 | This class became abstract. | A client program may be interrupted by an InstantiationError exception. |
[+] affected methods (2)
executePlan ( catalyst.plans.logical.LogicalPlan ) Return value of this method has type 'SQLContext.QueryExecution'.
executeSql ( java.lang.String ) Return value of this method has type 'SQLContext.QueryExecution'.
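Here only instantiation is affected: code that merely receives a QueryExecution from executePlan or executeSql keeps linking, because both versions define the type. A minimal sketch, ignoring Scala access modifiers (which this JVM-level report also ignores):

```java
import org.apache.spark.sql.SQLContext;

public class QueryExecutionClient {
    // Still links on 1.1.0: the return type exists in both versions and
    // no `new` instruction on QueryExecution is emitted here.
    static Object plan(SQLContext ctx) {
        return ctx.executeSql("SELECT 1");
    }
    // By contrast, a client compiled against the concrete 1.3.0 class with
    // its own `new SQLContext.QueryExecution(...)` carries a
    // `new org/apache/spark/sql/SQLContext$QueryExecution` instruction,
    // and linking that against the abstract 1.1.0 class throws
    // java.lang.InstantiationError.
}
```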
[+] UserDefinedFunction (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (15)
apply ( scala.collection.Seq<Column> ) This method is from the 'UserDefinedFunction' class.
canEqual ( java.lang.Object ) This method is from the 'UserDefinedFunction' class.
copy ( java.lang.Object, types.DataType ) This method is from the 'UserDefinedFunction' class.
curried ( ) This method is from the 'UserDefinedFunction' class.
dataType ( ) This method is from the 'UserDefinedFunction' class.
equals ( java.lang.Object ) This method is from the 'UserDefinedFunction' class.
f ( ) This method is from the 'UserDefinedFunction' class.
hashCode ( ) This method is from the 'UserDefinedFunction' class.
productArity ( ) This method is from the 'UserDefinedFunction' class.
productElement ( int ) This method is from the 'UserDefinedFunction' class.
productIterator ( ) This method is from the 'UserDefinedFunction' class.
productPrefix ( ) This method is from the 'UserDefinedFunction' class.
toString ( ) This method is from the 'UserDefinedFunction' class.
tupled ( ) This method is from the 'UserDefinedFunction' class.
UserDefinedFunction ( java.lang.Object, types.DataType ) This constructor is from the 'UserDefinedFunction' class.
[+] UserDefinedPythonFunction (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (21)
accumulator ( ) This method is from the 'UserDefinedPythonFunction' class.
apply ( scala.collection.Seq<Column> ) This method is from the 'UserDefinedPythonFunction' class.
broadcastVars ( ) This method is from the 'UserDefinedPythonFunction' class.
canEqual ( java.lang.Object ) This method is from the 'UserDefinedPythonFunction' class.
command ( ) This method is from the 'UserDefinedPythonFunction' class.
copy ( java.lang.String, byte[ ], java.util.Map<java.lang.String,java.lang.String>, java.util.List<java.lang.String>, java.lang.String, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>, org.apache.spark.Accumulator<java.util.List<byte[ ]>>, types.DataType ) This method is from the 'UserDefinedPythonFunction' class.
curried ( ) This method is from the 'UserDefinedPythonFunction' class.
dataType ( ) This method is from the 'UserDefinedPythonFunction' class.
envVars ( ) This method is from the 'UserDefinedPythonFunction' class.
equals ( java.lang.Object ) This method is from the 'UserDefinedPythonFunction' class.
hashCode ( ) This method is from the 'UserDefinedPythonFunction' class.
name ( ) This method is from the 'UserDefinedPythonFunction' class.
productArity ( ) This method is from the 'UserDefinedPythonFunction' class.
productElement ( int ) This method is from the 'UserDefinedPythonFunction' class.
productIterator ( ) This method is from the 'UserDefinedPythonFunction' class.
productPrefix ( ) This method is from the 'UserDefinedPythonFunction' class.
pythonExec ( ) This method is from the 'UserDefinedPythonFunction' class.
pythonIncludes ( ) This method is from the 'UserDefinedPythonFunction' class.
toString ( ) This method is from the 'UserDefinedPythonFunction' class.
tupled ( ) This method is from the 'UserDefinedPythonFunction' class.
UserDefinedPythonFunction ( java.lang.String, byte[ ], java.util.Map<java.lang.String,java.lang.String>, java.util.List<java.lang.String>, java.lang.String, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>, org.apache.spark.Accumulator<java.util.List<byte[ ]>>, types.DataType ) This constructor is from the 'UserDefinedPythonFunction' class.
package org.apache.spark.sql.columnar
[+] BinaryColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
BinaryColumnStats ( ) This constructor is from the 'BinaryColumnStats' class.
[+] BooleanColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
BooleanColumnStats ( ) This constructor is from the 'BooleanColumnStats' class.
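This entry and the sibling *ColumnStats entries below share one failure mode: the concrete class still exists in 1.1.0, but it no longer implements ColumnStats, so members the client saw as inherited from that trait no longer resolve. A minimal sketch (the collectedStatistics member is an assumption about the 1.3.0 ColumnStats contract, used purely for illustration; only BooleanColumnStats and its no-arg constructor come from this report):

```java
import org.apache.spark.sql.columnar.BooleanColumnStats;

public class StatsClient {
    public static void main(String[] args) {
        BooleanColumnStats stats = new BooleanColumnStats();
        // Compiled against 1.3.0 this resolves through the ColumnStats
        // super-interface; against 1.1.0 no such member is reachable from
        // BooleanColumnStats, so the call throws java.lang.NoSuchMethodError.
        System.out.println(stats.collectedStatistics());
    }
}
```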
[+] ByteColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
ByteColumnStats ( ) This constructor is from the 'ByteColumnStats' class.
[+] CachedBatch (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
buffers ( ) This method is from the 'CachedBatch' class.
CachedBatch ( byte[ ][ ], org.apache.spark.sql.Row ) This constructor is from the 'CachedBatch' class.
canEqual ( java.lang.Object ) This method is from the 'CachedBatch' class.
copy ( byte[ ][ ], org.apache.spark.sql.Row ) This method is from the 'CachedBatch' class.
curried ( ) This method is from the 'CachedBatch' class.
equals ( java.lang.Object ) This method is from the 'CachedBatch' class.
hashCode ( ) This method is from the 'CachedBatch' class.
productArity ( ) This method is from the 'CachedBatch' class.
productElement ( int ) This method is from the 'CachedBatch' class.
productIterator ( ) This method is from the 'CachedBatch' class.
productPrefix ( ) This method is from the 'CachedBatch' class.
stats ( ) This method is from the 'CachedBatch' class.
toString ( ) This method is from the 'CachedBatch' class.
tupled ( ) This method is from the 'CachedBatch' class.
[+] ColumnBuilder (1)
| Change | Effect |
---|
1 | Abstract method appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (3)
build ( ) This abstract method is from the 'ColumnBuilder' interface.
columnStats ( ) This abstract method is from the 'ColumnBuilder' interface.
initialize ( int, java.lang.String, boolean ) This abstract method is from the 'ColumnBuilder' interface.
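This is the interface-method variant of the same problem: the ColumnBuilder interface survives, but the exact descriptor the client was compiled against is gone. A minimal sketch using only names from this report (the copyField helper is hypothetical):

```java
import org.apache.spark.sql.Row;
import org.apache.spark.sql.columnar.ColumnBuilder;

public class BuilderClient {
    // Against 1.3.0 this compiles to
    //   invokeinterface ColumnBuilder.appendFrom(Lorg/apache/spark/sql/Row;I)V
    static void copyField(ColumnBuilder builder, Row row, int ordinal) {
        builder.appendFrom(row, ordinal);
    }
    // 1.1.0's ColumnBuilder declares no method with that descriptor, so the
    // first call throws java.lang.NoSuchMethodError.
}
```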
[+] DateColumnAccessor (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnAccessor ( java.nio.ByteBuffer ) This constructor is from the 'DateColumnAccessor' class.
[+] DateColumnBuilder (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnBuilder ( ) This constructor is from the 'DateColumnBuilder' class.
[+] DateColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnStats ( ) This constructor is from the 'DateColumnStats' class.
[+] DoubleColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
DoubleColumnStats ( ) This constructor is from the 'DoubleColumnStats' class.
[+] FloatColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
FloatColumnStats ( ) This constructor is from the 'FloatColumnStats' class.
[+] GenericColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
GenericColumnStats ( ) This constructor is from the 'GenericColumnStats' class.
[+] IntColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
IntColumnStats ( ) This constructor is from the 'IntColumnStats' class.
[+] LongColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
LongColumnStats ( ) This constructor is from the 'LongColumnStats' class.
[+] NullableColumnBuilder (2)
| Change | Effect |
---|
1 | Abstract method appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Abstract method NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (11)
build ( ) This abstract method is from the 'NullableColumnBuilder' interface.
buildNonNulls ( ) This abstract method is from the 'NullableColumnBuilder' interface.
initialize ( int, java.lang.String, boolean ) This abstract method is from the 'NullableColumnBuilder' interface.
nullCount ( ) This abstract method is from the 'NullableColumnBuilder' interface.
nullCount_.eq ( int ) This abstract method is from the 'NullableColumnBuilder' interface.
nulls ( ) This abstract method is from the 'NullableColumnBuilder' interface.
nulls_.eq ( java.nio.ByteBuffer ) This abstract method is from the 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos ( ) This abstract method is from the 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos_.eq ( int ) This abstract method is from the 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.build ( ) This abstract method is from the 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.initialize ( int, java.lang.String, boolean ) This abstract method is from the 'NullableColumnBuilder' interface.
[+] PartitionStatistics (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (3)
forAttribute ( ) This method is from the 'PartitionStatistics' class.
PartitionStatistics ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> ) This constructor is from the 'PartitionStatistics' class.
schema ( ) This method is from the 'PartitionStatistics' class.
[+] ShortColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
ShortColumnStats ( ) This constructor is from the 'ShortColumnStats' class.
[+] StringColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
StringColumnStats ( ) This constructor is from the 'StringColumnStats' class.
[+] TimestampColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
TimestampColumnStats ( ) This constructor is from the 'TimestampColumnStats' class.
package org.apache.spark.sql.columnar.compression
[+] CompressionScheme (1)
| Change | Effect |
---|
1 | Abstract method encoder ( org.apache.spark.sql.columnar.NativeColumnType<T> ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (3)
decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> ) This abstract method is from the 'CompressionScheme' interface.
supports ( org.apache.spark.sql.columnar.ColumnType<?,?> ) This abstract method is from the 'CompressionScheme' interface.
typeId ( ) This abstract method is from the 'CompressionScheme' interface.
[+] Decoder<T> (2)
| Change | Effect |
---|
1 | Abstract method hasNext ( ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Abstract method next ( org.apache.spark.sql.catalyst.expressions.MutableRow, int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (1)
decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> ) Return value of this abstract method has type 'Decoder<T>'.
[+] Encoder<T> (2)
| Change | Effect |
---|
1 | Abstract method compress ( java.nio.ByteBuffer, java.nio.ByteBuffer ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Abstract method gatherCompressibilityStats ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (3)
compressedSize ( ) This abstract method is from the 'Encoder<T>' interface.
compressionRatio ( ) This abstract method is from the 'Encoder<T>' interface.
uncompressedSize ( ) This abstract method is from the 'Encoder<T>' interface.
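All of the failures in this report are LinkageError subclasses raised when a class or call site is first linked, so an ordinary try/catch around the offending call often comes too late and no compile-time check exists. A client that must tolerate both versions can probe reflectively before touching the incompatible surface. A minimal sketch of that generic idiom (the guard is not part of either library; only the DataFrame class name is taken from this report):

```java
public class CompatProbe {
    // Returns true if the named class is present on the classpath,
    // without initializing it.
    static boolean hasClass(String binaryName) {
        try {
            Class.forName(binaryName, false, CompatProbe.class.getClassLoader());
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        // org.apache.spark.sql.DataFrame exists in 1.3.0 but not in 1.1.0.
        if (hasClass("org.apache.spark.sql.DataFrame")) {
            System.out.println("1.3.x-style API available");
        } else {
            System.out.println("falling back to the 1.1.x API");
        }
    }
}
```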
package org.apache.spark.sql.execution
[+] CacheTableCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (16)
CacheTableCommand ( java.lang.String, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>, boolean ) This constructor is from the 'CacheTableCommand' class.
canEqual ( java.lang.Object ) This method is from the 'CacheTableCommand' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>, boolean ) This method is from the 'CacheTableCommand' class.
curried ( ) This method is from the 'CacheTableCommand' class.
equals ( java.lang.Object ) This method is from the 'CacheTableCommand' class.
hashCode ( ) This method is from the 'CacheTableCommand' class.
isLazy ( ) This method is from the 'CacheTableCommand' class.
output ( ) This method is from the 'CacheTableCommand' class.
plan ( ) This method is from the 'CacheTableCommand' class.
productArity ( ) This method is from the 'CacheTableCommand' class.
productElement ( int ) This method is from the 'CacheTableCommand' class.
productIterator ( ) This method is from the 'CacheTableCommand' class.
productPrefix ( ) This method is from the 'CacheTableCommand' class.
run ( org.apache.spark.sql.SQLContext ) This method is from the 'CacheTableCommand' class.
tableName ( ) This method is from the 'CacheTableCommand' class.
tupled ( ) This method is from the 'CacheTableCommand' class.
[+] DescribeCommand (2)
| Change | Effect |
---|
1 | Removed super-interface RunnableCommand. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Removed super-interface scala.Serializable. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (9)
canEqual ( java.lang.Object ) This method is from the 'DescribeCommand' class.
child ( ) This method is from the 'DescribeCommand' class.
equals ( java.lang.Object ) This method is from the 'DescribeCommand' class.
hashCode ( ) This method is from the 'DescribeCommand' class.
output ( ) This method is from the 'DescribeCommand' class.
productArity ( ) This method is from the 'DescribeCommand' class.
productElement ( int ) This method is from the 'DescribeCommand' class.
productIterator ( ) This method is from the 'DescribeCommand' class.
productPrefix ( ) This method is from the 'DescribeCommand' class.
[+] ExecutedCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (19)
andThen ( scala.Function1<ExecutedCommand,A> ) This method is from the 'ExecutedCommand' class.
canEqual ( java.lang.Object ) This method is from the 'ExecutedCommand' class.
children ( ) This method is from the 'ExecutedCommand' class.
children ( ) This method is from the 'ExecutedCommand' class.
cmd ( ) This method is from the 'ExecutedCommand' class.
compose ( scala.Function1<A,RunnableCommand> ) This method is from the 'ExecutedCommand' class.
copy ( RunnableCommand ) This method is from the 'ExecutedCommand' class.
equals ( java.lang.Object ) This method is from the 'ExecutedCommand' class.
execute ( ) This method is from the 'ExecutedCommand' class.
executeCollect ( ) This method is from the 'ExecutedCommand' class.
ExecutedCommand ( RunnableCommand ) This constructor is from the 'ExecutedCommand' class.
executeTake ( int ) This method is from the 'ExecutedCommand' class.
hashCode ( ) This method is from the 'ExecutedCommand' class.
output ( ) This method is from the 'ExecutedCommand' class.
productArity ( ) This method is from the 'ExecutedCommand' class.
productElement ( int ) This method is from the 'ExecutedCommand' class.
productIterator ( ) This method is from the 'ExecutedCommand' class.
productPrefix ( ) This method is from the 'ExecutedCommand' class.
sideEffectResult ( ) This method is from the 'ExecutedCommand' class.
[+] Expand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (19)
canEqual ( java.lang.Object ) This method is from the 'Expand' class.
child ( ) This method is from the 'Expand' class.
child ( ) This method is from the 'Expand' class.
children ( ) This method is from the 'Expand' class.
children ( ) This method is from the 'Expand' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan ) This method is from the 'Expand' class.
curried ( ) This method is from the 'Expand' class.
equals ( java.lang.Object ) This method is from the 'Expand' class.
execute ( ) This method is from the 'Expand' class.
Expand ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan ) This constructor is from the 'Expand' class.
hashCode ( ) This method is from the 'Expand' class.
output ( ) This method is from the 'Expand' class.
outputPartitioning ( ) This method is from the 'Expand' class.
productArity ( ) This method is from the 'Expand' class.
productElement ( int ) This method is from the 'Expand' class.
productIterator ( ) This method is from the 'Expand' class.
productPrefix ( ) This method is from the 'Expand' class.
projections ( ) This method is from the 'Expand' class.
tupled ( ) This method is from the 'Expand' class.
[+] ExplainCommand (2)
| Change | Effect |
---|
1 | Removed super-interface RunnableCommand. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Removed super-interface scala.Serializable. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (10)
canEqual ( java.lang.Object ) This method is from the 'ExplainCommand' class.
equals ( java.lang.Object ) This method is from the 'ExplainCommand' class.
extended ( ) This method is from the 'ExplainCommand' class.
hashCode ( ) This method is from the 'ExplainCommand' class.
logicalPlan ( ) This method is from the 'ExplainCommand' class.
output ( ) This method is from the 'ExplainCommand' class.
productArity ( ) This method is from the 'ExplainCommand' class.
productElement ( int ) This method is from the 'ExplainCommand' class.
productIterator ( ) This method is from the 'ExplainCommand' class.
productPrefix ( ) This method is from the 'ExplainCommand' class.
[+] ExternalSort (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (21)
canEqual ( java.lang.Object ) This method is from the 'ExternalSort' class.
child ( ) This method is from the 'ExternalSort' class.
child ( ) This method is from the 'ExternalSort' class.
children ( ) This method is from the 'ExternalSort' class.
children ( ) This method is from the 'ExternalSort' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>, boolean, SparkPlan ) This method is from the 'ExternalSort' class.
curried ( ) This method is from the 'ExternalSort' class.
equals ( java.lang.Object ) This method is from the 'ExternalSort' class.
execute ( ) This method is from the 'ExternalSort' class.
ExternalSort ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>, boolean, SparkPlan ) This constructor is from the 'ExternalSort' class.
global ( ) This method is from the 'ExternalSort' class.
hashCode ( ) This method is from the 'ExternalSort' class.
output ( ) This method is from the 'ExternalSort' class.
outputPartitioning ( ) This method is from the 'ExternalSort' class.
productArity ( ) This method is from the 'ExternalSort' class.
productElement ( int ) This method is from the 'ExternalSort' class.
productIterator ( ) This method is from the 'ExternalSort' class.
productPrefix ( ) This method is from the 'ExternalSort' class.
requiredChildDistribution ( ) This method is from the 'ExternalSort' class.
sortOrder ( ) This method is from the 'ExternalSort' class.
tupled ( ) This method is from the 'ExternalSort' class.
[+] JavaBigDecimalSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
JavaBigDecimalSerializer ( ) This constructor is from the 'JavaBigDecimalSerializer' class.
[+] LocalTableScan (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (18)
canEqual ( java.lang.Object ) This method is from the 'LocalTableScan' class.
children ( ) This method is from the 'LocalTableScan' class.
children ( ) This method is from the 'LocalTableScan' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row> ) This method is from the 'LocalTableScan' class.
curried ( ) This method is from the 'LocalTableScan' class.
equals ( java.lang.Object ) This method is from the 'LocalTableScan' class.
execute ( ) This method is from the 'LocalTableScan' class.
executeCollect ( ) This method is from the 'LocalTableScan' class.
executeTake ( int ) This method is from the 'LocalTableScan' class.
hashCode ( ) This method is from the 'LocalTableScan' class.
LocalTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row> ) This constructor is from the 'LocalTableScan' class.
output ( ) This method is from the 'LocalTableScan' class.
productArity ( ) This method is from the 'LocalTableScan' class.
productElement ( int ) This method is from the 'LocalTableScan' class.
productIterator ( ) This method is from the 'LocalTableScan' class.
productPrefix ( ) This method is from the 'LocalTableScan' class.
rows ( ) This method is from the 'LocalTableScan' class.
tupled ( ) This method is from the 'LocalTableScan' class.
[+] LogicalLocalTable (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (17)
canEqual ( java.lang.Object ) This method is from the 'LogicalLocalTable' class.
children ( ) This method is from the 'LogicalLocalTable' class.
children ( ) This method is from the 'LogicalLocalTable' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext ) This method is from the 'LogicalLocalTable' class.
equals ( java.lang.Object ) This method is from the 'LogicalLocalTable' class.
hashCode ( ) This method is from the 'LogicalLocalTable' class.
LogicalLocalTable ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext ) This constructor is from the 'LogicalLocalTable' class.
newInstance ( ) This method is from the 'LogicalLocalTable' class.
newInstance ( ) This method is from the 'LogicalLocalTable' class.
output ( ) This method is from the 'LogicalLocalTable' class.
productArity ( ) This method is from the 'LogicalLocalTable' class.
productElement ( int ) This method is from the 'LogicalLocalTable' class.
productIterator ( ) This method is from the 'LogicalLocalTable' class.
productPrefix ( ) This method is from the 'LogicalLocalTable' class.
rows ( ) This method is from the 'LogicalLocalTable' class.
sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from the 'LogicalLocalTable' class.
statistics ( ) This method is from the 'LogicalLocalTable' class.
[+] LogicalRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (17)
canEqual ( java.lang.Object ) This method is from the 'LogicalRDD' class.
children ( ) This method is from the 'LogicalRDD' class.
children ( ) This method is from the 'LogicalRDD' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext ) This method is from the 'LogicalRDD' class.
equals ( java.lang.Object ) This method is from the 'LogicalRDD' class.
hashCode ( ) This method is from the 'LogicalRDD' class.
LogicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext ) This constructor is from the 'LogicalRDD' class.
newInstance ( ) This method is from the 'LogicalRDD' class.
newInstance ( ) This method is from the 'LogicalRDD' class.
output ( ) This method is from the 'LogicalRDD' class.
productArity ( ) This method is from the 'LogicalRDD' class.
productElement ( int ) This method is from the 'LogicalRDD' class.
productIterator ( ) This method is from the 'LogicalRDD' class.
productPrefix ( ) This method is from the 'LogicalRDD' class.
rdd ( ) This method is from the 'LogicalRDD' class.
sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from the 'LogicalRDD' class.
statistics ( ) This method is from the 'LogicalRDD' class.
[+] PhysicalRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (16)
canEqual ( java.lang.Object ) This method is from the 'PhysicalRDD' class.
children ( ) This method is from the 'PhysicalRDD' class.
children ( ) This method is from the 'PhysicalRDD' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> ) This method is from the 'PhysicalRDD' class.
curried ( ) This method is from the 'PhysicalRDD' class.
equals ( java.lang.Object ) This method is from the 'PhysicalRDD' class.
execute ( ) This method is from the 'PhysicalRDD' class.
hashCode ( ) This method is from the 'PhysicalRDD' class.
output ( ) This method is from the 'PhysicalRDD' class.
PhysicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> ) This constructor is from the 'PhysicalRDD' class.
productArity ( ) This method is from the 'PhysicalRDD' class.
productElement ( int ) This method is from the 'PhysicalRDD' class.
productIterator ( ) This method is from the 'PhysicalRDD' class.
productPrefix ( ) This method is from the 'PhysicalRDD' class.
rdd ( ) This method is from the 'PhysicalRDD' class.
tupled ( ) This method is from the 'PhysicalRDD' class.
[+] RunnableCommand (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
run ( org.apache.spark.sql.SQLContext ) This abstract method is from the 'RunnableCommand' interface.
[+] ScalaBigDecimalSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
ScalaBigDecimalSerializer ( ) This constructor is from the 'ScalaBigDecimalSerializer' class.
[+] SetCommand (2)
| Change | Effect |
---|
1 | Removed super-interface RunnableCommand. | A client program may be interrupted by a NoSuchMethodError exception. |
2 | Removed super-interface scala.Serializable. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (8)
canEqual ( java.lang.Object ) This method is from the 'SetCommand' class.
equals ( java.lang.Object ) This method is from the 'SetCommand' class.
hashCode ( ) This method is from the 'SetCommand' class.
output ( ) This method is from the 'SetCommand' class.
productArity ( ) This method is from the 'SetCommand' class.
productElement ( int ) This method is from the 'SetCommand' class.
productIterator ( ) This method is from the 'SetCommand' class.
productPrefix ( ) This method is from the 'SetCommand' class.
[+] ShowTablesCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
andThen ( scala.Function1<ShowTablesCommand,A> ) This method is from the 'ShowTablesCommand' class.
canEqual ( java.lang.Object ) This method is from the 'ShowTablesCommand' class.
compose ( scala.Function1<A,scala.Option<java.lang.String>> ) This method is from the 'ShowTablesCommand' class.
copy ( scala.Option<java.lang.String> ) This method is from the 'ShowTablesCommand' class.
databaseName ( ) This method is from the 'ShowTablesCommand' class.
equals ( java.lang.Object ) This method is from the 'ShowTablesCommand' class.
hashCode ( ) This method is from the 'ShowTablesCommand' class.
output ( ) This method is from the 'ShowTablesCommand' class.
productArity ( ) This method is from the 'ShowTablesCommand' class.
productElement ( int ) This method is from the 'ShowTablesCommand' class.
productIterator ( ) This method is from the 'ShowTablesCommand' class.
productPrefix ( ) This method is from the 'ShowTablesCommand' class.
run ( org.apache.spark.sql.SQLContext ) This method is from the 'ShowTablesCommand' class.
ShowTablesCommand ( scala.Option<java.lang.String> ) This constructor is from the 'ShowTablesCommand' class.
[+] UncacheTableCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
andThen ( scala.Function1<UncacheTableCommand,A> ) This method is from the 'UncacheTableCommand' class.
canEqual ( java.lang.Object ) This method is from the 'UncacheTableCommand' class.
compose ( scala.Function1<A,java.lang.String> ) This method is from the 'UncacheTableCommand' class.
copy ( java.lang.String ) This method is from the 'UncacheTableCommand' class.
equals ( java.lang.Object ) This method is from the 'UncacheTableCommand' class.
hashCode ( ) This method is from the 'UncacheTableCommand' class.
output ( ) This method is from the 'UncacheTableCommand' class.
productArity ( ) This method is from the 'UncacheTableCommand' class.
productElement ( int ) This method is from the 'UncacheTableCommand' class.
productIterator ( ) This method is from the 'UncacheTableCommand' class.
productPrefix ( ) This method is from the 'UncacheTableCommand' class.
run ( org.apache.spark.sql.SQLContext ) This method is from the 'UncacheTableCommand' class.
tableName ( ) This method is from the 'UncacheTableCommand' class.
UncacheTableCommand ( java.lang.String ) This constructor is from the 'UncacheTableCommand' class.
package org.apache.spark.sql.execution.joins
[+] BroadcastHashJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (32)
BroadcastHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from the 'BroadcastHashJoin' class.
buildKeys ( ) This method is from the 'BroadcastHashJoin' class.
buildPlan ( ) This method is from the 'BroadcastHashJoin' class.
buildSide ( ) This method is from the 'BroadcastHashJoin' class.
buildSideKeyGenerator ( ) This method is from the 'BroadcastHashJoin' class.
canEqual ( java.lang.Object ) This method is from the 'BroadcastHashJoin' class.
children ( ) This method is from the 'BroadcastHashJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from the 'BroadcastHashJoin' class.
curried ( ) This method is from the 'BroadcastHashJoin' class.
equals ( java.lang.Object ) This method is from the 'BroadcastHashJoin' class.
execute ( ) This method is from the 'BroadcastHashJoin' class.
hashCode ( ) This method is from the 'BroadcastHashJoin' class.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation ) This method is from the 'BroadcastHashJoin' class.
left ( ) This method is from the 'BroadcastHashJoin' class.
left ( ) This method is from the 'BroadcastHashJoin' class.
leftKeys ( ) This method is from the 'BroadcastHashJoin' class.
output ( ) This method is from the 'BroadcastHashJoin' class.
outputPartitioning ( ) This method is from the 'BroadcastHashJoin' class.
productArity ( ) This method is from the 'BroadcastHashJoin' class.
productElement ( int ) This method is from the 'BroadcastHashJoin' class.
productIterator ( ) This method is from the 'BroadcastHashJoin' class.
productPrefix ( ) This method is from the 'BroadcastHashJoin' class.
requiredChildDistribution ( ) This method is from the 'BroadcastHashJoin' class.
requiredChildDistribution ( ) This method is from the 'BroadcastHashJoin' class.
right ( ) This method is from the 'BroadcastHashJoin' class.
right ( ) This method is from the 'BroadcastHashJoin' class.
rightKeys ( ) This method is from the 'BroadcastHashJoin' class.
streamedKeys ( ) This method is from the 'BroadcastHashJoin' class.
streamedPlan ( ) This method is from the 'BroadcastHashJoin' class.
streamSideKeyGenerator ( ) This method is from the 'BroadcastHashJoin' class.
timeout ( ) This method is from the 'BroadcastHashJoin' class.
tupled ( ) This method is from the 'BroadcastHashJoin' class.
[+] BroadcastLeftSemiJoinHash (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (29)
BroadcastLeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from the 'BroadcastLeftSemiJoinHash' class.
buildKeys ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
buildPlan ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
buildSide ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
buildSide ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
buildSideKeyGenerator ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
canEqual ( java.lang.Object ) This method is from the 'BroadcastLeftSemiJoinHash' class.
children ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from the 'BroadcastLeftSemiJoinHash' class.
curried ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
equals ( java.lang.Object ) This method is from the 'BroadcastLeftSemiJoinHash' class.
execute ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
hashCode ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation ) This method is from the 'BroadcastLeftSemiJoinHash' class.
left ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
left ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
leftKeys ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
output ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
productArity ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
productElement ( int ) This method is from the 'BroadcastLeftSemiJoinHash' class.
productIterator ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
productPrefix ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
right ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
right ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
rightKeys ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
streamedKeys ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
streamedPlan ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
streamSideKeyGenerator ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
tupled ( ) This method is from the 'BroadcastLeftSemiJoinHash' class.
[+] BroadcastNestedLoopJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (23)
BroadcastNestedLoopJoin ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, package.BuildSide, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> ) This constructor is from the 'BroadcastNestedLoopJoin' class.
buildSide ( ) This method is from the 'BroadcastNestedLoopJoin' class.
canEqual ( java.lang.Object ) This method is from the 'BroadcastNestedLoopJoin' class.
children ( ) This method is from the 'BroadcastNestedLoopJoin' class.
condition ( ) This method is from the 'BroadcastNestedLoopJoin' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, package.BuildSide, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> ) This method is from the 'BroadcastNestedLoopJoin' class.
curried ( ) This method is from the 'BroadcastNestedLoopJoin' class.
equals ( java.lang.Object ) This method is from the 'BroadcastNestedLoopJoin' class.
execute ( ) This method is from the 'BroadcastNestedLoopJoin' class.
hashCode ( ) This method is from the 'BroadcastNestedLoopJoin' class.
joinType ( ) This method is from the 'BroadcastNestedLoopJoin' class.
left ( ) This method is from the 'BroadcastNestedLoopJoin' class.
left ( ) This method is from the 'BroadcastNestedLoopJoin' class.
BroadcastNestedLoopJoin..boundCondition ( ) This method is from the 'BroadcastNestedLoopJoin' class.
output ( ) This method is from the 'BroadcastNestedLoopJoin' class.
outputPartitioning ( ) This method is from the 'BroadcastNestedLoopJoin' class.
productArity ( ) This method is from the 'BroadcastNestedLoopJoin' class.
productElement ( int ) This method is from the 'BroadcastNestedLoopJoin' class.
productIterator ( ) This method is from the 'BroadcastNestedLoopJoin' class.
productPrefix ( ) This method is from the 'BroadcastNestedLoopJoin' class.
right ( ) This method is from the 'BroadcastNestedLoopJoin' class.
right ( ) This method is from the 'BroadcastNestedLoopJoin' class.
tupled ( ) This method is from the 'BroadcastNestedLoopJoin' class.
[+] CartesianProduct (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (18)
canEqual ( java.lang.Object ) This method is from the 'CartesianProduct' class.
CartesianProduct ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from the 'CartesianProduct' class.
children ( ) This method is from the 'CartesianProduct' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from the 'CartesianProduct' class.
curried ( ) This method is from the 'CartesianProduct' class.
equals ( java.lang.Object ) This method is from the 'CartesianProduct' class.
execute ( ) This method is from the 'CartesianProduct' class.
hashCode ( ) This method is from the 'CartesianProduct' class.
left ( ) This method is from the 'CartesianProduct' class.
left ( ) This method is from the 'CartesianProduct' class.
output ( ) This method is from the 'CartesianProduct' class.
productArity ( ) This method is from the 'CartesianProduct' class.
productElement ( int ) This method is from the 'CartesianProduct' class.
productIterator ( ) This method is from the 'CartesianProduct' class.
productPrefix ( ) This method is from the 'CartesianProduct' class.
right ( ) This method is from the 'CartesianProduct' class.
right ( ) This method is from the 'CartesianProduct' class.
tupled ( ) This method is from the 'CartesianProduct' class.
[+] GeneralHashedRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
GeneralHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>> ) This constructor is from the 'GeneralHashedRelation' class.
[+] HashedRelation (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
get ( org.apache.spark.sql.Row ) This abstract method is from the 'HashedRelation' interface.
[+] HashJoin (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (13)
buildKeys ( ) This abstract method is from the 'HashJoin' interface.
buildPlan ( ) This abstract method is from the 'HashJoin' interface.
buildSide ( ) This abstract method is from the 'HashJoin' interface.
buildSideKeyGenerator ( ) This abstract method is from the 'HashJoin' interface.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation ) This abstract method is from the 'HashJoin' interface.
left ( ) This abstract method is from the 'HashJoin' interface.
leftKeys ( ) This abstract method is from the 'HashJoin' interface.
output ( ) This abstract method is from the 'HashJoin' interface.
right ( ) This abstract method is from the 'HashJoin' interface.
rightKeys ( ) This abstract method is from the 'HashJoin' interface.
streamedKeys ( ) This abstract method is from the 'HashJoin' interface.
streamedPlan ( ) This abstract method is from the 'HashJoin' interface.
streamSideKeyGenerator ( ) This abstract method is from the 'HashJoin' interface.
[+] HashOuterJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (34)
canEqual ( java.lang.Object ) This method is from the 'HashOuterJoin' class.
children ( ) This method is from the 'HashOuterJoin' class.
condition ( ) This method is from the 'HashOuterJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from the 'HashOuterJoin' class.
curried ( ) This method is from the 'HashOuterJoin' class.
equals ( java.lang.Object ) This method is from the 'HashOuterJoin' class.
execute ( ) This method is from the 'HashOuterJoin' class.
hashCode ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from the 'HashOuterJoin' class.
joinType ( ) This method is from the 'HashOuterJoin' class.
left ( ) This method is from the 'HashOuterJoin' class.
left ( ) This method is from the 'HashOuterJoin' class.
leftKeys ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..boundCondition ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..buildHashTable ( scala.collection.Iterator<org.apache.spark.sql.Row>, org.apache.spark.sql.catalyst.expressions.package.Projection ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..DUMMY_LIST ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..EMPTY_LIST ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..fullOuterIterator ( org.apache.spark.sql.Row, scala.collection.Iterable<org.apache.spark.sql.Row>, scala.collection.Iterable<org.apache.spark.sql.Row>, org.apache.spark.sql.catalyst.expressions.JoinedRow ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..leftNullRow ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..leftOuterIterator ( org.apache.spark.sql.Row, org.apache.spark.sql.catalyst.expressions.JoinedRow, scala.collection.Iterable<org.apache.spark.sql.Row> ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..rightNullRow ( ) This method is from the 'HashOuterJoin' class.
HashOuterJoin..rightOuterIterator ( org.apache.spark.sql.Row, scala.collection.Iterable<org.apache.spark.sql.Row>, org.apache.spark.sql.catalyst.expressions.JoinedRow ) This method is from the 'HashOuterJoin' class.
output ( ) This method is from the 'HashOuterJoin' class.
outputPartitioning ( ) This method is from the 'HashOuterJoin' class.
productArity ( ) This method is from the 'HashOuterJoin' class.
productElement ( int ) This method is from the 'HashOuterJoin' class.
productIterator ( ) This method is from the 'HashOuterJoin' class.
productPrefix ( ) This method is from the 'HashOuterJoin' class.
requiredChildDistribution ( ) This method is from the 'HashOuterJoin' class.
requiredChildDistribution ( ) This method is from the 'HashOuterJoin' class.
right ( ) This method is from the 'HashOuterJoin' class.
right ( ) This method is from the 'HashOuterJoin' class.
rightKeys ( ) This method is from the 'HashOuterJoin' class.
tupled ( ) This method is from the 'HashOuterJoin' class.
[+] LeftSemiJoinBNL (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (23)
broadcast ( ) This method is from the 'LeftSemiJoinBNL' class.
canEqual ( java.lang.Object ) This method is from the 'LeftSemiJoinBNL' class.
children ( ) This method is from the 'LeftSemiJoinBNL' class.
condition ( ) This method is from the 'LeftSemiJoinBNL' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> ) This method is from the 'LeftSemiJoinBNL' class.
curried ( ) This method is from the 'LeftSemiJoinBNL' class.
equals ( java.lang.Object ) This method is from the 'LeftSemiJoinBNL' class.
execute ( ) This method is from the 'LeftSemiJoinBNL' class.
hashCode ( ) This method is from the 'LeftSemiJoinBNL' class.
left ( ) This method is from the 'LeftSemiJoinBNL' class.
left ( ) This method is from the 'LeftSemiJoinBNL' class.
LeftSemiJoinBNL ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> ) This constructor is from the 'LeftSemiJoinBNL' class.
LeftSemiJoinBNL..boundCondition ( ) This method is from the 'LeftSemiJoinBNL' class.
output ( ) This method is from the 'LeftSemiJoinBNL' class.
outputPartitioning ( ) This method is from the 'LeftSemiJoinBNL' class.
productArity ( ) This method is from the 'LeftSemiJoinBNL' class.
productElement ( int ) This method is from the 'LeftSemiJoinBNL' class.
productIterator ( ) This method is from the 'LeftSemiJoinBNL' class.
productPrefix ( ) This method is from the 'LeftSemiJoinBNL' class.
right ( ) This method is from the 'LeftSemiJoinBNL' class.
right ( ) This method is from the 'LeftSemiJoinBNL' class.
streamed ( ) This method is from the 'LeftSemiJoinBNL' class.
tupled ( ) This method is from the 'LeftSemiJoinBNL' class.
[+] LeftSemiJoinHash (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (31)
buildKeys ( ) This method is from the 'LeftSemiJoinHash' class.
buildPlan ( ) This method is from the 'LeftSemiJoinHash' class.
buildSide ( ) This method is from the 'LeftSemiJoinHash' class.
buildSide ( ) This method is from the 'LeftSemiJoinHash' class.
buildSideKeyGenerator ( ) This method is from the 'LeftSemiJoinHash' class.
canEqual ( java.lang.Object ) This method is from the 'LeftSemiJoinHash' class.
children ( ) This method is from the 'LeftSemiJoinHash' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from the 'LeftSemiJoinHash' class.
curried ( ) This method is from the 'LeftSemiJoinHash' class.
equals ( java.lang.Object ) This method is from the 'LeftSemiJoinHash' class.
execute ( ) This method is from the 'LeftSemiJoinHash' class.
hashCode ( ) This method is from the 'LeftSemiJoinHash' class.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation ) This method is from the 'LeftSemiJoinHash' class.
left ( ) This method is from the 'LeftSemiJoinHash' class.
left ( ) This method is from the 'LeftSemiJoinHash' class.
leftKeys ( ) This method is from the 'LeftSemiJoinHash' class.
LeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from the 'LeftSemiJoinHash' class.
output ( ) This method is from the 'LeftSemiJoinHash' class.
productArity ( ) This method is from the 'LeftSemiJoinHash' class.
productElement ( int ) This method is from the 'LeftSemiJoinHash' class.
productIterator ( ) This method is from the 'LeftSemiJoinHash' class.
productPrefix ( ) This method is from the 'LeftSemiJoinHash' class.
requiredChildDistribution ( ) This method is from the 'LeftSemiJoinHash' class.
requiredChildDistribution ( ) This method is from the 'LeftSemiJoinHash' class.
right ( ) This method is from the 'LeftSemiJoinHash' class.
right ( ) This method is from the 'LeftSemiJoinHash' class.
rightKeys ( ) This method is from the 'LeftSemiJoinHash' class.
streamedKeys ( ) This method is from the 'LeftSemiJoinHash' class.
streamedPlan ( ) This method is from the 'LeftSemiJoinHash' class.
streamSideKeyGenerator ( ) This method is from the 'LeftSemiJoinHash' class.
tupled ( ) This method is from the 'LeftSemiJoinHash' class.
[+] ShuffledHashJoin (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (31)
buildKeys ( ) This method is from 'ShuffledHashJoin' class.
buildPlan ( ) This method is from 'ShuffledHashJoin' class.
buildSide ( ) This method is from 'ShuffledHashJoin' class.
buildSideKeyGenerator ( ) This method is from 'ShuffledHashJoin' class.
canEqual ( java.lang.Object ) This method is from 'ShuffledHashJoin' class.
children ( ) This method is from 'ShuffledHashJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This method is from 'ShuffledHashJoin' class.
curried ( ) This method is from 'ShuffledHashJoin' class.
equals ( java.lang.Object ) This method is from 'ShuffledHashJoin' class.
execute ( ) This method is from 'ShuffledHashJoin' class.
hashCode ( ) This method is from 'ShuffledHashJoin' class.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation ) This method is from 'ShuffledHashJoin' class.
left ( ) This method is from 'ShuffledHashJoin' class.
left ( ) This method is from 'ShuffledHashJoin' class.
leftKeys ( ) This method is from 'ShuffledHashJoin' class.
output ( ) This method is from 'ShuffledHashJoin' class.
outputPartitioning ( ) This method is from 'ShuffledHashJoin' class.
productArity ( ) This method is from 'ShuffledHashJoin' class.
productElement ( int ) This method is from 'ShuffledHashJoin' class.
productIterator ( ) This method is from 'ShuffledHashJoin' class.
productPrefix ( ) This method is from 'ShuffledHashJoin' class.
requiredChildDistribution ( ) This method is from 'ShuffledHashJoin' class.
requiredChildDistribution ( ) This method is from 'ShuffledHashJoin' class.
right ( ) This method is from 'ShuffledHashJoin' class.
right ( ) This method is from 'ShuffledHashJoin' class.
rightKeys ( ) This method is from 'ShuffledHashJoin' class.
ShuffledHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan ) This constructor is from 'ShuffledHashJoin' class.
streamedKeys ( ) This method is from 'ShuffledHashJoin' class.
streamedPlan ( ) This method is from 'ShuffledHashJoin' class.
streamSideKeyGenerator ( ) This method is from 'ShuffledHashJoin' class.
tupled ( ) This method is from 'ShuffledHashJoin' class.
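Note that the curried ( ) and tupled ( ) entries above live on the case-class companion object, so even indirect construction through the companion breaks together with the class. A hedged sketch (the 1.3.0 package location is assumed, as for the other join operators):

```scala
import org.apache.spark.sql.execution.joins.ShuffledHashJoin // 1.3.0 location (assumed)

object CompanionUsage {
  // ShuffledHashJoin.tupled is provided by the generated companion object
  // (a Function5); it is one of the removed members listed above, so this
  // line links only against spark-sql 1.3.0.
  val build = ShuffledHashJoin.tupled
}
```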
[+] UniqueKeyHashedRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
UniqueKeyHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.sql.Row> ) This constructor is from 'UniqueKeyHashedRelation' class.
package org.apache.spark.sql.jdbc
[+] DriverQuirks (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (4)
DriverQuirks ( ) This constructor is from 'DriverQuirks' abstract class.
get ( java.lang.String ) This method is from 'DriverQuirks' abstract class.
getCatalystType ( int, java.lang.String, int, org.apache.spark.sql.types.MetadataBuilder ) This abstract method is from 'DriverQuirks' abstract class.
getJDBCType ( org.apache.spark.sql.types.DataType ) This abstract method is from 'DriverQuirks' abstract class.
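Removing an abstract class bites even earlier than a plain removed class: any client subclass fails to load at all, because the JVM must resolve the missing superclass before running a single line of the subclass. A sketch, where com.example.MyQuirks is a hypothetical client subclass of DriverQuirks compiled against 1.3.0:

```scala
object SubclassLinkCheck {
  def main(args: Array[String]): Unit =
    try {
      // Loading the subclass forces resolution of the removed superclass.
      Class.forName("com.example.MyQuirks")
    } catch {
      case e: NoClassDefFoundError =>
        // Under 1.1.0 the message names org/apache/spark/sql/jdbc/DriverQuirks.
        println(s"subclass cannot link: ${e.getMessage}")
    }
}
```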
[+] JDBCPartition (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (15)
canEqual ( java.lang.Object ) This method is from 'JDBCPartition' class.
copy ( java.lang.String, int ) This method is from 'JDBCPartition' class.
curried ( ) This method is from 'JDBCPartition' class.
equals ( java.lang.Object ) This method is from 'JDBCPartition' class.
hashCode ( ) This method is from 'JDBCPartition' class.
idx ( ) This method is from 'JDBCPartition' class.
index ( ) This method is from 'JDBCPartition' class.
JDBCPartition ( java.lang.String, int ) This constructor is from 'JDBCPartition' class.
productArity ( ) This method is from 'JDBCPartition' class.
productElement ( int ) This method is from 'JDBCPartition' class.
productIterator ( ) This method is from 'JDBCPartition' class.
productPrefix ( ) This method is from 'JDBCPartition' class.
toString ( ) This method is from 'JDBCPartition' class.
tupled ( ) This method is from 'JDBCPartition' class.
whereClause ( ) This method is from 'JDBCPartition' class.
[+] JDBCPartitioningInfo (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (16)
canEqual ( java.lang.Object ) This method is from 'JDBCPartitioningInfo' class.
column ( ) This method is from 'JDBCPartitioningInfo' class.
copy ( java.lang.String, long, long, int ) This method is from 'JDBCPartitioningInfo' class.
curried ( ) This method is from 'JDBCPartitioningInfo' class.
equals ( java.lang.Object ) This method is from 'JDBCPartitioningInfo' class.
hashCode ( ) This method is from 'JDBCPartitioningInfo' class.
JDBCPartitioningInfo ( java.lang.String, long, long, int ) This constructor is from 'JDBCPartitioningInfo' class.
lowerBound ( ) This method is from 'JDBCPartitioningInfo' class.
numPartitions ( ) This method is from 'JDBCPartitioningInfo' class.
productArity ( ) This method is from 'JDBCPartitioningInfo' class.
productElement ( int ) This method is from 'JDBCPartitioningInfo' class.
productIterator ( ) This method is from 'JDBCPartitioningInfo' class.
productPrefix ( ) This method is from 'JDBCPartitioningInfo' class.
toString ( ) This method is from 'JDBCPartitioningInfo' class.
tupled ( ) This method is from 'JDBCPartitioningInfo' class.
upperBound ( ) This method is from 'JDBCPartitioningInfo' class.
[+] JDBCRDD (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (21)
BinaryConversion ( ) This method is from 'JDBCRDD' class.
BinaryLongConversion ( ) This method is from 'JDBCRDD' class.
BooleanConversion ( ) This method is from 'JDBCRDD' class.
compute ( org.apache.spark.Partition, org.apache.spark.TaskContext ) This method is from 'JDBCRDD' class.
DateConversion ( ) This method is from 'JDBCRDD' class.
DecimalConversion ( ) This method is from 'JDBCRDD' class.
DoubleConversion ( ) This method is from 'JDBCRDD' class.
FloatConversion ( ) This method is from 'JDBCRDD' class.
getConnector ( java.lang.String, java.lang.String ) This method is from 'JDBCRDD' class.
getConversions ( org.apache.spark.sql.types.StructType ) This method is from 'JDBCRDD' class.
getPartitions ( ) This method is from 'JDBCRDD' class.
IntegerConversion ( ) This method is from 'JDBCRDD' class.
JDBCRDD ( org.apache.spark.SparkContext, scala.Function0<java.sql.Connection>, org.apache.spark.sql.types.StructType, java.lang.String, java.lang.String[ ], org.apache.spark.sql.sources.Filter[ ], org.apache.spark.Partition[ ] ) This constructor is from 'JDBCRDD' class.
LongConversion ( ) This method is from 'JDBCRDD' class.
JDBCRDD..columnList ( ) This method is from 'JDBCRDD' class.
JDBCRDD..compileFilter ( org.apache.spark.sql.sources.Filter ) This method is from 'JDBCRDD' class.
JDBCRDD..getWhereClause ( JDBCPartition ) This method is from 'JDBCRDD' class.
resolveTable ( java.lang.String, java.lang.String ) This method is from 'JDBCRDD' class.
scanTable ( org.apache.spark.SparkContext, org.apache.spark.sql.types.StructType, java.lang.String, java.lang.String, java.lang.String, java.lang.String[ ], org.apache.spark.sql.sources.Filter[ ], org.apache.spark.Partition[ ] ) This method is from 'JDBCRDD' class.
StringConversion ( ) This method is from 'JDBCRDD' class.
TimestampConversion ( ) This method is from 'JDBCRDD' class.
[+] JDBCRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (17)
buildScan ( java.lang.String[ ], org.apache.spark.sql.sources.Filter[ ] ) This method is from 'JDBCRelation' class.
canEqual ( java.lang.Object ) This method is from 'JDBCRelation' class.
columnPartition ( JDBCPartitioningInfo ) This method is from 'JDBCRelation' class.
copy ( java.lang.String, java.lang.String, org.apache.spark.Partition[ ], org.apache.spark.sql.SQLContext ) This method is from 'JDBCRelation' class.
equals ( java.lang.Object ) This method is from 'JDBCRelation' class.
hashCode ( ) This method is from 'JDBCRelation' class.
JDBCRelation ( java.lang.String, java.lang.String, org.apache.spark.Partition[ ], org.apache.spark.sql.SQLContext ) This constructor is from 'JDBCRelation' class.
parts ( ) This method is from 'JDBCRelation' class.
productArity ( ) This method is from 'JDBCRelation' class.
productElement ( int ) This method is from 'JDBCRelation' class.
productIterator ( ) This method is from 'JDBCRelation' class.
productPrefix ( ) This method is from 'JDBCRelation' class.
schema ( ) This method is from 'JDBCRelation' class.
sqlContext ( ) This method is from 'JDBCRelation' class.
table ( ) This method is from 'JDBCRelation' class.
toString ( ) This method is from 'JDBCRelation' class.
url ( ) This method is from 'JDBCRelation' class.
[+] MySQLQuirks (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
MySQLQuirks ( ) This constructor is from 'MySQLQuirks' class.
[+] NoQuirks (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
NoQuirks ( ) This constructor is from 'NoQuirks' class.
[+] PostgresQuirks (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
PostgresQuirks ( ) This constructor is from 'PostgresQuirks' class.
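Every type in org.apache.spark.sql.jdbc listed in this section is absent in 1.1.0, so a startup audit can report them all in one pass instead of failing one NoClassDefFoundError at a time. A minimal sketch over the class names taken from the report:

```scala
object JdbcPackageAudit {
  // Class names exactly as listed in this report section.
  private val removedIn110 = Seq(
    "org.apache.spark.sql.jdbc.DriverQuirks",
    "org.apache.spark.sql.jdbc.JDBCPartition",
    "org.apache.spark.sql.jdbc.JDBCPartitioningInfo",
    "org.apache.spark.sql.jdbc.JDBCRDD",
    "org.apache.spark.sql.jdbc.JDBCRelation",
    "org.apache.spark.sql.jdbc.MySQLQuirks",
    "org.apache.spark.sql.jdbc.NoQuirks",
    "org.apache.spark.sql.jdbc.PostgresQuirks")

  def missing: Seq[String] = removedIn110.filterNot { name =>
    try { Class.forName(name); true }
    catch { case _: ClassNotFoundException => false }
  }

  def main(args: Array[String]): Unit =
    if (missing.nonEmpty)
      println(s"not portable, missing: ${missing.mkString(", ")}")
}
```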
package org.apache.spark.sql.json
[+] JSONRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (18)
buildScan ( ) This method is from 'JSONRelation' class.
canEqual ( java.lang.Object ) This method is from 'JSONRelation' class.
copy ( java.lang.String, double, scala.Option<org.apache.spark.sql.types.StructType>, org.apache.spark.sql.SQLContext ) This method is from 'JSONRelation' class.
equals ( java.lang.Object ) This method is from 'JSONRelation' class.
hashCode ( ) This method is from 'JSONRelation' class.
insert ( org.apache.spark.sql.DataFrame, boolean ) This method is from 'JSONRelation' class.
JSONRelation ( java.lang.String, double, scala.Option<org.apache.spark.sql.types.StructType>, org.apache.spark.sql.SQLContext ) This constructor is from 'JSONRelation' class.
JSONRelation..baseRDD ( ) This method is from 'JSONRelation' class.
path ( ) This method is from 'JSONRelation' class.
productArity ( ) This method is from 'JSONRelation' class.
productElement ( int ) This method is from 'JSONRelation' class.
productIterator ( ) This method is from 'JSONRelation' class.
productPrefix ( ) This method is from 'JSONRelation' class.
samplingRatio ( ) This method is from 'JSONRelation' class.
schema ( ) This method is from 'JSONRelation' class.
sqlContext ( ) This method is from 'JSONRelation' class.
toString ( ) This method is from 'JSONRelation' class.
userSpecifiedSchema ( ) This method is from 'JSONRelation' class.
package org.apache.spark.sql.parquet
[+] CatalystPrimitiveStringConverter (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystPrimitiveStringConverter ( CatalystConverter, int ) This constructor is from 'CatalystPrimitiveStringConverter' class.
[+] InsertIntoParquetTable (1)
| # | Change | Effect |
|---|---|---|
| 1 | Removed super-interface org.apache.spark.mapreduce.SparkHadoopMapReduceUtil. | A client program may be interrupted by a NoSuchMethodError exception. |
[+] affected methods (23)
canEqual ( java.lang.Object ) This method is from 'InsertIntoParquetTable' class.
child ( ) This method is from 'InsertIntoParquetTable' class.
child ( ) This method is from 'InsertIntoParquetTable' class.
children ( ) This method is from 'InsertIntoParquetTable' class.
children ( ) This method is from 'InsertIntoParquetTable' class.
copy ( ParquetRelation, org.apache.spark.sql.execution.SparkPlan, boolean ) Return value of this method has type 'InsertIntoParquetTable'.
curried ( ) This method is from 'InsertIntoParquetTable' class.
equals ( java.lang.Object ) This method is from 'InsertIntoParquetTable' class.
execute ( ) This method is from 'InsertIntoParquetTable' class.
hashCode ( ) This method is from 'InsertIntoParquetTable' class.
InsertIntoParquetTable ( ParquetRelation, org.apache.spark.sql.execution.SparkPlan, boolean ) This constructor is from 'InsertIntoParquetTable' class.
newJobContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.JobID ) This method is from 'InsertIntoParquetTable' class.
newTaskAttemptContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.TaskAttemptID ) This method is from 'InsertIntoParquetTable' class.
newTaskAttemptID ( java.lang.String, int, boolean, int, int ) This method is from 'InsertIntoParquetTable' class.
output ( ) This method is from 'InsertIntoParquetTable' class.
outputPartitioning ( ) This method is from 'InsertIntoParquetTable' class.
overwrite ( ) This method is from 'InsertIntoParquetTable' class.
productArity ( ) This method is from 'InsertIntoParquetTable' class.
productElement ( int ) This method is from 'InsertIntoParquetTable' class.
productIterator ( ) This method is from 'InsertIntoParquetTable' class.
productPrefix ( ) This method is from 'InsertIntoParquetTable' class.
relation ( ) This method is from 'InsertIntoParquetTable' class.
tupled ( ) This method is from 'InsertIntoParquetTable' class.
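Unlike the removed classes elsewhere in this report, InsertIntoParquetTable itself still loads; what disappears are the members it inherited from the removed super-interface (newJobContext, newTaskAttemptContext, newTaskAttemptID in the list above). Compiled call sites resolve those at link time, hence NoSuchMethodError rather than NoClassDefFoundError. A reflective probe sketch, with parameter types copied from the report:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.JobID

object InheritedMethodProbe {
  def main(args: Array[String]): Unit = {
    // The class resolves on both versions; only the inherited member may be gone.
    val cls = Class.forName("org.apache.spark.sql.parquet.InsertIntoParquetTable")
    val stillThere =
      try {
        cls.getMethod("newJobContext", classOf[Configuration], classOf[JobID])
        true
      } catch {
        case _: NoSuchMethodException => false
      }
    println(s"newJobContext available: $stillThere")
  }
}
```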
[+] ParquetRelation2 (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (49)
buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> ) This method is from 'ParquetRelation2' class.
canEqual ( java.lang.Object ) This method is from 'ParquetRelation2' class.
copy ( scala.collection.Seq<java.lang.String>, scala.collection.immutable.Map<java.lang.String,java.lang.String>, scala.Option<org.apache.spark.sql.types.StructType>, scala.Option<PartitionSpec>, org.apache.spark.sql.SQLContext ) This method is from 'ParquetRelation2' class.
DEFAULT_PARTITION_NAME ( ) This method is from 'ParquetRelation2' class.
equals ( java.lang.Object ) This method is from 'ParquetRelation2' class.
hashCode ( ) This method is from 'ParquetRelation2' class.
insert ( org.apache.spark.sql.DataFrame, boolean ) This method is from 'ParquetRelation2' class.
isPartitioned ( ) This method is from 'ParquetRelation2' class.
isTraceEnabled ( ) This method is from 'ParquetRelation2' class.
log ( ) This method is from 'ParquetRelation2' class.
logDebug ( scala.Function0<java.lang.String> ) This method is from 'ParquetRelation2' class.
logDebug ( scala.Function0<java.lang.String>, java.lang.Throwable ) This method is from 'ParquetRelation2' class.
logError ( scala.Function0<java.lang.String> ) This method is from 'ParquetRelation2' class.
logError ( scala.Function0<java.lang.String>, java.lang.Throwable ) This method is from 'ParquetRelation2' class.
logInfo ( scala.Function0<java.lang.String> ) This method is from 'ParquetRelation2' class.
logInfo ( scala.Function0<java.lang.String>, java.lang.Throwable ) This method is from 'ParquetRelation2' class.
logName ( ) This method is from 'ParquetRelation2' class.
logTrace ( scala.Function0<java.lang.String> ) This method is from 'ParquetRelation2' class.
logTrace ( scala.Function0<java.lang.String>, java.lang.Throwable ) This method is from 'ParquetRelation2' class.
logWarning ( scala.Function0<java.lang.String> ) This method is from 'ParquetRelation2' class.
logWarning ( scala.Function0<java.lang.String>, java.lang.Throwable ) This method is from 'ParquetRelation2' class.
maybePartitionSpec ( ) This method is from 'ParquetRelation2' class.
maybeSchema ( ) This method is from 'ParquetRelation2' class.
MERGE_SCHEMA ( ) This method is from 'ParquetRelation2' class.
newJobContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.JobID ) This method is from 'ParquetRelation2' class.
newTaskAttemptContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.TaskAttemptID ) This method is from 'ParquetRelation2' class.
newTaskAttemptID ( java.lang.String, int, boolean, int, int ) This method is from 'ParquetRelation2' class.
org.apache.spark.Logging..log_ ( ) This method is from 'ParquetRelation2' class.
org.apache.spark.Logging..log__.eq ( org.slf4j.Logger ) This method is from 'ParquetRelation2' class.
ParquetRelation2..defaultPartitionName ( ) This method is from 'ParquetRelation2' class.
ParquetRelation2..isSummaryFile ( org.apache.hadoop.fs.Path ) This method is from 'ParquetRelation2' class.
ParquetRelation2..maybeMetastoreSchema ( ) This method is from 'ParquetRelation2' class.
ParquetRelation2..metadataCache ( ) This method is from 'ParquetRelation2' class.
ParquetRelation2..shouldMergeSchemas ( ) This method is from 'ParquetRelation2' class.
parameters ( ) This method is from 'ParquetRelation2' class.
ParquetRelation2 ( scala.collection.Seq<java.lang.String>, scala.collection.immutable.Map<java.lang.String,java.lang.String>, scala.Option<org.apache.spark.sql.types.StructType>, scala.Option<PartitionSpec>, org.apache.spark.sql.SQLContext ) This constructor is from 'ParquetRelation2' class.
partitionColumns ( ) This method is from 'ParquetRelation2' class.
partitions ( ) This method is from 'ParquetRelation2' class.
partitionSpec ( ) This method is from 'ParquetRelation2' class.
paths ( ) This method is from 'ParquetRelation2' class.
productArity ( ) This method is from 'ParquetRelation2' class.
productElement ( int ) This method is from 'ParquetRelation2' class.
productIterator ( ) This method is from 'ParquetRelation2' class.
productPrefix ( ) This method is from 'ParquetRelation2' class.
schema ( ) This method is from 'ParquetRelation2' class.
sizeInBytes ( ) This method is from 'ParquetRelation2' class.
sparkContext ( ) This method is from 'ParquetRelation2' class.
sqlContext ( ) This method is from 'ParquetRelation2' class.
toString ( ) This method is from 'ParquetRelation2' class.
[+] ParquetTest (1)
| # | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (12)
configuration ( ) This abstract method is from 'ParquetTest' interface.
makeParquetFile ( org.apache.spark.sql.DataFrame, java.io.File, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> ) This abstract method is from 'ParquetTest' interface.
makeParquetFile ( scala.collection.Seq<T>, java.io.File, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> ) This abstract method is from 'ParquetTest' interface.
makePartitionDir ( java.io.File, java.lang.String, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.Object>> ) This abstract method is from 'ParquetTest' interface.
sqlContext ( ) This abstract method is from 'ParquetTest' interface.
withParquetDataFrame ( scala.collection.Seq<T>, scala.Function1<org.apache.spark.sql.DataFrame,scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> ) This abstract method is from 'ParquetTest' interface.
withParquetFile ( scala.collection.Seq<T>, scala.Function1<java.lang.String,scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> ) This abstract method is from 'ParquetTest' interface.
withParquetTable ( scala.collection.Seq<T>, java.lang.String, scala.Function0<scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> ) This abstract method is from 'ParquetTest' interface.
withSQLConf ( scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>>, scala.Function0<scala.runtime.BoxedUnit> ) This abstract method is from 'ParquetTest' interface.
withTempDir ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> ) This abstract method is from 'ParquetTest' interface.
withTempPath ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> ) This abstract method is from 'ParquetTest' interface.
withTempTable ( java.lang.String, scala.Function0<scala.runtime.BoxedUnit> ) This abstract method is from 'ParquetTest' interface.
[+] ParquetTypeInfo (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (16)
canEqual ( java.lang.Object ) This method is from 'ParquetTypeInfo' class.
copy ( parquet.schema.PrimitiveType.PrimitiveTypeName, scala.Option<parquet.schema.OriginalType>, scala.Option<parquet.schema.DecimalMetadata>, scala.Option<java.lang.Object> ) This method is from 'ParquetTypeInfo' class.
curried ( ) This method is from 'ParquetTypeInfo' class.
decimalMetadata ( ) This method is from 'ParquetTypeInfo' class.
equals ( java.lang.Object ) This method is from 'ParquetTypeInfo' class.
hashCode ( ) This method is from 'ParquetTypeInfo' class.
length ( ) This method is from 'ParquetTypeInfo' class.
originalType ( ) This method is from 'ParquetTypeInfo' class.
ParquetTypeInfo ( parquet.schema.PrimitiveType.PrimitiveTypeName, scala.Option<parquet.schema.OriginalType>, scala.Option<parquet.schema.DecimalMetadata>, scala.Option<java.lang.Object> ) This constructor is from 'ParquetTypeInfo' class.
primitiveType ( ) This method is from 'ParquetTypeInfo' class.
productArity ( ) This method is from 'ParquetTypeInfo' class.
productElement ( int ) This method is from 'ParquetTypeInfo' class.
productIterator ( ) This method is from 'ParquetTypeInfo' class.
productPrefix ( ) This method is from 'ParquetTypeInfo' class.
toString ( ) This method is from 'ParquetTypeInfo' class.
tupled ( ) This method is from 'ParquetTypeInfo' class.
[+] Partition (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
canEqual ( java.lang.Object ) This method is from 'Partition' class.
copy ( org.apache.spark.sql.Row, java.lang.String ) This method is from 'Partition' class.
curried ( ) This method is from 'Partition' class.
equals ( java.lang.Object ) This method is from 'Partition' class.
hashCode ( ) This method is from 'Partition' class.
Partition ( org.apache.spark.sql.Row, java.lang.String ) This constructor is from 'Partition' class.
path ( ) This method is from 'Partition' class.
productArity ( ) This method is from 'Partition' class.
productElement ( int ) This method is from 'Partition' class.
productIterator ( ) This method is from 'Partition' class.
productPrefix ( ) This method is from 'Partition' class.
toString ( ) This method is from 'Partition' class.
tupled ( ) This method is from 'Partition' class.
values ( ) This method is from 'Partition' class.
[+] PartitionSpec (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
canEqual ( java.lang.Object ) This method is from 'PartitionSpec' class.
copy ( org.apache.spark.sql.types.StructType, scala.collection.Seq<Partition> ) This method is from 'PartitionSpec' class.
curried ( ) This method is from 'PartitionSpec' class.
equals ( java.lang.Object ) This method is from 'PartitionSpec' class.
hashCode ( ) This method is from 'PartitionSpec' class.
partitionColumns ( ) This method is from 'PartitionSpec' class.
partitions ( ) This method is from 'PartitionSpec' class.
PartitionSpec ( org.apache.spark.sql.types.StructType, scala.collection.Seq<Partition> ) This constructor is from 'PartitionSpec' class.
productArity ( ) This method is from 'PartitionSpec' class.
productElement ( int ) This method is from 'PartitionSpec' class.
productIterator ( ) This method is from 'PartitionSpec' class.
productPrefix ( ) This method is from 'PartitionSpec' class.
toString ( ) This method is from 'PartitionSpec' class.
tupled ( ) This method is from 'PartitionSpec' class.
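The two partitioning case classes compose directly; the constructor shapes below are exactly the ones the report lists (Partition(Row, String) and PartitionSpec(StructType, Seq&lt;Partition&gt;)). A 1.3.0-only sketch:

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.parquet.{Partition, PartitionSpec}

object PartitionSpecSketch {
  // One partition whose values row is (1) and whose directory is /data/p=1;
  // the partition-column schema is left empty for brevity.
  val spec = PartitionSpec(StructType(Nil), Seq(Partition(Row(1), "/data/p=1")))
}
```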
package org.apache.spark.sql.parquet.timestamp
[+] NanoTime (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (5)
getJulianDay ( ) This method is from 'NanoTime' class.
getTimeOfDayNanos ( ) This method is from 'NanoTime' class.
NanoTime ( ) This constructor is from 'NanoTime' class.
set ( int, long ) This method is from 'NanoTime' class.
toBinary ( ) This method is from 'NanoTime' class.
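From the member list, set ( int, long ) pairs a Julian day with nanoseconds-of-day and toBinary ( ) serializes the pair (parquet's INT96 timestamp layout). Return types are not shown in the report, so the sketch below only binds the results without asserting their types:

```scala
import org.apache.spark.sql.parquet.timestamp.NanoTime

object NanoTimeSketch {
  val nt = new NanoTime()
  nt.set(2440588, 0L)          // Julian day 2440588 = 1970-01-01, at midnight
  val julian = nt.getJulianDay // reads back the values set above
  val nanos  = nt.getTimeOfDayNanos
  val bin    = nt.toBinary     // serialized form for the parquet writer
}
```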
package org.apache.spark.sql.sources
[+] And (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
And ( Filter, Filter ) This constructor is from 'And' class.
canEqual ( java.lang.Object ) This method is from 'And' class.
copy ( Filter, Filter ) This method is from 'And' class.
curried ( ) This method is from 'And' class.
equals ( java.lang.Object ) This method is from 'And' class.
hashCode ( ) This method is from 'And' class.
left ( ) This method is from 'And' class.
productArity ( ) This method is from 'And' class.
productElement ( int ) This method is from 'And' class.
productIterator ( ) This method is from 'And' class.
productPrefix ( ) This method is from 'And' class.
right ( ) This method is from 'And' class.
toString ( ) This method is from 'And' class.
tupled ( ) This method is from 'And' class.
[+] BaseRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (4)
BaseRelation ( ) This constructor is from 'BaseRelation' abstract class.
schema ( ) This abstract method is from 'BaseRelation' abstract class.
sizeInBytes ( ) This method is from 'BaseRelation' abstract class.
sqlContext ( ) This abstract method is from 'BaseRelation' abstract class.
[+] CaseInsensitiveMap (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
CaseInsensitiveMap ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This constructor is from 'CaseInsensitiveMap' class.
[+] CatalystScan (1)
| # | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> ) This abstract method is from 'CatalystScan' interface.
[+] CreatableRelationProvider (1)
| # | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.DataFrame ) This abstract method is from 'CreatableRelationProvider' interface.
[+] CreateTableUsing (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (18)
allowExisting ( ) This method is from 'CreateTableUsing' class.
canEqual ( java.lang.Object ) This method is from 'CreateTableUsing' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, boolean, scala.collection.immutable.Map<java.lang.String,java.lang.String>, boolean, boolean ) This method is from 'CreateTableUsing' class.
CreateTableUsing ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, boolean, scala.collection.immutable.Map<java.lang.String,java.lang.String>, boolean, boolean ) This constructor is from 'CreateTableUsing' class.
curried ( ) This method is from 'CreateTableUsing' class.
equals ( java.lang.Object ) This method is from 'CreateTableUsing' class.
hashCode ( ) This method is from 'CreateTableUsing' class.
managedIfNoPath ( ) This method is from 'CreateTableUsing' class.
options ( ) This method is from 'CreateTableUsing' class.
productArity ( ) This method is from 'CreateTableUsing' class.
productElement ( int ) This method is from 'CreateTableUsing' class.
productIterator ( ) This method is from 'CreateTableUsing' class.
productPrefix ( ) This method is from 'CreateTableUsing' class.
provider ( ) This method is from 'CreateTableUsing' class.
tableName ( ) This method is from 'CreateTableUsing' class.
temporary ( ) This method is from 'CreateTableUsing' class.
tupled ( ) This method is from 'CreateTableUsing' class.
userSpecifiedSchema ( ) This method is from 'CreateTableUsing' class.
[+] CreateTableUsingAsSelect (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (19)
canEqual ( java.lang.Object ) This method is from 'CreateTableUsingAsSelect' class.
child ( ) This method is from 'CreateTableUsingAsSelect' class.
child ( ) This method is from 'CreateTableUsingAsSelect' class.
copy ( java.lang.String, java.lang.String, boolean, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from 'CreateTableUsingAsSelect' class.
CreateTableUsingAsSelect ( java.lang.String, java.lang.String, boolean, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This constructor is from 'CreateTableUsingAsSelect' class.
curried ( ) This method is from 'CreateTableUsingAsSelect' class.
equals ( java.lang.Object ) This method is from 'CreateTableUsingAsSelect' class.
hashCode ( ) This method is from 'CreateTableUsingAsSelect' class.
mode ( ) This method is from 'CreateTableUsingAsSelect' class.
options ( ) This method is from 'CreateTableUsingAsSelect' class.
output ( ) This method is from 'CreateTableUsingAsSelect' class.
productArity ( ) This method is from 'CreateTableUsingAsSelect' class.
productElement ( int ) This method is from 'CreateTableUsingAsSelect' class.
productIterator ( ) This method is from 'CreateTableUsingAsSelect' class.
productPrefix ( ) This method is from 'CreateTableUsingAsSelect' class.
provider ( ) This method is from 'CreateTableUsingAsSelect' class.
tableName ( ) This method is from 'CreateTableUsingAsSelect' class.
temporary ( ) This method is from 'CreateTableUsingAsSelect' class.
tupled ( ) This method is from 'CreateTableUsingAsSelect' class.
[+] CreateTempTableUsing (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (16)
canEqual ( java.lang.Object ) This method is from 'CreateTempTableUsing' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This method is from 'CreateTempTableUsing' class.
CreateTempTableUsing ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) This constructor is from 'CreateTempTableUsing' class.
curried ( ) This method is from 'CreateTempTableUsing' class.
equals ( java.lang.Object ) This method is from 'CreateTempTableUsing' class.
hashCode ( ) This method is from 'CreateTempTableUsing' class.
options ( ) This method is from 'CreateTempTableUsing' class.
productArity ( ) This method is from 'CreateTempTableUsing' class.
productElement ( int ) This method is from 'CreateTempTableUsing' class.
productIterator ( ) This method is from 'CreateTempTableUsing' class.
productPrefix ( ) This method is from 'CreateTempTableUsing' class.
provider ( ) This method is from 'CreateTempTableUsing' class.
run ( org.apache.spark.sql.SQLContext ) This method is from 'CreateTempTableUsing' class.
tableName ( ) This method is from 'CreateTempTableUsing' class.
tupled ( ) This method is from 'CreateTempTableUsing' class.
userSpecifiedSchema ( ) This method is from 'CreateTempTableUsing' class.
[+] CreateTempTableUsingAsSelect (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (17)
canEqual ( java.lang.Object ) This method is from 'CreateTempTableUsingAsSelect' class.
copy ( java.lang.String, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from 'CreateTempTableUsingAsSelect' class.
CreateTempTableUsingAsSelect ( java.lang.String, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This constructor is from 'CreateTempTableUsingAsSelect' class.
curried ( ) This method is from 'CreateTempTableUsingAsSelect' class.
equals ( java.lang.Object ) This method is from 'CreateTempTableUsingAsSelect' class.
hashCode ( ) This method is from 'CreateTempTableUsingAsSelect' class.
mode ( ) This method is from 'CreateTempTableUsingAsSelect' class.
options ( ) This method is from 'CreateTempTableUsingAsSelect' class.
productArity ( ) This method is from 'CreateTempTableUsingAsSelect' class.
productElement ( int ) This method is from 'CreateTempTableUsingAsSelect' class.
productIterator ( ) This method is from 'CreateTempTableUsingAsSelect' class.
productPrefix ( ) This method is from 'CreateTempTableUsingAsSelect' class.
provider ( ) This method is from 'CreateTempTableUsingAsSelect' class.
query ( ) This method is from 'CreateTempTableUsingAsSelect' class.
run ( org.apache.spark.sql.SQLContext ) This method is from 'CreateTempTableUsingAsSelect' class.
tableName ( ) This method is from 'CreateTempTableUsingAsSelect' class.
tupled ( ) This method is from 'CreateTempTableUsingAsSelect' class.
[+] DDLParser (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
apply ( java.lang.String, boolean ) This method is from 'DDLParser' class.
DDLParser ( scala.Function1<java.lang.String,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> ) This constructor is from 'DDLParser' class.
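DDLParser exposes only the two members above: a constructor taking the fallback SQL parser (a String => LogicalPlan) and apply(sql, exceptionOnError). The report omits return types; the sketch below assumes apply yields an Option of LogicalPlan, which is an assumption about the 1.3.0 sources, not something this report states.

```scala
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.sources.DDLParser

object DdlParsing {
  // `fallback` handles everything that is not a data-source DDL statement.
  def parse(fallback: String => LogicalPlan, sql: String): Option[LogicalPlan] =
    new DDLParser(fallback).apply(sql, false) // exceptionOnError = false
}
```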
[+] DescribeCommand (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
canEqual ( java.lang.Object ) This method is from 'DescribeCommand' class.
copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean ) This method is from 'DescribeCommand' class.
curried ( ) This method is from 'DescribeCommand' class.
DescribeCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean ) This constructor is from 'DescribeCommand' class.
equals ( java.lang.Object ) This method is from 'DescribeCommand' class.
hashCode ( ) This method is from 'DescribeCommand' class.
isExtended ( ) This method is from 'DescribeCommand' class.
output ( ) This method is from 'DescribeCommand' class.
productArity ( ) This method is from 'DescribeCommand' class.
productElement ( int ) This method is from 'DescribeCommand' class.
productIterator ( ) This method is from 'DescribeCommand' class.
productPrefix ( ) This method is from 'DescribeCommand' class.
table ( ) This method is from 'DescribeCommand' class.
tupled ( ) This method is from 'DescribeCommand' class.
[+] EqualTo (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'EqualTo' class.
canEqual ( java.lang.Object ) This method is from 'EqualTo' class.
copy ( java.lang.String, java.lang.Object ) This method is from 'EqualTo' class.
curried ( ) This method is from 'EqualTo' class.
equals ( java.lang.Object ) This method is from 'EqualTo' class.
EqualTo ( java.lang.String, java.lang.Object ) This constructor is from 'EqualTo' class.
hashCode ( ) This method is from 'EqualTo' class.
productArity ( ) This method is from 'EqualTo' class.
productElement ( int ) This method is from 'EqualTo' class.
productIterator ( ) This method is from 'EqualTo' class.
productPrefix ( ) This method is from 'EqualTo' class.
toString ( ) This method is from 'EqualTo' class.
tupled ( ) This method is from 'EqualTo' class.
value ( ) This method is from 'EqualTo' class.
[+] Filter (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
Filter ( ) This constructor is from 'Filter' abstract class.
[+] GreaterThan (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'GreaterThan' class.
canEqual ( java.lang.Object ) This method is from 'GreaterThan' class.
copy ( java.lang.String, java.lang.Object ) This method is from 'GreaterThan' class.
curried ( ) This method is from 'GreaterThan' class.
equals ( java.lang.Object ) This method is from 'GreaterThan' class.
GreaterThan ( java.lang.String, java.lang.Object ) This constructor is from 'GreaterThan' class.
hashCode ( ) This method is from 'GreaterThan' class.
productArity ( ) This method is from 'GreaterThan' class.
productElement ( int ) This method is from 'GreaterThan' class.
productIterator ( ) This method is from 'GreaterThan' class.
productPrefix ( ) This method is from 'GreaterThan' class.
toString ( ) This method is from 'GreaterThan' class.
tupled ( ) This method is from 'GreaterThan' class.
value ( ) This method is from 'GreaterThan' class.
[+] GreaterThanOrEqual (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'GreaterThanOrEqual' class.
canEqual ( java.lang.Object ) This method is from 'GreaterThanOrEqual' class.
copy ( java.lang.String, java.lang.Object ) This method is from 'GreaterThanOrEqual' class.
curried ( ) This method is from 'GreaterThanOrEqual' class.
equals ( java.lang.Object ) This method is from 'GreaterThanOrEqual' class.
GreaterThanOrEqual ( java.lang.String, java.lang.Object ) This constructor is from 'GreaterThanOrEqual' class.
hashCode ( ) This method is from 'GreaterThanOrEqual' class.
productArity ( ) This method is from 'GreaterThanOrEqual' class.
productElement ( int ) This method is from 'GreaterThanOrEqual' class.
productIterator ( ) This method is from 'GreaterThanOrEqual' class.
productPrefix ( ) This method is from 'GreaterThanOrEqual' class.
toString ( ) This method is from 'GreaterThanOrEqual' class.
tupled ( ) This method is from 'GreaterThanOrEqual' class.
value ( ) This method is from 'GreaterThanOrEqual' class.
[+] In (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'In' class.
canEqual ( java.lang.Object ) This method is from 'In' class.
copy ( java.lang.String, java.lang.Object[ ] ) This method is from 'In' class.
curried ( ) This method is from 'In' class.
equals ( java.lang.Object ) This method is from 'In' class.
hashCode ( ) This method is from 'In' class.
In ( java.lang.String, java.lang.Object[ ] ) This constructor is from 'In' class.
productArity ( ) This method is from 'In' class.
productElement ( int ) This method is from 'In' class.
productIterator ( ) This method is from 'In' class.
productPrefix ( ) This method is from 'In' class.
toString ( ) This method is from 'In' class.
tupled ( ) This method is from 'In' class.
values ( ) This method is from 'In' class.
[+] InsertableRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
insert ( org.apache.spark.sql.DataFrame, boolean ) This abstract method is from 'InsertableRelation' interface.
[+] InsertIntoDataSource (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (15)
canEqual ( java.lang.Object ) This method is from 'InsertIntoDataSource' class.
copy ( LogicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean ) This method is from 'InsertIntoDataSource' class.
curried ( ) This method is from 'InsertIntoDataSource' class.
equals ( java.lang.Object ) This method is from 'InsertIntoDataSource' class.
hashCode ( ) This method is from 'InsertIntoDataSource' class.
InsertIntoDataSource ( LogicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean ) This constructor is from 'InsertIntoDataSource' class.
logicalRelation ( ) This method is from 'InsertIntoDataSource' class.
overwrite ( ) This method is from 'InsertIntoDataSource' class.
productArity ( ) This method is from 'InsertIntoDataSource' class.
productElement ( int ) This method is from 'InsertIntoDataSource' class.
productIterator ( ) This method is from 'InsertIntoDataSource' class.
productPrefix ( ) This method is from 'InsertIntoDataSource' class.
query ( ) This method is from 'InsertIntoDataSource' class.
run ( org.apache.spark.sql.SQLContext ) This method is from 'InsertIntoDataSource' class.
tupled ( ) This method is from 'InsertIntoDataSource' class.
[+] IsNotNull (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (13)
andThen ( scala.Function1<IsNotNull,A> ) This method is from 'IsNotNull' class.
attribute ( ) This method is from 'IsNotNull' class.
canEqual ( java.lang.Object ) This method is from 'IsNotNull' class.
compose ( scala.Function1<A,java.lang.String> ) This method is from 'IsNotNull' class.
copy ( java.lang.String ) This method is from 'IsNotNull' class.
equals ( java.lang.Object ) This method is from 'IsNotNull' class.
hashCode ( ) This method is from 'IsNotNull' class.
IsNotNull ( java.lang.String ) This constructor is from 'IsNotNull' class.
productArity ( ) This method is from 'IsNotNull' class.
productElement ( int ) This method is from 'IsNotNull' class.
productIterator ( ) This method is from 'IsNotNull' class.
productPrefix ( ) This method is from 'IsNotNull' class.
toString ( ) This method is from 'IsNotNull' class.
[+] IsNull (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (13)
andThen ( scala.Function1<IsNull,A> ) This method is from 'IsNull' class.
attribute ( ) This method is from 'IsNull' class.
canEqual ( java.lang.Object ) This method is from 'IsNull' class.
compose ( scala.Function1<A,java.lang.String> ) This method is from 'IsNull' class.
copy ( java.lang.String ) This method is from 'IsNull' class.
equals ( java.lang.Object ) This method is from 'IsNull' class.
hashCode ( ) This method is from 'IsNull' class.
IsNull ( java.lang.String ) This constructor is from 'IsNull' class.
productArity ( ) This method is from 'IsNull' class.
productElement ( int ) This method is from 'IsNull' class.
productIterator ( ) This method is from 'IsNull' class.
productPrefix ( ) This method is from 'IsNull' class.
toString ( ) This method is from 'IsNull' class.
[+] LessThan (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'LessThan' class.
canEqual ( java.lang.Object ) This method is from 'LessThan' class.
copy ( java.lang.String, java.lang.Object ) This method is from 'LessThan' class.
curried ( ) This method is from 'LessThan' class.
equals ( java.lang.Object ) This method is from 'LessThan' class.
hashCode ( ) This method is from 'LessThan' class.
LessThan ( java.lang.String, java.lang.Object ) This constructor is from 'LessThan' class.
productArity ( ) This method is from 'LessThan' class.
productElement ( int ) This method is from 'LessThan' class.
productIterator ( ) This method is from 'LessThan' class.
productPrefix ( ) This method is from 'LessThan' class.
toString ( ) This method is from 'LessThan' class.
tupled ( ) This method is from 'LessThan' class.
value ( ) This method is from 'LessThan' class.
[+] LessThanOrEqual (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
attribute ( ) This method is from 'LessThanOrEqual' class.
canEqual ( java.lang.Object ) This method is from 'LessThanOrEqual' class.
copy ( java.lang.String, java.lang.Object ) This method is from 'LessThanOrEqual' class.
curried ( ) This method is from 'LessThanOrEqual' class.
equals ( java.lang.Object ) This method is from 'LessThanOrEqual' class.
hashCode ( ) This method is from 'LessThanOrEqual' class.
LessThanOrEqual ( java.lang.String, java.lang.Object ) This constructor is from 'LessThanOrEqual' class.
productArity ( ) This method is from 'LessThanOrEqual' class.
productElement ( int ) This method is from 'LessThanOrEqual' class.
productIterator ( ) This method is from 'LessThanOrEqual' class.
productPrefix ( ) This method is from 'LessThanOrEqual' class.
toString ( ) This method is from 'LessThanOrEqual' class.
tupled ( ) This method is from 'LessThanOrEqual' class.
value ( ) This method is from 'LessThanOrEqual' class.
[+] LogicalRelation (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (19)
andThen ( scala.Function1<LogicalRelation,A> ) This method is from 'LogicalRelation' class.
attributeMap ( ) This method is from 'LogicalRelation' class.
canEqual ( java.lang.Object ) This method is from 'LogicalRelation' class.
compose ( scala.Function1<A,BaseRelation> ) This method is from 'LogicalRelation' class.
copy ( BaseRelation ) This method is from 'LogicalRelation' class.
equals ( java.lang.Object ) This method is from 'LogicalRelation' class.
hashCode ( ) This method is from 'LogicalRelation' class.
LogicalRelation ( BaseRelation ) This constructor is from 'LogicalRelation' class.
newInstance ( ) This method is from 'LogicalRelation' class.
newInstance ( ) This method is from 'LogicalRelation' class.
output ( ) This method is from 'LogicalRelation' class.
productArity ( ) This method is from 'LogicalRelation' class.
productElement ( int ) This method is from 'LogicalRelation' class.
productIterator ( ) This method is from 'LogicalRelation' class.
productPrefix ( ) This method is from 'LogicalRelation' class.
relation ( ) This method is from 'LogicalRelation' class.
sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from 'LogicalRelation' class.
simpleString ( ) This method is from 'LogicalRelation' class.
statistics ( ) This method is from 'LogicalRelation' class.
[+] Not (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (13)
andThen ( scala.Function1<Not,A> ) This method is from 'Not' class.
canEqual ( java.lang.Object ) This method is from 'Not' class.
child ( ) This method is from 'Not' class.
compose ( scala.Function1<A,Filter> ) This method is from 'Not' class.
copy ( Filter ) This method is from 'Not' class.
equals ( java.lang.Object ) This method is from 'Not' class.
hashCode ( ) This method is from 'Not' class.
Not ( Filter ) This constructor is from 'Not' class.
productArity ( ) This method is from 'Not' class.
productElement ( int ) This method is from 'Not' class.
productIterator ( ) This method is from 'Not' class.
productPrefix ( ) This method is from 'Not' class.
toString ( ) This method is from 'Not' class.
[+] Or (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (14)
canEqual ( java.lang.Object ) This method is from 'Or' class.
copy ( Filter, Filter ) This method is from 'Or' class.
curried ( ) This method is from 'Or' class.
equals ( java.lang.Object ) This method is from 'Or' class.
hashCode ( ) This method is from 'Or' class.
left ( ) This method is from 'Or' class.
Or ( Filter, Filter ) This constructor is from 'Or' class.
productArity ( ) This method is from 'Or' class.
productElement ( int ) This method is from 'Or' class.
productIterator ( ) This method is from 'Or' class.
productPrefix ( ) This method is from 'Or' class.
right ( ) This method is from 'Or' class.
toString ( ) This method is from 'Or' class.
tupled ( ) This method is from 'Or' class.
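And, Or, Not and the comparison filters above form the 1.3.0 data-source filter algebra; each constructor in the sketch below is taken verbatim from this report. Code like this compiles against 1.3.0 but dies with a NoClassDefFoundError on first use under 1.1.0, where the whole hierarchy is absent:

```scala
import org.apache.spark.sql.sources._

object FilterTreeSketch {
  // (age >= 18) AND (country = 'NL' OR country IS NULL); the second
  // constructor argument is typed java.lang.Object, so values box freely.
  val filter: Filter =
    And(
      GreaterThanOrEqual("age", 18),
      Or(EqualTo("country", "NL"), IsNull("country")))
}
```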
[+] PreWriteCheck (1)
| # | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (88)
andThen ( scala.Function1<scala.runtime.BoxedUnit,A> ) This method is from 'PreWriteCheck' class.
andThen.mcDD.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcDF.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcDI.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcDJ.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcFD.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcFF.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcFI.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcFJ.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcID.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcIF.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcII.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcIJ.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcJD.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcJF.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcJI.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcJJ.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcVD.sp ( scala.Function1<scala.runtime.BoxedUnit,A> ) This method is from 'PreWriteCheck' class.
andThen.mcVF.sp ( scala.Function1<scala.runtime.BoxedUnit,A> ) This method is from 'PreWriteCheck' class.
andThen.mcVI.sp ( scala.Function1<scala.runtime.BoxedUnit,A> ) This method is from 'PreWriteCheck' class.
andThen.mcVJ.sp ( scala.Function1<scala.runtime.BoxedUnit,A> ) This method is from 'PreWriteCheck' class.
andThen.mcZD.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcZF.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcZI.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
andThen.mcZJ.sp ( scala.Function1<java.lang.Object,A> ) This method is from 'PreWriteCheck' class.
apply ( java.lang.Object ) This method is from 'PreWriteCheck' class.
apply ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan ) This method is from 'PreWriteCheck' class.
apply.mcDD.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcDF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcDI.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcDJ.sp ( long ) This method is from 'PreWriteCheck' class.
apply.mcFD.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcFF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcFI.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcFJ.sp ( long ) This method is from 'PreWriteCheck' class.
apply.mcID.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcIF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcII.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcIJ.sp ( long ) This method is from 'PreWriteCheck' class.
apply.mcJD.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcJF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcJI.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcJJ.sp ( long ) This method is from 'PreWriteCheck' class.
apply.mcVD.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcVF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcVI.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcVJ.sp ( long ) This method is from 'PreWriteCheck' class.
apply.mcZD.sp ( double ) This method is from 'PreWriteCheck' class.
apply.mcZF.sp ( float ) This method is from 'PreWriteCheck' class.
apply.mcZI.sp ( int ) This method is from 'PreWriteCheck' class.
apply.mcZJ.sp ( long ) This method is from 'PreWriteCheck' class.
canEqual ( java.lang.Object ) This method is from 'PreWriteCheck' class.
catalog ( ) This method is from 'PreWriteCheck' class.
compose ( scala.Function1<A,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> ) This method is from 'PreWriteCheck' class.
compose.mcDD.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcDF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcDI.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcDJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcFD.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcFF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcFI.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcFJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcID.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcIF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcII.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcIJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcJD.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcJF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcJI.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcJJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcVD.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcVF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcVI.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcVJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcZD.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcZF.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcZI.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
compose.mcZJ.sp ( scala.Function1<A,java.lang.Object> ) This method is from 'PreWriteCheck' class.
copy ( org.apache.spark.sql.catalyst.analysis.Catalog ) This method is from 'PreWriteCheck' class.
equals ( java.lang.Object ) This method is from 'PreWriteCheck' class.
failAnalysis ( java.lang.String ) This method is from 'PreWriteCheck' class.
hashCode ( ) This method is from 'PreWriteCheck' class.
PreWriteCheck ( org.apache.spark.sql.catalyst.analysis.Catalog ) This constructor is from 'PreWriteCheck' class.
productArity ( ) This method is from 'PreWriteCheck' class.
productElement ( int ) This method is from 'PreWriteCheck' class.
productIterator ( ) This method is from 'PreWriteCheck' class.
productPrefix ( ) This method is from 'PreWriteCheck' class.
toString ( ) This method is from 'PreWriteCheck' class.
[+] PrunedFilteredScan (1)

| | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (1)
buildScan ( java.lang.String[ ], Filter[ ] ) - This abstract method is from the 'PrunedFilteredScan' interface.
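To make the failure mode concrete, here is a minimal Scala sketch (the class name and body are hypothetical; the trait and its buildScan signature are taken from the 1.3.0 API listed above). Compiled against spark-sql 1.3.0 this works; loaded against 1.1.0, where PrunedFilteredScan does not exist, the class fails to load with a NoClassDefFoundError before any of its methods run.

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, Filter, PrunedFilteredScan}
import org.apache.spark.sql.types.StructType

// Hypothetical client relation that mixes in the removed trait.
class ColumnPruningRelation(val sqlContext: SQLContext, val schema: StructType)
    extends BaseRelation with PrunedFilteredScan {

  // Spark hands this method only the requested columns and pushed-down filters.
  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] =
    sqlContext.sparkContext.emptyRDD[Row]
}
```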
[+] RefreshTable (1)

| | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (14)
All of the following are from the 'RefreshTable' class ('RefreshTable ( java.lang.String, java.lang.String )' is its constructor):

canEqual ( java.lang.Object )
copy ( java.lang.String, java.lang.String )
curried ( )
databaseName ( )
equals ( java.lang.Object )
hashCode ( )
productArity ( )
productElement ( int )
productIterator ( )
productPrefix ( )
RefreshTable ( java.lang.String, java.lang.String )
run ( org.apache.spark.sql.SQLContext )
tableName ( )
tupled ( )
[+] RelationProvider (1)

| | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - This abstract method is from the 'RelationProvider' interface.
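For orientation, a minimal sketch of the client code built on this interface (the class name and body are hypothetical; the createRelation signature matches the one listed above). Any such provider compiled against 1.3.0 hits the same NoClassDefFoundError when 1.1.0 is on the classpath.

```scala
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider}

// Hypothetical data-source provider; Spark instantiates it by name and
// calls createRelation with the user-supplied options map.
class DefaultSource extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation =
    sys.error("construct a BaseRelation from the options here")
}
```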
[+] ResolvedDataSource (1)

| | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (15)
All of the following are from the 'ResolvedDataSource' class ('ResolvedDataSource ( java.lang.Class<?>, BaseRelation )' is its constructor):

apply ( org.apache.spark.sql.SQLContext, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.DataFrame )
apply ( org.apache.spark.sql.SQLContext, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> )
canEqual ( java.lang.Object )
copy ( java.lang.Class<?>, BaseRelation )
equals ( java.lang.Object )
hashCode ( )
lookupDataSource ( java.lang.String )
productArity ( )
productElement ( int )
productIterator ( )
productPrefix ( )
provider ( )
relation ( )
ResolvedDataSource ( java.lang.Class<?>, BaseRelation )
toString ( )
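A client that must run against both versions can probe for a removed class before entering a 1.3.0-only code path. This is a generic defensive sketch, not part of this report or of Spark; the probed class name is taken from the listing above, assuming the org.apache.spark.sql.sources package that this section covers.

```scala
object CompatProbe {
  // Returns true when the 1.3.0-era class is present on the classpath.
  def hasResolvedDataSource: Boolean =
    try {
      Class.forName("org.apache.spark.sql.sources.ResolvedDataSource")
      true
    } catch {
      case _: ClassNotFoundException => false
    }

  def main(args: Array[String]): Unit =
    println(s"ResolvedDataSource available: $hasResolvedDataSource")
}
```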
[+] SchemaRelationProvider (1)

| | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.types.StructType ) - This abstract method is from the 'SchemaRelationProvider' interface.
[+] TableScan (1)

| | Change | Effect |
|---|---|---|
| 1 | This interface has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (1)
buildScan ( ) - This abstract method is from the 'TableScan' interface.
package org.apache.spark.sql.test
[+] ExamplePoint (1)

| | Change | Effect |
|---|---|---|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |

[+] affected methods (3)
ExamplePoint ( double, double ) - This constructor is from the 'ExamplePoint' class.
x ( ) - This method is from the 'ExamplePoint' class.
y ( ) - This method is from the 'ExamplePoint' class.
Problems with Data Types, Medium Severity (38)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] Column accessor and builder classes (22)

For every class in the table below, the superclass has been changed: its type parameter now comes from the org.apache.spark.sql.catalyst.types package instead of org.apache.spark.sql.types. The effects are the same in each case: 1) a client program's access to the fields or methods of the old superclass may be interrupted by a NoSuchFieldError or NoSuchMethodError exception; 2) a static field from a super-interface of a client class may hide a field with the same name inherited from the new superclass and cause an IncompatibleClassChangeError exception. The affected member of each class is its constructor.

| Class | New superclass | Affected constructor |
|---|---|---|
| BinaryColumnAccessor | BasicColumnAccessor<BinaryType.,byte[]> | BinaryColumnAccessor ( java.nio.ByteBuffer ) |
| BinaryColumnBuilder | ComplexColumnBuilder<BinaryType.,byte[]> | BinaryColumnBuilder ( ) |
| BooleanColumnAccessor | NativeColumnAccessor<BooleanType.> | BooleanColumnAccessor ( java.nio.ByteBuffer ) |
| BooleanColumnBuilder | NativeColumnBuilder<BooleanType.> | BooleanColumnBuilder ( ) |
| ByteColumnAccessor | NativeColumnAccessor<ByteType.> | ByteColumnAccessor ( java.nio.ByteBuffer ) |
| ByteColumnBuilder | NativeColumnBuilder<ByteType.> | ByteColumnBuilder ( ) |
| DoubleColumnAccessor | NativeColumnAccessor<DoubleType.> | DoubleColumnAccessor ( java.nio.ByteBuffer ) |
| DoubleColumnBuilder | NativeColumnBuilder<DoubleType.> | DoubleColumnBuilder ( ) |
| FloatColumnAccessor | NativeColumnAccessor<FloatType.> | FloatColumnAccessor ( java.nio.ByteBuffer ) |
| FloatColumnBuilder | NativeColumnBuilder<FloatType.> | FloatColumnBuilder ( ) |
| GenericColumnAccessor | BasicColumnAccessor<DataType,byte[]> | GenericColumnAccessor ( java.nio.ByteBuffer ) |
| GenericColumnBuilder | ComplexColumnBuilder<DataType,byte[]> | GenericColumnBuilder ( ) |
| IntColumnAccessor | NativeColumnAccessor<IntegerType.> | IntColumnAccessor ( java.nio.ByteBuffer ) |
| IntColumnBuilder | NativeColumnBuilder<IntegerType.> | IntColumnBuilder ( ) |
| LongColumnAccessor | NativeColumnAccessor<LongType.> | LongColumnAccessor ( java.nio.ByteBuffer ) |
| LongColumnBuilder | NativeColumnBuilder<LongType.> | LongColumnBuilder ( ) |
| ShortColumnAccessor | NativeColumnAccessor<ShortType.> | ShortColumnAccessor ( java.nio.ByteBuffer ) |
| ShortColumnBuilder | NativeColumnBuilder<ShortType.> | ShortColumnBuilder ( ) |
| StringColumnAccessor | NativeColumnAccessor<StringType.> | StringColumnAccessor ( java.nio.ByteBuffer ) |
| StringColumnBuilder | NativeColumnBuilder<StringType.> | StringColumnBuilder ( ) |
| TimestampColumnAccessor | NativeColumnAccessor<TimestampType.> | TimestampColumnAccessor ( java.nio.ByteBuffer ) |
| TimestampColumnBuilder | NativeColumnBuilder<TimestampType.> | TimestampColumnBuilder ( ) |
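Why a changed superclass is a binary problem even when the source still compiles: the JVM resolves a member reference against the class recorded in the caller's constant pool, so a client compiled against the old hierarchy can only fail at link time. A self-contained toy sketch (all names hypothetical, not Spark classes):

```scala
object SuperclassChangeDemo {
  class OldBase { def flush(): Unit = println("flushed") }
  class Builder extends OldBase // imagine a later version re-parents Builder

  def main(args: Array[String]): Unit = {
    val b = new Builder
    // The upcast and the call below are compiled against OldBase. If the
    // library later re-parents Builder so it no longer extends OldBase and
    // this file is not recompiled, the stale bytecode fails at link time
    // with a NoSuchMethodError or IncompatibleClassChangeError, as the
    // report warns.
    val asBase: OldBase = b
    asBase.flush()
  }
}
```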
package org.apache.spark.sql.execution
[+] DescribeCommand, ExplainCommand, SetCommand (1 each)

| | Change | Effect |
|---|---|---|
| 1 | For all three classes, the superclass has been changed from org.apache.spark.sql.catalyst.plans.logical.Command to SparkPlan. | 1) A client program's access to the fields or methods of the old superclass may be interrupted by a NoSuchFieldError or NoSuchMethodError exception. 2) A static field from a super-interface of a client class may hide a field with the same name inherited from the new superclass and cause an IncompatibleClassChangeError exception. |

[+] affected methods (9 + 10 + 8)
From the 'DescribeCommand' class: canEqual ( java.lang.Object ), child ( ), equals ( java.lang.Object ), hashCode ( ), output ( ), productArity ( ), productElement ( int ), productIterator ( ), productPrefix ( ).
From the 'ExplainCommand' class: canEqual ( java.lang.Object ), equals ( java.lang.Object ), extended ( ), hashCode ( ), logicalPlan ( ), output ( ), productArity ( ), productElement ( int ), productIterator ( ), productPrefix ( ).
From the 'SetCommand' class: canEqual ( java.lang.Object ), equals ( java.lang.Object ), hashCode ( ), output ( ), productArity ( ), productElement ( int ), productIterator ( ), productPrefix ( ).
[+] SparkStrategies inner strategy objects (9)

For each of the nine objects below, the superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy: SparkStrategies.BasicOperators., SparkStrategies.BroadcastNestedLoopJoin., SparkStrategies.CartesianProduct., SparkStrategies.HashAggregation., SparkStrategies.HashJoin., SparkStrategies.InMemoryScans., SparkStrategies.LeftSemiJoin., SparkStrategies.ParquetOperations., SparkStrategies.TakeOrdered. The effects are the same as for the superclass changes above: possible NoSuchFieldError, NoSuchMethodError, or IncompatibleClassChangeError exceptions in client programs compiled against the old hierarchy.

[+] affected methods (9)
BasicOperators ( ), BroadcastNestedLoopJoin ( ), CartesianProduct ( ), HashAggregation ( ), HashJoin ( ), InMemoryScans ( ), LeftSemiJoin ( ), ParquetOperations ( ), TakeOrdered ( ) - the return value of each accessor method has the corresponding 'SparkStrategies.<Name>.' type.
package org.apache.spark.sql.parquet
[+] AppendingParquetOutputFormat, RowReadSupport, RowRecordMaterializer, RowWriteSupport (1 each)

For each of these four classes, the superclass's type argument has been changed from org.apache.spark.sql.Row to org.apache.spark.sql.catalyst.expressions.Row:

| Class | Old superclass | New superclass |
|---|---|---|
| AppendingParquetOutputFormat | parquet.hadoop.ParquetOutputFormat<org.apache.spark.sql.Row> | parquet.hadoop.ParquetOutputFormat<org.apache.spark.sql.catalyst.expressions.Row> |
| RowReadSupport | parquet.hadoop.api.ReadSupport<org.apache.spark.sql.Row> | parquet.hadoop.api.ReadSupport<org.apache.spark.sql.catalyst.expressions.Row> |
| RowRecordMaterializer | parquet.io.api.RecordMaterializer<org.apache.spark.sql.Row> | parquet.io.api.RecordMaterializer<org.apache.spark.sql.catalyst.expressions.Row> |
| RowWriteSupport | parquet.hadoop.api.WriteSupport<org.apache.spark.sql.Row> | parquet.hadoop.api.WriteSupport<org.apache.spark.sql.catalyst.expressions.Row> |

The effects are the same as for the other superclass changes above: possible NoSuchFieldError, NoSuchMethodError, or IncompatibleClassChangeError exceptions in client programs compiled against the old hierarchy.

[+] affected methods (27)
AppendingParquetOutputFormat ( int ), RowReadSupport ( ), and RowRecordMaterializer ( parquet.schema.MessageType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> ) are the constructors of their respective classes. All of the following are from the 'RowWriteSupport' class ('RowWriteSupport ( )' is its constructor):

getSchema ( org.apache.hadoop.conf.Configuration )
init ( org.apache.hadoop.conf.Configuration )
isTraceEnabled ( )
log ( )
logDebug ( scala.Function0<java.lang.String> )
logDebug ( scala.Function0<java.lang.String>, java.lang.Throwable )
logError ( scala.Function0<java.lang.String> )
logError ( scala.Function0<java.lang.String>, java.lang.Throwable )
logInfo ( scala.Function0<java.lang.String> )
logInfo ( scala.Function0<java.lang.String>, java.lang.Throwable )
logName ( )
logTrace ( scala.Function0<java.lang.String> )
logTrace ( scala.Function0<java.lang.String>, java.lang.Throwable )
logWarning ( scala.Function0<java.lang.String> )
logWarning ( scala.Function0<java.lang.String>, java.lang.Throwable )
org.apache.spark.Logging..log_ ( )
org.apache.spark.Logging..log__.eq ( org.slf4j.Logger )
prepareForWrite ( parquet.io.api.RecordConsumer )
RowWriteSupport ( )
setSchema ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.hadoop.conf.Configuration )
SPARK_ROW_SCHEMA ( )
write ( java.lang.Object )
writer ( )
writer_.eq ( parquet.io.api.RecordConsumer )
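Because generic type arguments are erased on the JVM, swapping a superclass's type argument (here, one Row type for another) does not change erased call sites; the risk sits in inserted casts and in recompilation. A toy sketch with hypothetical names, not the Spark classes:

```scala
object TypeArgumentChangeDemo {
  class Support[T] { def prepare(value: T): T = value }
  // v1 of a library declares `class RowSupport extends Support[String]`;
  // imagine a later version changes it to `extends Support[Array[Byte]]`.
  class RowSupport extends Support[String]

  def main(args: Array[String]): Unit = {
    val s = new RowSupport
    // Compiled against v1, this is invokevirtual Support.prepare(Object)Object,
    // so the call still links against the re-typed v2. The mismatch surfaces
    // only on recompilation, or when a value of the new argument type hits
    // the checked cast scalac inserted for the old one (ClassCastException).
    val out: String = s.prepare("row")
    println(out)
  }
}
```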
Problems with Data Types, Low Severity (17)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] Column statistics classes (9)

For each class below, the superclass BasicColumnStats<T> has been added, with T drawn from the org.apache.spark.sql.catalyst.types package. The effect is the same in each case: a static field from a super-interface of a client class may hide a field with the same name inherited from the new superclass and cause an IncompatibleClassChangeError exception. The affected member of each class is its no-argument constructor.

| Class | Added superclass | Affected constructor |
|---|---|---|
| BooleanColumnStats | BasicColumnStats<BooleanType.> | BooleanColumnStats ( ) |
| ByteColumnStats | BasicColumnStats<ByteType.> | ByteColumnStats ( ) |
| DoubleColumnStats | BasicColumnStats<DoubleType.> | DoubleColumnStats ( ) |
| FloatColumnStats | BasicColumnStats<FloatType.> | FloatColumnStats ( ) |
| IntColumnStats | BasicColumnStats<IntegerType.> | IntColumnStats ( ) |
| LongColumnStats | BasicColumnStats<LongType.> | LongColumnStats ( ) |
| ShortColumnStats | BasicColumnStats<ShortType.> | ShortColumnStats ( ) |
| StringColumnStats | BasicColumnStats<StringType.> | StringColumnStats ( ) |
| TimestampColumnStats | BasicColumnStats<TimestampType.> | TimestampColumnStats ( ) |
package org.apache.spark.sql.execution
[+] DescribeCommand, ExplainCommand, SetCommand (1 each)

| | Change | Effect |
|---|---|---|
| 1 | In each class, method execute ( ) has been overridden. | The overriding execute ( ) will be called instead of the original execute ( ) in a client program. |

[+] affected methods (1 per class)
execute ( ) - the overriding 'execute ( )' will be called instead of this method in a client program.

[+] Limit, TakeOrdered (2 each)

| | Change | Effect |
|---|---|---|
| 1 | Method executeCollect ( ) has been moved up the type hierarchy. | The inherited executeCollect ( ) will be called instead of the original in a client program. |
| 2 | Method outputPartitioning ( ) has been moved up the type hierarchy. | The inherited outputPartitioning ( ) will be called instead of the original in a client program. |

[+] affected methods (2 per class)
executeCollect ( ) - the inherited 'executeCollect ( )' will be called instead of this method in a client program.
outputPartitioning ( ) - the inherited 'outputPartitioning ( )' will be called instead of this method in a client program.
package org.apache.spark.sql.parquet
[+] CatalystGroupConverter (1)

| | Change | Effect |
|---|---|---|
| 1 | Method getCurrentRecord ( ) has been moved up the type hierarchy. | The inherited getCurrentRecord ( ) will be called instead of the original getCurrentRecord ( ) in a client program. |

[+] affected methods (1)
getCurrentRecord ( ) - the inherited 'getCurrentRecord ( )' will be called instead of this method in a client program.
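These "moved up" and "overridden" findings are low severity because JVM method resolution walks the superclass chain: old call sites keep linking and simply land on the relocated implementation. A toy sketch with hypothetical names:

```scala
object MovedUpDemo {
  class Parent { def record(): String = "resolved in Parent" } // v2 hosts it here
  class Child extends Parent // v1 declared record() directly on Child

  def main(args: Array[String]): Unit = {
    // Old client bytecode contains invokevirtual Child.record; resolution
    // walks up the hierarchy and finds Parent.record, which is what the
    // report means by "will be called instead of this method".
    println(new Child().record())
  }
}
```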
Other Changes in Data Types (6)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] ColumnBuilder (1)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |

[+] affected methods (3)
All of the following abstract methods are from the 'ColumnBuilder' interface:

build ( )
columnStats ( )
initialize ( int, java.lang.String, boolean )
[+] NullableColumnBuilder (2)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |
| 2 | Abstract method NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |

[+] affected methods (11)
All of the following abstract methods are from the 'NullableColumnBuilder' interface:

build ( )
buildNonNulls ( )
initialize ( int, java.lang.String, boolean )
nullCount ( )
nullCount_.eq ( int )
nulls ( )
nulls_.eq ( java.nio.ByteBuffer )
NullableColumnBuilder..pos ( )
NullableColumnBuilder..pos_.eq ( int )
NullableColumnBuilder..super.build ( )
NullableColumnBuilder..super.initialize ( int, java.lang.String, boolean )
package org.apache.spark.sql.columnar.compression
[+] CompressionScheme (1)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method encoder ( ) has been added to this interface. | No effect. |

[+] affected methods (3)
All of the following abstract methods are from the 'CompressionScheme' interface:

decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> )
supports ( org.apache.spark.sql.columnar.ColumnType<?,?> )
typeId ( )
[+] Encoder<T> (2)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method compress ( java.nio.ByteBuffer, java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> ) has been added to this interface. | No effect. |
| 2 | Abstract method gatherCompressibilityStats ( java.lang.Object, org.apache.spark.sql.columnar.NativeColumnType<T> ) has been added to this interface. | No effect. |

[+] affected methods (3)
All of the following abstract methods are from the 'Encoder<T>' interface:

compressedSize ( )
compressionRatio ( )
uncompressedSize ( )
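"No effect" here refers to binary compatibility only: already-compiled callers and implementations keep loading and linking. The change still bites anyone who recompiles an implementation against the new interface, and invoking the added method on an implementation compiled before the change raises an AbstractMethodError. A toy sketch with hypothetical names:

```scala
object AddedAbstractMethodDemo {
  trait Scheme { def typeId: Int } // imagine v2 also adds: def encoder: AnyRef
  class RunLength extends Scheme { def typeId: Int = 1 } // compiled against v1

  def main(args: Array[String]): Unit = {
    // A RunLength compiled against v1 still loads under v2, and this call
    // links fine; only an actual invocation of the new encoder method on
    // the old binary would throw AbstractMethodError.
    println(new RunLength().typeId)
  }
}
```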
Java ARchives (1)
spark-sql_2.10-1.3.0.jar
Generated on Wed Oct 28 11:07:55 2015 for succinct-0.1.2 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API