Binary compatibility report for the succinct-0.1.2 library between versions 1.3.0 and 1.0.0 (relating to the portability of the client application succinct-0.1.2.jar)
Test Info

| Field | Value |
|---|---|
| Library Name | succinct-0.1.2 |
| Version #1 | 1.3.0 |
| Version #2 | 1.0.0 |
| Java Version | 1.7.0_75 |
Test Results

| Field | Value |
|---|---|
| Total Java ARchives | 1 |
| Total Methods / Classes | 2349 / 463 |
| Verdict | Incompatible (83.9%) |
Problem Summary

| | Severity | Count |
|---|---|---|
| Added Methods | - | 115 |
| Removed Methods | High | 1864 |
| Problems with Data Types | High | 151 |
| | Medium | 32 |
| | Low | 12 |
| Problems with Methods | High | 4 |
| | Medium | 0 |
| | Low | 0 |
| Other Changes in Data Types | - | 10 |
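A verdict of this kind is about link-time resolution: bytecode compiled against the 1.3.0 jar references signatures that the 1.0.0 jar does not export, so affected call sites fail with `java.lang.NoSuchMethodError` or `NoClassDefFoundError` when the jars are swapped. A minimal sketch of the failure mode, assuming a client compiled against spark-sql_2.10-1.3.0 and executed with spark-sql_2.10-1.0.0 on the classpath (`CompatDemo` is a hypothetical caller, not part of either jar):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object CompatDemo {
  def main(args: Array[String]): Unit = {
    val sqlContext = new SQLContext(new SparkContext("local", "compat-demo"))
    // Compiled against 1.3.0 this call site is linked as
    //   sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;
    // while the 1.0.0 jar only exports
    //   sql:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;
    // (see SQLContext under Added Methods below), so the JVM throws
    // java.lang.NoSuchMethodError here before the query is even parsed.
    sqlContext.sql("SELECT 1").collect().foreach(println)
  }
}
```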
Added Methods (115)
spark-sql_2.10-1.0.0.jar, AddExchange.class
package org.apache.spark.sql.execution
AddExchange.logger ( ) [static] : com.typesafe.scalalogging.slf4j.Logger
[mangled: org/apache/spark/sql/execution/AddExchange.logger:()Lcom/typesafe/scalalogging/slf4j/Logger;]
AddExchange.ruleName ( ) [static] : String
[mangled: org/apache/spark/sql/execution/AddExchange.ruleName:()Ljava/lang/String;]
spark-sql_2.10-1.0.0.jar, Aggregate.class
package org.apache.spark.sql.execution
Aggregate.Aggregate ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/execution/Aggregate."<init>":(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)V]
Aggregate.copy ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child, org.apache.spark.SparkContext sc ) : Aggregate
[mangled: org/apache/spark/sql/execution/Aggregate.copy:(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/execution/Aggregate;]
Aggregate.Aggregate..computedAggregates ( ) : Aggregate.ComputedAggregate[ ]
[mangled: org/apache/spark/sql/execution/Aggregate.org.apache.spark.sql.execution.Aggregate..computedAggregates:()[Lorg/apache/spark/sql/execution/Aggregate$ComputedAggregate;]
Aggregate.Aggregate..computedSchema ( ) : org.apache.spark.sql.catalyst.expressions.AttributeReference[ ]
[mangled: org/apache/spark/sql/execution/Aggregate.org.apache.spark.sql.execution.Aggregate..computedSchema:()[Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;]
Aggregate.Aggregate..namedGroups ( ) : scala.collection.Seq<scala.Tuple2<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Attribute>>
[mangled: org/apache/spark/sql/execution/Aggregate.org.apache.spark.sql.execution.Aggregate..namedGroups:()Lscala/collection/Seq;]
Aggregate.Aggregate..resultExpressions ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/Aggregate.org.apache.spark.sql.execution.Aggregate..resultExpressions:()Lscala/collection/Seq;]
Aggregate.Aggregate..resultMap ( ) : scala.collection.immutable.Map<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Aggregate.org.apache.spark.sql.execution.Aggregate..resultMap:()Lscala/collection/immutable/Map;]
Aggregate.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/execution/Aggregate.otherCopyArgs:()Lscala/collection/immutable/List;]
Aggregate.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Aggregate.otherCopyArgs:()Lscala/collection/Seq;]
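Note the extra `org.apache.spark.SparkContext` parameter: in 1.0.0 the operator threads the context through its constructor, whereas the 1.3.0 constructor (listed under Removed Methods below) takes only the first four arguments. A small reflection probe, offered as a diagnostic sketch (`AggregateCtorProbe` is our name), prints which form the jar on the classpath provides without risking a link error:

```scala
object AggregateCtorProbe {
  def main(args: Array[String]): Unit = {
    // Class name taken from the listing above; resolved at run time so the
    // probe links cleanly against either spark-sql version.
    val cls = Class.forName("org.apache.spark.sql.execution.Aggregate")
    cls.getConstructors.foreach { c =>
      println(c.getParameterTypes.map(_.getSimpleName).mkString("Aggregate(", ", ", ")"))
    }
    // Against spark-sql 1.0.0 this prints a five-parameter constructor ending
    // in SparkContext; against 1.3.0, the four-parameter form without it.
  }
}
```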
spark-sql_2.10-1.0.0.jar, CatalystGroupConverter.class
package org.apache.spark.sql.parquet
CatalystGroupConverter.CatalystGroupConverter ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":(Lscala/collection/Seq;)V]
CatalystGroupConverter.CatalystGroupConverter ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, org.apache.spark.sql.catalyst.expressions.GenericMutableRow current )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/expressions/GenericMutableRow;)V]
CatalystGroupConverter.current ( ) : org.apache.spark.sql.catalyst.expressions.GenericMutableRow
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.current:()Lorg/apache/spark/sql/catalyst/expressions/GenericMutableRow;]
CatalystGroupConverter.getCurrentRecord ( ) : org.apache.spark.sql.catalyst.expressions.GenericMutableRow
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.getCurrentRecord:()Lorg/apache/spark/sql/catalyst/expressions/GenericMutableRow;]
spark-sql_2.10-1.0.0.jar, CatalystPrimitiveConverter.class
package org.apache.spark.sql.parquet
CatalystPrimitiveConverter.CatalystPrimitiveConverter ( CatalystGroupConverter parent, int fieldIndex )
[mangled: org/apache/spark/sql/parquet/CatalystPrimitiveConverter."<init>":(Lorg/apache/spark/sql/parquet/CatalystGroupConverter;I)V]
spark-sql_2.10-1.0.0.jar, ColumnBuilder.class
package org.apache.spark.sql.columnar
ColumnBuilder.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnBuilder.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
spark-sql_2.10-1.0.0.jar, CompressionScheme.class
package org.apache.spark.sql.columnar.compression
CompressionScheme.encoder ( ) [abstract] : Encoder<T>
[mangled: org/apache/spark/sql/columnar/compression/CompressionScheme.encoder:()Lorg/apache/spark/sql/columnar/compression/Encoder;]
spark-sql_2.10-1.0.0.jar, Encoder<T>.class
package org.apache.spark.sql.columnar.compression
Encoder<T>.compress ( java.nio.ByteBuffer p1, java.nio.ByteBuffer p2, org.apache.spark.sql.columnar.NativeColumnType<T> p3 ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.compress:(Ljava/nio/ByteBuffer;Ljava/nio/ByteBuffer;Lorg/apache/spark/sql/columnar/NativeColumnType;)Ljava/nio/ByteBuffer;]
Encoder<T>.gatherCompressibilityStats ( Object p1, org.apache.spark.sql.columnar.NativeColumnType<T> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.gatherCompressibilityStats:(Ljava/lang/Object;Lorg/apache/spark/sql/columnar/NativeColumnType;)V]
spark-sql_2.10-1.0.0.jar, InMemoryColumnarTableScan.class
package org.apache.spark.sql.columnar
InMemoryColumnarTableScan.cachedColumnBuffers ( ) : org.apache.spark.rdd.RDD<java.nio.ByteBuffer[ ]>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.cachedColumnBuffers:()Lorg/apache/spark/rdd/RDD;]
InMemoryColumnarTableScan.child ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
InMemoryColumnarTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, org.apache.spark.sql.execution.SparkPlan child, boolean useCompression ) : InMemoryColumnarTableScan
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.copy:(Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Z)Lorg/apache/spark/sql/columnar/InMemoryColumnarTableScan;]
InMemoryColumnarTableScan.InMemoryColumnarTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, org.apache.spark.sql.execution.SparkPlan child, boolean useCompression )
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Z)V]
InMemoryColumnarTableScan.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.useCompression:()Z]
spark-sql_2.10-1.0.0.jar, InsertIntoParquetTable.class
package org.apache.spark.sql.parquet
InsertIntoParquetTable.copy ( ParquetRelation relation, org.apache.spark.sql.execution.SparkPlan child, boolean overwrite, org.apache.spark.SparkContext sc ) : InsertIntoParquetTable
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.copy:(Lorg/apache/spark/sql/parquet/ParquetRelation;Lorg/apache/spark/sql/execution/SparkPlan;ZLorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/parquet/InsertIntoParquetTable;]
InsertIntoParquetTable.InsertIntoParquetTable ( ParquetRelation relation, org.apache.spark.sql.execution.SparkPlan child, boolean overwrite, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable."<init>":(Lorg/apache/spark/sql/parquet/ParquetRelation;Lorg/apache/spark/sql/execution/SparkPlan;ZLorg/apache/spark/SparkContext;)V]
InsertIntoParquetTable.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.otherCopyArgs:()Lscala/collection/immutable/List;]
InsertIntoParquetTable.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.otherCopyArgs:()Lscala/collection/Seq;]
InsertIntoParquetTable.sc ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.sc:()Lorg/apache/spark/SparkContext;]
spark-sql_2.10-1.0.0.jar, IntColumnStats.class
package org.apache.spark.sql.columnar
IntColumnStats.ASCENDING ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.ASCENDING:()I]
IntColumnStats.contains ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.contains:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.DESCENDING ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.DESCENDING:()I]
IntColumnStats.gatherStats ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.gatherStats:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
IntColumnStats.initialBounds ( ) : scala.Tuple2<Object,Object>
[mangled: org/apache/spark/sql/columnar/IntColumnStats.initialBounds:()Lscala/Tuple2;]
IntColumnStats.INITIALIZED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.INITIALIZED:()I]
IntColumnStats.isAbove ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isAbove:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.isAscending ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isAscending:()Z]
IntColumnStats.isBelow ( org.apache.spark.sql.catalyst.expressions.Row row, int ordinal ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isBelow:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)Z]
IntColumnStats.isDescending ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isDescending:()Z]
IntColumnStats.isOrdered ( ) : boolean
[mangled: org/apache/spark/sql/columnar/IntColumnStats.isOrdered:()Z]
IntColumnStats.maxDelta ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.maxDelta:()I]
IntColumnStats.UNINITIALIZED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.UNINITIALIZED:()I]
IntColumnStats.UNORDERED ( ) [static] : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.UNORDERED:()I]
spark-sql_2.10-1.0.0.jar, Limit.class
package org.apache.spark.sql.execution
Limit.copy ( int limit, SparkPlan child, org.apache.spark.SparkContext sc ) : Limit
[mangled: org/apache/spark/sql/execution/Limit.copy:(ILorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/execution/Limit;]
Limit.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/Limit.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
Limit.Limit ( int limit, SparkPlan child, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/execution/Limit."<init>":(ILorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)V]
Limit.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/execution/Limit.otherCopyArgs:()Lscala/collection/immutable/List;]
Limit.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Limit.otherCopyArgs:()Lscala/collection/Seq;]
spark-sql_2.10-1.0.0.jar, NativeColumnType<T>.class
package org.apache.spark.sql.columnar
NativeColumnType<T>.dataType ( ) : T
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.dataType:()Lorg/apache/spark/sql/catalyst/types/NativeType;]
NativeColumnType<T>.NativeColumnType ( T dataType, int typeId, int defaultSize ) : public
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.org.apache.spark.sql.columnar.NativeColumnType:(Lorg/apache/spark/sql/catalyst/types/NativeType;II)V]
spark-sql_2.10-1.0.0.jar, NullableColumnBuilder.class
package org.apache.spark.sql.columnar
NullableColumnBuilder.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
NullableColumnBuilder.NullableColumnBuilder..nullCount ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..nullCount:()I]
NullableColumnBuilder.NullableColumnBuilder..nullCount_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..nullCount_.eq:(I)V]
NullableColumnBuilder.NullableColumnBuilder..nulls ( ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..nulls:()Ljava/nio/ByteBuffer;]
NullableColumnBuilder.NullableColumnBuilder..nulls_.eq ( java.nio.ByteBuffer p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..nulls_.eq:(Ljava/nio/ByteBuffer;)V]
NullableColumnBuilder.NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..super.appendFrom:(Lorg/apache/spark/sql/catalyst/expressions/Row;I)V]
spark-sql_2.10-1.0.0.jar, ParquetRelation.class
package org.apache.spark.sql.parquet
ParquetRelation.copy ( String path ) : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.create ( String p1, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan p2, org.apache.hadoop.conf.Configuration p3 ) [static] : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.create:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/hadoop/conf/Configuration;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.createEmpty ( String p1, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p2, boolean p3, org.apache.hadoop.conf.Configuration p4 ) [static] : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.createEmpty:(Ljava/lang/String;Lscala/collection/Seq;ZLorg/apache/hadoop/conf/Configuration;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.defaultCompression ( ) [static] : parquet.hadoop.metadata.CompressionCodecName
[mangled: org/apache/spark/sql/parquet/ParquetRelation.defaultCompression:()Lparquet/hadoop/metadata/CompressionCodecName;]
ParquetRelation.ParquetRelation ( String path )
[mangled: org/apache/spark/sql/parquet/ParquetRelation."<init>":(Ljava/lang/String;)V]
spark-sql_2.10-1.0.0.jar, ParquetTableScan.class
package org.apache.spark.sql.parquet
ParquetTableScan.columnPruningPred ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.columnPruningPred:()Lscala/Option;]
ParquetTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, ParquetRelation relation, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> columnPruningPred, org.apache.spark.SparkContext sc ) : ParquetTableScan
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.copy:(Lscala/collection/Seq;Lorg/apache/spark/sql/parquet/ParquetRelation;Lscala/Option;Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/parquet/ParquetTableScan;]
ParquetTableScan.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.otherCopyArgs:()Lscala/collection/immutable/List;]
ParquetTableScan.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.otherCopyArgs:()Lscala/collection/Seq;]
ParquetTableScan.ParquetTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, ParquetRelation relation, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> columnPruningPred, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/parquet/ParquetTableScan."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/parquet/ParquetRelation;Lscala/Option;Lorg/apache/spark/SparkContext;)V]
ParquetTableScan.sc ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.sc:()Lorg/apache/spark/SparkContext;]
spark-sql_2.10-1.0.0.jar, RowWriteSupport.class
package org.apache.spark.sql.parquet
RowWriteSupport.getSchema ( org.apache.hadoop.conf.Configuration configuration ) : parquet.schema.MessageType
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.getSchema:(Lorg/apache/hadoop/conf/Configuration;)Lparquet/schema/MessageType;]
RowWriteSupport.PARQUET_ROW_SCHEMA ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.PARQUET_ROW_SCHEMA:()Ljava/lang/String;]
RowWriteSupport.setSchema ( parquet.schema.MessageType schema, org.apache.hadoop.conf.Configuration configuration ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.setSchema:(Lparquet/schema/MessageType;Lorg/apache/hadoop/conf/Configuration;)V]
RowWriteSupport.write ( org.apache.spark.sql.catalyst.expressions.Row record ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.write:(Lorg/apache/spark/sql/catalyst/expressions/Row;)V]
spark-sql_2.10-1.0.0.jar, Sort.class
package org.apache.spark.sql.execution
Sort.ordering ( ) : org.apache.spark.sql.catalyst.expressions.RowOrdering
[mangled: org/apache/spark/sql/execution/Sort.ordering:()Lorg/apache/spark/sql/catalyst/expressions/RowOrdering;]
spark-sql_2.10-1.0.0.jar, SparkPlan.class
package org.apache.spark.sql.execution
SparkPlan.buildRow ( scala.collection.Seq<Object> values ) : org.apache.spark.sql.catalyst.expressions.Row
[mangled: org/apache/spark/sql/execution/SparkPlan.buildRow:(Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/Row;]
SparkPlan.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
SparkPlan.logger ( ) : com.typesafe.scalalogging.slf4j.Logger
[mangled: org/apache/spark/sql/execution/SparkPlan.logger:()Lcom/typesafe/scalalogging/slf4j/Logger;]
spark-sql_2.10-1.0.0.jar, SparkStrategies.class
package org.apache.spark.sql.execution
SparkStrategies.convertToCatalyst ( Object a ) : Object
[mangled: org/apache/spark/sql/execution/SparkStrategies.convertToCatalyst:(Ljava/lang/Object;)Ljava/lang/Object;]
SparkStrategies.PartialAggregation ( ) : SparkStrategies.PartialAggregation.
[mangled: org/apache/spark/sql/execution/SparkStrategies.PartialAggregation:()Lorg/apache/spark/sql/execution/SparkStrategies$PartialAggregation$;]
spark-sql_2.10-1.0.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.binaryToLiteral ( byte[ ] a ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.binaryToLiteral:([B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.booleanToLiteral ( boolean b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.booleanToLiteral:(Z)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.byteToLiteral ( byte b ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.byteToLiteral:(B)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.createParquetFile ( String path, boolean allowExisting, org.apache.hadoop.conf.Configuration conf, scala.reflect.api.TypeTags.TypeTag<A> p4 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createParquetFile:(Ljava/lang/String;ZLorg/apache/hadoop/conf/Configuration;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.createSchemaRDD ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.createSchemaRDD:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.decimalToLiteral ( scala.math.BigDecimal d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.decimalToLiteral:(Lscala/math/BigDecimal;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.doubleToLiteral ( double d ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.doubleToLiteral:(D)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.DslAttribute ( catalyst.expressions.AttributeReference a ) : catalyst.dsl.package.ExpressionConversions.DslAttribute
[mangled: org/apache/spark/sql/SQLContext.DslAttribute:(Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslAttribute;]
SQLContext.DslExpression ( catalyst.expressions.Expression e ) : catalyst.dsl.package.ExpressionConversions.DslExpression
[mangled: org/apache/spark/sql/SQLContext.DslExpression:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslExpression;]
SQLContext.DslString ( String s ) : catalyst.dsl.package.ExpressionConversions.DslString
[mangled: org/apache/spark/sql/SQLContext.DslString:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslString;]
SQLContext.DslSymbol ( scala.Symbol sym ) : catalyst.dsl.package.ExpressionConversions.DslSymbol
[mangled: org/apache/spark/sql/SQLContext.DslSymbol:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/dsl/package$ExpressionConversions$DslSymbol;]
SQLContext.floatToLiteral ( float f ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.floatToLiteral:(F)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.inferSchema ( org.apache.spark.rdd.RDD<scala.collection.immutable.Map<String,Object>> rdd ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.inferSchema:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.intToLiteral ( int i ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.intToLiteral:(I)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.logger ( ) : com.typesafe.scalalogging.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.logger:()Lcom/typesafe/scalalogging/slf4j/Logger;]
SQLContext.logicalPlanToSparkQuery ( catalyst.plans.logical.LogicalPlan plan ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.logicalPlanToSparkQuery:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.longToLiteral ( long l ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.longToLiteral:(J)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer.
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer$;]
SQLContext.parquetFile ( String path ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.parser ( ) : catalyst.SqlParser
[mangled: org/apache/spark/sql/SQLContext.parser:()Lorg/apache/spark/sql/catalyst/SqlParser;]
SQLContext.registerRDDAsTable ( SchemaRDD rdd, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerRDDAsTable:(Lorg/apache/spark/sql/SchemaRDD;Ljava/lang/String;)V]
SQLContext.shortToLiteral ( short s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.shortToLiteral:(S)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.sql ( String sqlText ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.stringToLiteral ( String s ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.stringToLiteral:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
SQLContext.symbolToUnresolvedAttribute ( scala.Symbol s ) : catalyst.analysis.UnresolvedAttribute
[mangled: org/apache/spark/sql/SQLContext.symbolToUnresolvedAttribute:(Lscala/Symbol;)Lorg/apache/spark/sql/catalyst/analysis/UnresolvedAttribute;]
SQLContext.table ( String tableName ) : SchemaRDD
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/SchemaRDD;]
SQLContext.timestampToLiteral ( java.sql.Timestamp t ) : catalyst.expressions.Literal
[mangled: org/apache/spark/sql/SQLContext.timestampToLiteral:(Ljava/sql/Timestamp;)Lorg/apache/spark/sql/catalyst/expressions/Literal;]
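The `SQLContext` entries above are the public face of the 1.0.0 API: `sql` returns a `SchemaRDD`, case-class RDDs are converted through the implicit `createSchemaRDD`, and tables are registered via `registerRDDAsTable`. A short usage sketch valid against spark-sql_2.10-1.0.0 only (`Person` and the sample rows are hypothetical):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

case class Person(name: String, age: Int)

object Sql100Demo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "sql-1.0.0-demo")
    val sqlContext = new SQLContext(sc)
    // Implicit RDD[A] => SchemaRDD conversion listed above (createSchemaRDD).
    import sqlContext.createSchemaRDD

    val people = sc.parallelize(Seq(Person("Ann", 34), Person("Bob", 29)))
    sqlContext.registerRDDAsTable(people, "people") // signature listed above
    val adults = sqlContext.sql("SELECT name FROM people WHERE age >= 30")
    adults.collect().foreach(println)
  }
}
```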
spark-sql_2.10-1.0.0.jar, TakeOrdered.class
package org.apache.spark.sql.execution
TakeOrdered.copy ( int limit, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, SparkPlan child, org.apache.spark.SparkContext sc ) : TakeOrdered
[mangled: org/apache/spark/sql/execution/TakeOrdered.copy:(ILscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/execution/TakeOrdered;]
TakeOrdered.executeCollect ( ) : org.apache.spark.sql.catalyst.expressions.Row[ ]
[mangled: org/apache/spark/sql/execution/TakeOrdered.executeCollect:()[Lorg/apache/spark/sql/catalyst/expressions/Row;]
TakeOrdered.ordering ( ) : org.apache.spark.sql.catalyst.expressions.RowOrdering
[mangled: org/apache/spark/sql/execution/TakeOrdered.ordering:()Lorg/apache/spark/sql/catalyst/expressions/RowOrdering;]
TakeOrdered.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/execution/TakeOrdered.otherCopyArgs:()Lscala/collection/immutable/List;]
TakeOrdered.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/TakeOrdered.otherCopyArgs:()Lscala/collection/Seq;]
TakeOrdered.TakeOrdered ( int limit, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, SparkPlan child, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/execution/TakeOrdered."<init>":(ILscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/SparkContext;)V]
spark-sql_2.10-1.0.0.jar, Union.class
package org.apache.spark.sql.execution
Union.copy ( scala.collection.Seq<SparkPlan> children, org.apache.spark.SparkContext sc ) : Union
[mangled: org/apache/spark/sql/execution/Union.copy:(Lscala/collection/Seq;Lorg/apache/spark/SparkContext;)Lorg/apache/spark/sql/execution/Union;]
Union.otherCopyArgs ( ) : scala.collection.immutable.List<org.apache.spark.SparkContext>
[mangled: org/apache/spark/sql/execution/Union.otherCopyArgs:()Lscala/collection/immutable/List;]
Union.otherCopyArgs ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Union.otherCopyArgs:()Lscala/collection/Seq;]
Union.Union ( scala.collection.Seq<SparkPlan> children, org.apache.spark.SparkContext sc )
[mangled: org/apache/spark/sql/execution/Union."<init>":(Lscala/collection/Seq;Lorg/apache/spark/SparkContext;)V]
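Most operators above follow one pattern: in 1.0.0 the `SparkContext` rides in a second (curried) constructor parameter list and is handed back to Catalyst's tree-copying machinery through `otherCopyArgs`; each operator therefore lists both a `List<SparkContext>` override and a `scala.collection.Seq` entry, the latter being the compiler-generated bridge method for the narrowed return type. A hypothetical operator sketching the idiom (not part of Spark):

```scala
import org.apache.spark.SparkContext

// The curried second parameter list keeps `sc` out of the case-class fields.
case class MyLimit(limit: Int, child: AnyRef)(@transient val sc: SparkContext) {
  // Catalyst's makeCopy re-invokes the constructor with the transformed
  // children plus whatever otherCopyArgs returns, so `sc` survives rewrites.
  def otherCopyArgs: List[SparkContext] = sc :: Nil
}
```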
Removed Methods (1864)
spark-sql_2.10-1.3.0.jar, AddExchange.class
package org.apache.spark.sql.execution
AddExchange.AddExchange ( org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/execution/AddExchange."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
AddExchange.andThen ( scala.Function1<AddExchange,A> p1 ) [static] : scala.Function1<org.apache.spark.sql.SQLContext,A>
[mangled: org/apache/spark/sql/execution/AddExchange.andThen:(Lscala/Function1;)Lscala/Function1;]
AddExchange.apply ( org.apache.spark.sql.catalyst.trees.TreeNode plan ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/AddExchange.apply:(Lorg/apache/spark/sql/catalyst/trees/TreeNode;)Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
AddExchange.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/AddExchange.canEqual:(Ljava/lang/Object;)Z]
AddExchange.compose ( scala.Function1<A,org.apache.spark.sql.SQLContext> p1 ) [static] : scala.Function1<A,AddExchange>
[mangled: org/apache/spark/sql/execution/AddExchange.compose:(Lscala/Function1;)Lscala/Function1;]
AddExchange.copy ( org.apache.spark.sql.SQLContext sqlContext ) : AddExchange
[mangled: org/apache/spark/sql/execution/AddExchange.copy:(Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/AddExchange;]
AddExchange.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/AddExchange.equals:(Ljava/lang/Object;)Z]
AddExchange.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/AddExchange.hashCode:()I]
AddExchange.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/AddExchange.productArity:()I]
AddExchange.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/AddExchange.productElement:(I)Ljava/lang/Object;]
AddExchange.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/AddExchange.productIterator:()Lscala/collection/Iterator;]
AddExchange.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/AddExchange.productPrefix:()Ljava/lang/String;]
AddExchange.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/execution/AddExchange.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
AddExchange.toString ( ) : String
[mangled: org/apache/spark/sql/execution/AddExchange.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, Aggregate.class
package org.apache.spark.sql.execution
Aggregate.Aggregate ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child )
[mangled: org/apache/spark/sql/execution/Aggregate."<init>":(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
Aggregate.copy ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child ) : Aggregate
[mangled: org/apache/spark/sql/execution/Aggregate.copy:(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Aggregate;]
Aggregate.curried ( ) [static] : scala.Function1<Object,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>,scala.Function1<SparkPlan,Aggregate>>>>
[mangled: org/apache/spark/sql/execution/Aggregate.curried:()Lscala/Function1;]
Aggregate.tupled ( ) [static] : scala.Function1<scala.Tuple4<Object,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>,SparkPlan>,Aggregate>
[mangled: org/apache/spark/sql/execution/Aggregate.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, AggregateEvaluation.class
package org.apache.spark.sql.execution
AggregateEvaluation.AggregateEvaluation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> initialValues, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> update, org.apache.spark.sql.catalyst.expressions.Expression result )
[mangled: org/apache/spark/sql/execution/AggregateEvaluation."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/expressions/Expression;)V]
AggregateEvaluation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.canEqual:(Ljava/lang/Object;)Z]
AggregateEvaluation.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> initialValues, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> update, org.apache.spark.sql.catalyst.expressions.Expression result ) : AggregateEvaluation
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/execution/AggregateEvaluation;]
AggregateEvaluation.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.catalyst.expressions.Expression,AggregateEvaluation>>>>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.curried:()Lscala/Function1;]
AggregateEvaluation.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.equals:(Ljava/lang/Object;)Z]
AggregateEvaluation.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.hashCode:()I]
AggregateEvaluation.initialValues ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.initialValues:()Lscala/collection/Seq;]
AggregateEvaluation.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.productArity:()I]
AggregateEvaluation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.productElement:(I)Ljava/lang/Object;]
AggregateEvaluation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.productIterator:()Lscala/collection/Iterator;]
AggregateEvaluation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.productPrefix:()Ljava/lang/String;]
AggregateEvaluation.result ( ) : org.apache.spark.sql.catalyst.expressions.Expression
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.result:()Lorg/apache/spark/sql/catalyst/expressions/Expression;]
AggregateEvaluation.schema ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.schema:()Lscala/collection/Seq;]
AggregateEvaluation.toString ( ) : String
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.toString:()Ljava/lang/String;]
AggregateEvaluation.tupled ( ) [static] : scala.Function1<scala.Tuple4<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.catalyst.expressions.Expression>,AggregateEvaluation>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.tupled:()Lscala/Function1;]
AggregateEvaluation.update ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/AggregateEvaluation.update:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, And.class
package org.apache.spark.sql.sources
And.And ( Filter left, Filter right )
[mangled: org/apache/spark/sql/sources/And."<init>":(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)V]
And.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/And.canEqual:(Ljava/lang/Object;)Z]
And.copy ( Filter left, Filter right ) : And
[mangled: org/apache/spark/sql/sources/And.copy:(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/And;]
And.curried ( ) [static] : scala.Function1<Filter,scala.Function1<Filter,And>>
[mangled: org/apache/spark/sql/sources/And.curried:()Lscala/Function1;]
And.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/And.equals:(Ljava/lang/Object;)Z]
And.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/And.hashCode:()I]
And.left ( ) : Filter
[mangled: org/apache/spark/sql/sources/And.left:()Lorg/apache/spark/sql/sources/Filter;]
And.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/And.productArity:()I]
And.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/And.productElement:(I)Ljava/lang/Object;]
And.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/And.productIterator:()Lscala/collection/Iterator;]
And.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/And.productPrefix:()Ljava/lang/String;]
And.right ( ) : Filter
[mangled: org/apache/spark/sql/sources/And.right:()Lorg/apache/spark/sql/sources/Filter;]
And.toString ( ) : String
[mangled: org/apache/spark/sql/sources/And.toString:()Ljava/lang/String;]
And.tupled ( ) [static] : scala.Function1<scala.Tuple2<Filter,Filter>,And>
[mangled: org/apache/spark/sql/sources/And.tupled:()Lscala/Function1;]
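`And` belongs to the data-sources filter algebra that 1.3.0 introduced in `org.apache.spark.sql.sources`, which is why it is absent from 1.0.0. A self-contained usage sketch, valid only against spark-sql_2.10-1.3.0; `EqualTo` and `GreaterThan` are sibling filters from the same package that this excerpt does not list:

```scala
import org.apache.spark.sql.sources.{And, EqualTo, Filter, GreaterThan}

object FilterDemo {
  def main(args: Array[String]): Unit = {
    // Filters are plain case classes handed to a data source for pushdown.
    val f: Filter = And(EqualTo("age", 30), GreaterThan("score", 0.5))
    println(f) // And(EqualTo(age,30),GreaterThan(score,0.5))
  }
}
```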
spark-sql_2.10-1.3.0.jar, BaseRelation.class
package org.apache.spark.sql.sources
BaseRelation.BaseRelation ( )
[mangled: org/apache/spark/sql/sources/BaseRelation."<init>":()V]
BaseRelation.schema ( ) [abstract] : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/sources/BaseRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
BaseRelation.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/sources/BaseRelation.sizeInBytes:()J]
BaseRelation.sqlContext ( ) [abstract] : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/sources/BaseRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
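`BaseRelation` is the 1.3.0 extension point for external data sources; the two abstract members above are all a relation must supply, while `sizeInBytes` keeps a conservative default so unknown relations are not broadcast. A minimal sketch (`DemoRelation` is hypothetical):

```scala
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.types.{StringType, StructField, StructType}

// Implements only the two abstract members from the listing above;
// sizeInBytes is inherited.
class DemoRelation(override val sqlContext: SQLContext) extends BaseRelation {
  override def schema: StructType =
    StructType(Seq(StructField("value", StringType, nullable = true)))
}
```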
spark-sql_2.10-1.3.0.jar, BatchPythonEvaluation.class
package org.apache.spark.sql.execution
BatchPythonEvaluation.BatchPythonEvaluation ( PythonUDF udf, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child )
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation."<init>":(Lorg/apache/spark/sql/execution/PythonUDF;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
BatchPythonEvaluation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.canEqual:(Ljava/lang/Object;)Z]
BatchPythonEvaluation.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
BatchPythonEvaluation.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.children:()Lscala/collection/immutable/List;]
BatchPythonEvaluation.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.children:()Lscala/collection/Seq;]
BatchPythonEvaluation.copy ( PythonUDF udf, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child ) : BatchPythonEvaluation
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.copy:(Lorg/apache/spark/sql/execution/PythonUDF;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/BatchPythonEvaluation;]
BatchPythonEvaluation.curried ( ) [static] : scala.Function1<PythonUDF,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<SparkPlan,BatchPythonEvaluation>>>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.curried:()Lscala/Function1;]
BatchPythonEvaluation.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.equals:(Ljava/lang/Object;)Z]
BatchPythonEvaluation.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.execute:()Lorg/apache/spark/rdd/RDD;]
BatchPythonEvaluation.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.hashCode:()I]
BatchPythonEvaluation.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.output:()Lscala/collection/Seq;]
BatchPythonEvaluation.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.productArity:()I]
BatchPythonEvaluation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.productElement:(I)Ljava/lang/Object;]
BatchPythonEvaluation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.productIterator:()Lscala/collection/Iterator;]
BatchPythonEvaluation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.productPrefix:()Ljava/lang/String;]
BatchPythonEvaluation.tupled ( ) [static] : scala.Function1<scala.Tuple3<PythonUDF,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SparkPlan>,BatchPythonEvaluation>
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.tupled:()Lscala/Function1;]
BatchPythonEvaluation.udf ( ) : PythonUDF
[mangled: org/apache/spark/sql/execution/BatchPythonEvaluation.udf:()Lorg/apache/spark/sql/execution/PythonUDF;]
spark-sql_2.10-1.3.0.jar, BinaryColumnStats.class
package org.apache.spark.sql.columnar
BinaryColumnStats.BinaryColumnStats ( )
[mangled: org/apache/spark/sql/columnar/BinaryColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, BroadcastHashJoin.class
package org.apache.spark.sql.execution.joins
BroadcastHashJoin.BroadcastHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
BroadcastHashJoin.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastHashJoin.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
BroadcastHashJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.canEqual:(Ljava/lang/Object;)Z]
BroadcastHashJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.children:()Lscala/collection/Seq;]
BroadcastHashJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : BroadcastHashJoin
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/BroadcastHashJoin;]
BroadcastHashJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,BroadcastHashJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.curried:()Lscala/Function1;]
BroadcastHashJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.equals:(Ljava/lang/Object;)Z]
BroadcastHashJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastHashJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.hashCode:()I]
BroadcastHashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
BroadcastHashJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastHashJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.leftKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.output:()Lscala/collection/Seq;]
BroadcastHashJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
BroadcastHashJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productArity:()I]
BroadcastHashJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productElement:(I)Ljava/lang/Object;]
BroadcastHashJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productIterator:()Lscala/collection/Iterator;]
BroadcastHashJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.productPrefix:()Ljava/lang/String;]
BroadcastHashJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.UnspecifiedDistribution.>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
BroadcastHashJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.requiredChildDistribution:()Lscala/collection/Seq;]
BroadcastHashJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastHashJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.rightKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamedKeys:()Lscala/collection/Seq;]
BroadcastHashJoin.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastHashJoin.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.streamSideKeyGenerator:()Lscala/Function0;]
BroadcastHashJoin.timeout ( ) : scala.concurrent.duration.Duration
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.timeout:()Lscala/concurrent/duration/Duration;]
BroadcastHashJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,package.BuildSide,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,BroadcastHashJoin>
[mangled: org/apache/spark/sql/execution/joins/BroadcastHashJoin.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, BroadcastLeftSemiJoinHash.class
package org.apache.spark.sql.execution.joins
BroadcastLeftSemiJoinHash.BroadcastLeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
BroadcastLeftSemiJoinHash.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.buildSide ( ) : package.BuildRight.
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildRight$;]
BroadcastLeftSemiJoinHash.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastLeftSemiJoinHash.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
BroadcastLeftSemiJoinHash.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.canEqual:(Ljava/lang/Object;)Z]
BroadcastLeftSemiJoinHash.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.children:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : BroadcastLeftSemiJoinHash
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash;]
BroadcastLeftSemiJoinHash.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,BroadcastLeftSemiJoinHash>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.curried:()Lscala/Function1;]
BroadcastLeftSemiJoinHash.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.equals:(Ljava/lang/Object;)Z]
BroadcastLeftSemiJoinHash.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastLeftSemiJoinHash.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.hashCode:()I]
BroadcastLeftSemiJoinHash.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
BroadcastLeftSemiJoinHash.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastLeftSemiJoinHash.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.leftKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.output:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productArity:()I]
BroadcastLeftSemiJoinHash.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productElement:(I)Ljava/lang/Object;]
BroadcastLeftSemiJoinHash.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productIterator:()Lscala/collection/Iterator;]
BroadcastLeftSemiJoinHash.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.productPrefix:()Ljava/lang/String;]
BroadcastLeftSemiJoinHash.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastLeftSemiJoinHash.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.rightKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamedKeys:()Lscala/collection/Seq;]
BroadcastLeftSemiJoinHash.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastLeftSemiJoinHash.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.streamSideKeyGenerator:()Lscala/Function0;]
BroadcastLeftSemiJoinHash.tupled ( ) [static] : scala.Function1<scala.Tuple4<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,BroadcastLeftSemiJoinHash>
[mangled: org/apache/spark/sql/execution/joins/BroadcastLeftSemiJoinHash.tupled:()Lscala/Function1;]
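
Note: most of the per-class entries in this report are not hand-written API. BroadcastLeftSemiJoinHash is a Scala case class, so the compiler synthesizes copy, canEqual, equals, hashCode, productArity, productElement, productIterator and productPrefix on the class, plus curried and tupled on its companion; the duplicated left()/right() entries returning both TreeNode and SparkPlan are JVM bridge methods. Since these classes sit under the 1.3.0 jar, each one is API that a client compiled against 1.3.0 loses when dropped to 1.0.0. A minimal sketch with a hypothetical case class (not the Spark type) shows where the synthetic members come from:

    // Hypothetical stand-in; any Scala case class gets the same members.
    case class Join(leftKeys: Seq[String], rightKeys: Seq[String])

    object CaseClassDemo extends App {
      val j = Join(Seq("a"), Seq("b"))
      println(j.productPrefix)                // "Join"
      println(j.productArity)                 // 2
      println(j.copy(rightKeys = Seq("c")))   // Join(List(a),List(c))
      val make = (Join.apply _).curried       // Seq[String] => Seq[String] => Join
      println(make(Seq("a"))(Seq("b")))
      println(Join.tupled((Seq("x"), Seq("y"))))
    }
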
spark-sql_2.10-1.3.0.jar, BroadcastNestedLoopJoin.class
package org.apache.spark.sql.execution.joins
BroadcastNestedLoopJoin.BroadcastNestedLoopJoin ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right, package.BuildSide buildSide, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition )
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;)V]
BroadcastNestedLoopJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
BroadcastNestedLoopJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.canEqual:(Ljava/lang/Object;)Z]
BroadcastNestedLoopJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.children:()Lscala/collection/Seq;]
BroadcastNestedLoopJoin.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.condition:()Lscala/Option;]
BroadcastNestedLoopJoin.copy ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right, package.BuildSide buildSide, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition ) : BroadcastNestedLoopJoin
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;)Lorg/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin;]
BroadcastNestedLoopJoin.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.catalyst.plans.JoinType,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,BroadcastNestedLoopJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.curried:()Lscala/Function1;]
BroadcastNestedLoopJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.equals:(Ljava/lang/Object;)Z]
BroadcastNestedLoopJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.execute:()Lorg/apache/spark/rdd/RDD;]
BroadcastNestedLoopJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.hashCode:()I]
BroadcastNestedLoopJoin.joinType ( ) : org.apache.spark.sql.catalyst.plans.JoinType
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.joinType:()Lorg/apache/spark/sql/catalyst/plans/JoinType;]
BroadcastNestedLoopJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastNestedLoopJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastNestedLoopJoin.BroadcastNestedLoopJoin..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin..boundCondition:()Lscala/Function1;]
BroadcastNestedLoopJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.output:()Lscala/collection/Seq;]
BroadcastNestedLoopJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
BroadcastNestedLoopJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productArity:()I]
BroadcastNestedLoopJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productElement:(I)Ljava/lang/Object;]
BroadcastNestedLoopJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productIterator:()Lscala/collection/Iterator;]
BroadcastNestedLoopJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.productPrefix:()Ljava/lang/String;]
BroadcastNestedLoopJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
BroadcastNestedLoopJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
BroadcastNestedLoopJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan,package.BuildSide,org.apache.spark.sql.catalyst.plans.JoinType,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>>,BroadcastNestedLoopJoin>
[mangled: org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CachedBatch.class
package org.apache.spark.sql.columnar
CachedBatch.buffers ( ) : byte[ ][ ]
[mangled: org/apache/spark/sql/columnar/CachedBatch.buffers:()[[B]
CachedBatch.CachedBatch ( byte[ ][ ] buffers, org.apache.spark.sql.Row stats )
[mangled: org/apache/spark/sql/columnar/CachedBatch."<init>":([[BLorg/apache/spark/sql/Row;)V]
CachedBatch.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/CachedBatch.canEqual:(Ljava/lang/Object;)Z]
CachedBatch.copy ( byte[ ][ ] buffers, org.apache.spark.sql.Row stats ) : CachedBatch
[mangled: org/apache/spark/sql/columnar/CachedBatch.copy:([[BLorg/apache/spark/sql/Row;)Lorg/apache/spark/sql/columnar/CachedBatch;]
CachedBatch.curried ( ) [static] : scala.Function1<byte[ ][ ],scala.Function1<org.apache.spark.sql.Row,CachedBatch>>
[mangled: org/apache/spark/sql/columnar/CachedBatch.curried:()Lscala/Function1;]
CachedBatch.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/CachedBatch.equals:(Ljava/lang/Object;)Z]
CachedBatch.hashCode ( ) : int
[mangled: org/apache/spark/sql/columnar/CachedBatch.hashCode:()I]
CachedBatch.productArity ( ) : int
[mangled: org/apache/spark/sql/columnar/CachedBatch.productArity:()I]
CachedBatch.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/columnar/CachedBatch.productElement:(I)Ljava/lang/Object;]
CachedBatch.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/columnar/CachedBatch.productIterator:()Lscala/collection/Iterator;]
CachedBatch.productPrefix ( ) : String
[mangled: org/apache/spark/sql/columnar/CachedBatch.productPrefix:()Ljava/lang/String;]
CachedBatch.stats ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/CachedBatch.stats:()Lorg/apache/spark/sql/Row;]
CachedBatch.toString ( ) : String
[mangled: org/apache/spark/sql/columnar/CachedBatch.toString:()Ljava/lang/String;]
CachedBatch.tupled ( ) [static] : scala.Function1<scala.Tuple2<byte[ ][ ],org.apache.spark.sql.Row>,CachedBatch>
[mangled: org/apache/spark/sql/columnar/CachedBatch.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CachedData.class
package org.apache.spark.sql
CachedData.CachedData ( catalyst.plans.logical.LogicalPlan plan, columnar.InMemoryRelation cachedRepresentation )
[mangled: org/apache/spark/sql/CachedData."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/columnar/InMemoryRelation;)V]
CachedData.cachedRepresentation ( ) : columnar.InMemoryRelation
[mangled: org/apache/spark/sql/CachedData.cachedRepresentation:()Lorg/apache/spark/sql/columnar/InMemoryRelation;]
CachedData.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/CachedData.canEqual:(Ljava/lang/Object;)Z]
CachedData.copy ( catalyst.plans.logical.LogicalPlan plan, columnar.InMemoryRelation cachedRepresentation ) : CachedData
[mangled: org/apache/spark/sql/CachedData.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/columnar/InMemoryRelation;)Lorg/apache/spark/sql/CachedData;]
CachedData.curried ( ) [static] : scala.Function1<catalyst.plans.logical.LogicalPlan,scala.Function1<columnar.InMemoryRelation,CachedData>>
[mangled: org/apache/spark/sql/CachedData.curried:()Lscala/Function1;]
CachedData.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/CachedData.equals:(Ljava/lang/Object;)Z]
CachedData.hashCode ( ) : int
[mangled: org/apache/spark/sql/CachedData.hashCode:()I]
CachedData.plan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/CachedData.plan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CachedData.productArity ( ) : int
[mangled: org/apache/spark/sql/CachedData.productArity:()I]
CachedData.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/CachedData.productElement:(I)Ljava/lang/Object;]
CachedData.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/CachedData.productIterator:()Lscala/collection/Iterator;]
CachedData.productPrefix ( ) : String
[mangled: org/apache/spark/sql/CachedData.productPrefix:()Ljava/lang/String;]
CachedData.toString ( ) : String
[mangled: org/apache/spark/sql/CachedData.toString:()Ljava/lang/String;]
CachedData.tupled ( ) [static] : scala.Function1<scala.Tuple2<catalyst.plans.logical.LogicalPlan,columnar.InMemoryRelation>,CachedData>
[mangled: org/apache/spark/sql/CachedData.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CacheManager.class
package org.apache.spark.sql
CacheManager.CacheManager ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/CacheManager."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
CacheManager.cacheQuery ( DataFrame query, scala.Option<String> tableName, org.apache.spark.storage.StorageLevel storageLevel ) : void
[mangled: org/apache/spark/sql/CacheManager.cacheQuery:(Lorg/apache/spark/sql/DataFrame;Lscala/Option;Lorg/apache/spark/storage/StorageLevel;)V]
CacheManager.cacheTable ( String tableName ) : void
[mangled: org/apache/spark/sql/CacheManager.cacheTable:(Ljava/lang/String;)V]
CacheManager.clearCache ( ) : void
[mangled: org/apache/spark/sql/CacheManager.clearCache:()V]
CacheManager.invalidateCache ( catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/CacheManager.invalidateCache:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CacheManager.isCached ( String tableName ) : boolean
[mangled: org/apache/spark/sql/CacheManager.isCached:(Ljava/lang/String;)Z]
CacheManager.tryUncacheQuery ( DataFrame query, boolean blocking ) : boolean
[mangled: org/apache/spark/sql/CacheManager.tryUncacheQuery:(Lorg/apache/spark/sql/DataFrame;Z)Z]
CacheManager.uncacheTable ( String tableName ) : void
[mangled: org/apache/spark/sql/CacheManager.uncacheTable:(Ljava/lang/String;)V]
CacheManager.useCachedData ( catalyst.plans.logical.LogicalPlan plan ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/CacheManager.useCachedData:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
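
CacheManager is internal to SQLContext; in the 1.3.x line the supported entry points are the delegating methods on SQLContext itself. A short sketch (table name hypothetical):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object CacheDemo extends App {
      val sc = new SparkContext(new SparkConf().setAppName("cache-demo").setMaster("local[*]"))
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      sc.parallelize(Seq((1, "a"), (2, "b"))).toDF("id", "name").registerTempTable("people")
      sqlContext.cacheTable("people")           // delegates to CacheManager.cacheTable
      println(sqlContext.isCached("people"))    // true
      sqlContext.uncacheTable("people")         // delegates to CacheManager.uncacheTable
    }
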
spark-sql_2.10-1.3.0.jar, CacheTableCommand.class
package org.apache.spark.sql.execution
CacheTableCommand.CacheTableCommand ( String tableName, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> plan, boolean isLazy )
[mangled: org/apache/spark/sql/execution/CacheTableCommand."<init>":(Ljava/lang/String;Lscala/Option;Z)V]
CacheTableCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.canEqual:(Ljava/lang/Object;)Z]
CacheTableCommand.copy ( String tableName, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> plan, boolean isLazy ) : CacheTableCommand
[mangled: org/apache/spark/sql/execution/CacheTableCommand.copy:(Ljava/lang/String;Lscala/Option;Z)Lorg/apache/spark/sql/execution/CacheTableCommand;]
CacheTableCommand.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,scala.Function1<Object,CacheTableCommand>>>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.curried:()Lscala/Function1;]
CacheTableCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.equals:(Ljava/lang/Object;)Z]
CacheTableCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/CacheTableCommand.hashCode:()I]
CacheTableCommand.isLazy ( ) : boolean
[mangled: org/apache/spark/sql/execution/CacheTableCommand.isLazy:()Z]
CacheTableCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.output:()Lscala/collection/Seq;]
CacheTableCommand.plan ( ) : scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.plan:()Lscala/Option;]
CacheTableCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productArity:()I]
CacheTableCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productElement:(I)Ljava/lang/Object;]
CacheTableCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productIterator:()Lscala/collection/Iterator;]
CacheTableCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/CacheTableCommand.productPrefix:()Ljava/lang/String;]
CacheTableCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CacheTableCommand.tableName ( ) : String
[mangled: org/apache/spark/sql/execution/CacheTableCommand.tableName:()Ljava/lang/String;]
CacheTableCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<String,scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,Object>,CacheTableCommand>
[mangled: org/apache/spark/sql/execution/CacheTableCommand.tupled:()Lscala/Function1;]
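
CacheTableCommand is what the SQL parser emits for CACHE TABLE statements: plan is Some(...) only for the AS SELECT form, and isLazy records the LAZY keyword. A sketch of the 1.3.x surface syntax (table names hypothetical):

    sqlContext.sql("CACHE TABLE people")
    sqlContext.sql("CACHE LAZY TABLE adults AS SELECT * FROM people WHERE id > 1")
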
spark-sql_2.10-1.3.0.jar, CartesianProduct.class
package org.apache.spark.sql.execution.joins
CartesianProduct.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.canEqual:(Ljava/lang/Object;)Z]
CartesianProduct.CartesianProduct ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
CartesianProduct.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.children:()Lscala/collection/Seq;]
CartesianProduct.copy ( org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : CartesianProduct
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/CartesianProduct;]
CartesianProduct.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,CartesianProduct>>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.curried:()Lscala/Function1;]
CartesianProduct.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.equals:(Ljava/lang/Object;)Z]
CartesianProduct.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.execute:()Lorg/apache/spark/rdd/RDD;]
CartesianProduct.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.hashCode:()I]
CartesianProduct.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CartesianProduct.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
CartesianProduct.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.output:()Lscala/collection/Seq;]
CartesianProduct.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productArity:()I]
CartesianProduct.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productElement:(I)Ljava/lang/Object;]
CartesianProduct.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productIterator:()Lscala/collection/Iterator;]
CartesianProduct.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.productPrefix:()Ljava/lang/String;]
CartesianProduct.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CartesianProduct.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
CartesianProduct.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,CartesianProduct>
[mangled: org/apache/spark/sql/execution/joins/CartesianProduct.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, CaseInsensitiveMap.class
package org.apache.spark.sql.sources
CaseInsensitiveMap.CaseInsensitiveMap ( scala.collection.immutable.Map<String,String> map )
[mangled: org/apache/spark/sql/sources/CaseInsensitiveMap."<init>":(Lscala/collection/immutable/Map;)V]
spark-sql_2.10-1.3.0.jar, CatalystArrayContainsNullConverter.class
package org.apache.spark.sql.parquet
CatalystArrayContainsNullConverter.CatalystArrayContainsNullConverter ( org.apache.spark.sql.types.DataType elementType, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystArrayContainsNullConverter."<init>":(Lorg/apache/spark/sql/types/DataType;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystArrayConverter.class
package org.apache.spark.sql.parquet
CatalystArrayConverter.CatalystArrayConverter ( org.apache.spark.sql.types.DataType elementType, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystArrayConverter."<init>":(Lorg/apache/spark/sql/types/DataType;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystConverter.class
package org.apache.spark.sql.parquet
CatalystConverter.ARRAY_CONTAINS_NULL_BAG_SCHEMA_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.ARRAY_CONTAINS_NULL_BAG_SCHEMA_NAME:()Ljava/lang/String;]
CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME:()Ljava/lang/String;]
CatalystConverter.CatalystConverter ( )
[mangled: org/apache/spark/sql/parquet/CatalystConverter."<init>":()V]
CatalystConverter.clearBuffer ( ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.clearBuffer:()V]
CatalystConverter.getCurrentRecord ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/parquet/CatalystConverter.getCurrentRecord:()Lorg/apache/spark/sql/Row;]
CatalystConverter.index ( ) [abstract] : int
[mangled: org/apache/spark/sql/parquet/CatalystConverter.index:()I]
CatalystConverter.isRootConverter ( ) : boolean
[mangled: org/apache/spark/sql/parquet/CatalystConverter.isRootConverter:()Z]
CatalystConverter.MAP_KEY_SCHEMA_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.MAP_KEY_SCHEMA_NAME:()Ljava/lang/String;]
CatalystConverter.MAP_SCHEMA_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.MAP_SCHEMA_NAME:()Ljava/lang/String;]
CatalystConverter.MAP_VALUE_SCHEMA_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.MAP_VALUE_SCHEMA_NAME:()Ljava/lang/String;]
CatalystConverter.parent ( ) [abstract] : CatalystConverter
[mangled: org/apache/spark/sql/parquet/CatalystConverter.parent:()Lorg/apache/spark/sql/parquet/CatalystConverter;]
CatalystConverter.readDecimal ( org.apache.spark.sql.types.Decimal dest, parquet.io.api.Binary value, org.apache.spark.sql.types.DecimalType ctype ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.readDecimal:(Lorg/apache/spark/sql/types/Decimal;Lparquet/io/api/Binary;Lorg/apache/spark/sql/types/DecimalType;)V]
CatalystConverter.readTimestamp ( parquet.io.api.Binary value ) : java.sql.Timestamp
[mangled: org/apache/spark/sql/parquet/CatalystConverter.readTimestamp:(Lparquet/io/api/Binary;)Ljava/sql/Timestamp;]
CatalystConverter.size ( ) [abstract] : int
[mangled: org/apache/spark/sql/parquet/CatalystConverter.size:()I]
CatalystConverter.THRIFT_ARRAY_ELEMENTS_SCHEMA_NAME_SUFFIX ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/CatalystConverter.THRIFT_ARRAY_ELEMENTS_SCHEMA_NAME_SUFFIX:()Ljava/lang/String;]
CatalystConverter.updateBinary ( int fieldIndex, parquet.io.api.Binary value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateBinary:(ILparquet/io/api/Binary;)V]
CatalystConverter.updateBoolean ( int fieldIndex, boolean value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateBoolean:(IZ)V]
CatalystConverter.updateByte ( int fieldIndex, byte value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateByte:(IB)V]
CatalystConverter.updateDecimal ( int fieldIndex, parquet.io.api.Binary value, org.apache.spark.sql.types.DecimalType ctype ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateDecimal:(ILparquet/io/api/Binary;Lorg/apache/spark/sql/types/DecimalType;)V]
CatalystConverter.updateDouble ( int fieldIndex, double value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateDouble:(ID)V]
CatalystConverter.updateField ( int p1, Object p2 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateField:(ILjava/lang/Object;)V]
CatalystConverter.updateFloat ( int fieldIndex, float value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateFloat:(IF)V]
CatalystConverter.updateInt ( int fieldIndex, int value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateInt:(II)V]
CatalystConverter.updateLong ( int fieldIndex, long value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateLong:(IJ)V]
CatalystConverter.updateShort ( int fieldIndex, short value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateShort:(IS)V]
CatalystConverter.updateString ( int fieldIndex, String value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateString:(ILjava/lang/String;)V]
CatalystConverter.updateTimestamp ( int fieldIndex, parquet.io.api.Binary value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystConverter.updateTimestamp:(ILparquet/io/api/Binary;)V]
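
The update* family above shows the converter's dispatch pattern: each typed callback from the Parquet layer funnels into the abstract updateField(fieldIndex, value). A hypothetical mirror of that pattern (not the Spark class, which has further members; only the shapes are taken from the listing):

    abstract class ToyConverter {
      protected def updateField(fieldIndex: Int, value: Any): Unit
      def updateBoolean(fieldIndex: Int, value: Boolean): Unit = updateField(fieldIndex, value)
      def updateInt(fieldIndex: Int, value: Int): Unit         = updateField(fieldIndex, value)
      def updateLong(fieldIndex: Int, value: Long): Unit       = updateField(fieldIndex, value)
      def updateDouble(fieldIndex: Int, value: Double): Unit   = updateField(fieldIndex, value)
      def updateString(fieldIndex: Int, value: String): Unit   = updateField(fieldIndex, value)
    }
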
spark-sql_2.10-1.3.0.jar, CatalystGroupConverter.class
package org.apache.spark.sql.parquet
CatalystGroupConverter.buffer ( ) : scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.buffer:()Lscala/collection/mutable/ArrayBuffer;]
CatalystGroupConverter.buffer_.eq ( scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row> p1 ) : void
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.buffer_.eq:(Lscala/collection/mutable/ArrayBuffer;)V]
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.catalyst.expressions.Attribute[ ] attributes )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/catalyst/expressions/Attribute;)V]
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
CatalystGroupConverter.CatalystGroupConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent, scala.collection.mutable.ArrayBuffer<Object> current, scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row> buffer )
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;Lscala/collection/mutable/ArrayBuffer;Lscala/collection/mutable/ArrayBuffer;)V]
CatalystGroupConverter.clearBuffer ( ) : void
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.clearBuffer:()V]
CatalystGroupConverter.current ( ) : scala.collection.mutable.ArrayBuffer<Object>
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.current:()Lscala/collection/mutable/ArrayBuffer;]
CatalystGroupConverter.current_.eq ( scala.collection.mutable.ArrayBuffer<Object> p1 ) : void
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.current_.eq:(Lscala/collection/mutable/ArrayBuffer;)V]
CatalystGroupConverter.getCurrentRecord ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.getCurrentRecord:()Lorg/apache/spark/sql/Row;]
CatalystGroupConverter.index ( ) : int
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.index:()I]
CatalystGroupConverter.parent ( ) : CatalystConverter
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.parent:()Lorg/apache/spark/sql/parquet/CatalystConverter;]
CatalystGroupConverter.schema ( ) : org.apache.spark.sql.types.StructField[ ]
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.schema:()[Lorg/apache/spark/sql/types/StructField;]
CatalystGroupConverter.size ( ) : int
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.size:()I]
CatalystGroupConverter.updateField ( int fieldIndex, Object value ) : void
[mangled: org/apache/spark/sql/parquet/CatalystGroupConverter.updateField:(ILjava/lang/Object;)V]
spark-sql_2.10-1.3.0.jar, CatalystMapConverter.class
package org.apache.spark.sql.parquet
CatalystMapConverter.CatalystMapConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystMapConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, CatalystNativeArrayConverter.class
package org.apache.spark.sql.parquet
CatalystNativeArrayConverter.CatalystNativeArrayConverter ( org.apache.spark.sql.types.NativeType elementType, int index, CatalystConverter parent, int capacity )
[mangled: org/apache/spark/sql/parquet/CatalystNativeArrayConverter."<init>":(Lorg/apache/spark/sql/types/NativeType;ILorg/apache/spark/sql/parquet/CatalystConverter;I)V]
spark-sql_2.10-1.3.0.jar, CatalystPrimitiveConverter.class
package org.apache.spark.sql.parquet
CatalystPrimitiveConverter.CatalystPrimitiveConverter ( CatalystConverter parent, int fieldIndex )
[mangled: org/apache/spark/sql/parquet/CatalystPrimitiveConverter."<init>":(Lorg/apache/spark/sql/parquet/CatalystConverter;I)V]
spark-sql_2.10-1.3.0.jar, CatalystPrimitiveRowConverter.class
package org.apache.spark.sql.parquet
CatalystPrimitiveRowConverter.CatalystPrimitiveRowConverter ( org.apache.spark.sql.catalyst.expressions.Attribute[ ] attributes )
[mangled: org/apache/spark/sql/parquet/CatalystPrimitiveRowConverter."<init>":([Lorg/apache/spark/sql/catalyst/expressions/Attribute;)V]
spark-sql_2.10-1.3.0.jar, CatalystPrimitiveStringConverter.class
package org.apache.spark.sql.parquet
CatalystPrimitiveStringConverter.CatalystPrimitiveStringConverter ( CatalystConverter parent, int fieldIndex )
[mangled: org/apache/spark/sql/parquet/CatalystPrimitiveStringConverter."<init>":(Lorg/apache/spark/sql/parquet/CatalystConverter;I)V]
spark-sql_2.10-1.3.0.jar, CatalystScan.class
package org.apache.spark.sql.sources
CatalystScan.buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p1, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> p2 ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/CatalystScan.buildScan:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/rdd/RDD;]
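
CatalystScan is the most general of the 1.3.x data-source scan interfaces: buildScan receives raw Catalyst Attributes and Expressions rather than the stable Filter objects. A sketch of a relation that exposes one int column and ignores the pushed-down arguments (class name hypothetical):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
    import org.apache.spark.sql.sources.{BaseRelation, CatalystScan}
    import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

    class TenRowsRelation(val sqlContext: SQLContext) extends BaseRelation with CatalystScan {
      override def schema: StructType = StructType(StructField("i", IntegerType) :: Nil)
      override def buildScan(requiredColumns: Seq[Attribute], filters: Seq[Expression]): RDD[Row] =
        sqlContext.sparkContext.parallelize(0 until 10).map(Row(_))   // pushdown ignored
    }
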
spark-sql_2.10-1.3.0.jar, CatalystStructConverter.class
package org.apache.spark.sql.parquet
CatalystStructConverter.CatalystStructConverter ( org.apache.spark.sql.types.StructField[ ] schema, int index, CatalystConverter parent )
[mangled: org/apache/spark/sql/parquet/CatalystStructConverter."<init>":([Lorg/apache/spark/sql/types/StructField;ILorg/apache/spark/sql/parquet/CatalystConverter;)V]
spark-sql_2.10-1.3.0.jar, Column.class
package org.apache.spark.sql
Column.and ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.and:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.apply ( catalyst.expressions.Expression p1 ) [static] : Column
[mangled: org/apache/spark/sql/Column.apply:(Lorg/apache/spark/sql/catalyst/expressions/Expression;)Lorg/apache/spark/sql/Column;]
Column.apply ( String p1 ) [static] : Column
[mangled: org/apache/spark/sql/Column.apply:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.as ( scala.Symbol alias ) : Column
[mangled: org/apache/spark/sql/Column.as:(Lscala/Symbol;)Lorg/apache/spark/sql/Column;]
Column.as ( String alias ) : Column
[mangled: org/apache/spark/sql/Column.as:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.asc ( ) : Column
[mangled: org/apache/spark/sql/Column.asc:()Lorg/apache/spark/sql/Column;]
Column.cast ( types.DataType to ) : Column
[mangled: org/apache/spark/sql/Column.cast:(Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/Column;]
Column.cast ( String to ) : Column
[mangled: org/apache/spark/sql/Column.cast:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.Column ( catalyst.expressions.Expression expr )
[mangled: org/apache/spark/sql/Column."<init>":(Lorg/apache/spark/sql/catalyst/expressions/Expression;)V]
Column.Column ( String name )
[mangled: org/apache/spark/sql/Column."<init>":(Ljava/lang/String;)V]
Column.contains ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.contains:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.desc ( ) : Column
[mangled: org/apache/spark/sql/Column.desc:()Lorg/apache/spark/sql/Column;]
Column.divide ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.divide:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.endsWith ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.endsWith:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.endsWith ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.endsWith:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.eqNullSafe ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.eqNullSafe:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.equalTo ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.equalTo:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.explain ( boolean extended ) : void
[mangled: org/apache/spark/sql/Column.explain:(Z)V]
Column.expr ( ) : catalyst.expressions.Expression
[mangled: org/apache/spark/sql/Column.expr:()Lorg/apache/spark/sql/catalyst/expressions/Expression;]
Column.geq ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.geq:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.getField ( String fieldName ) : Column
[mangled: org/apache/spark/sql/Column.getField:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.getItem ( int ordinal ) : Column
[mangled: org/apache/spark/sql/Column.getItem:(I)Lorg/apache/spark/sql/Column;]
Column.gt ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.gt:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.in ( Column... list ) : Column
[mangled: org/apache/spark/sql/Column.in:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.in ( scala.collection.Seq<Column> list ) : Column
[mangled: org/apache/spark/sql/Column.in:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
Column.isNotNull ( ) : Column
[mangled: org/apache/spark/sql/Column.isNotNull:()Lorg/apache/spark/sql/Column;]
Column.isNull ( ) : Column
[mangled: org/apache/spark/sql/Column.isNull:()Lorg/apache/spark/sql/Column;]
Column.leq ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.leq:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.like ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.like:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.lt ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.lt:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.minus ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.minus:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.mod ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.mod:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.multiply ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.multiply:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.notEqual ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.notEqual:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.or ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.or:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.plus ( Object other ) : Column
[mangled: org/apache/spark/sql/Column.plus:(Ljava/lang/Object;)Lorg/apache/spark/sql/Column;]
Column.rlike ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.rlike:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.startsWith ( Column other ) : Column
[mangled: org/apache/spark/sql/Column.startsWith:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.startsWith ( String literal ) : Column
[mangled: org/apache/spark/sql/Column.startsWith:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
Column.substr ( int startPos, int len ) : Column
[mangled: org/apache/spark/sql/Column.substr:(II)Lorg/apache/spark/sql/Column;]
Column.substr ( Column startPos, Column len ) : Column
[mangled: org/apache/spark/sql/Column.substr:(Lorg/apache/spark/sql/Column;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/Column;]
Column.toString ( ) : String
[mangled: org/apache/spark/sql/Column.toString:()Ljava/lang/String;]
Column.unapply ( Column p1 ) [static] : scala.Option<catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/Column.unapply:(Lorg/apache/spark/sql/Column;)Lscala/Option;]
Column.unary_.bang ( ) : Column
[mangled: org/apache/spark/sql/Column.unary_.bang:()Lorg/apache/spark/sql/Column;]
Column.unary_.minus ( ) : Column
[mangled: org/apache/spark/sql/Column.unary_.minus:()Lorg/apache/spark/sql/Column;]
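
These Column methods are the named (Java-friendly) forms of the DataFrame expression DSL: geq/gt/leq/lt for comparisons, and/or for boolean logic, plus/minus/multiply/divide/mod for arithmetic, with unary_! and unary_- printed in the report's dot-for-$ notation. A small example using only methods from the listing, assuming df has "name" and "age" columns:

    import org.apache.spark.sql.DataFrame

    def adultsStartingWithA(df: DataFrame): DataFrame =
      df.filter(df("age").geq(18).and(df("name").startsWith("A")))
        .select(df("name").as("adult_name"), df("age").cast("long"))
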
spark-sql_2.10-1.3.0.jar, ColumnBuilder.class
package org.apache.spark.sql.columnar
ColumnBuilder.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnBuilder.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, ColumnName.class
package org.apache.spark.sql
ColumnName.ColumnName ( String name )
[mangled: org/apache/spark/sql/ColumnName."<init>":(Ljava/lang/String;)V]
spark-sql_2.10-1.3.0.jar, ColumnStats.class
package org.apache.spark.sql.columnar
ColumnStats.collectedStatistics ( ) [abstract] : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/ColumnStats.collectedStatistics:()Lorg/apache/spark/sql/Row;]
ColumnStats.count ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/ColumnStats.count:()I]
ColumnStats.count_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.count_.eq:(I)V]
ColumnStats.gatherStats ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.gatherStats:(Lorg/apache/spark/sql/Row;I)V]
ColumnStats.nullCount ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/ColumnStats.nullCount:()I]
ColumnStats.nullCount_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.nullCount_.eq:(I)V]
ColumnStats.sizeInBytes ( ) [abstract] : long
[mangled: org/apache/spark/sql/columnar/ColumnStats.sizeInBytes:()J]
ColumnStats.sizeInBytes_.eq ( long p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/ColumnStats.sizeInBytes_.eq:(J)V]
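
ColumnStats is fed once per row through gatherStats(row, ordinal) and reports its accumulated values via collectedStatistics. A hypothetical mirror of that contract for an int column (not the internal trait itself):

    import org.apache.spark.sql.Row

    class ToyIntStats {
      var count = 0
      var nullCount = 0
      var sizeInBytes = 0L

      def gatherStats(row: Row, ordinal: Int): Unit = {
        count += 1
        if (row.isNullAt(ordinal)) nullCount += 1 else sizeInBytes += 4
      }

      def collectedStatistics: Row = Row(count, nullCount, sizeInBytes)
    }
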
spark-sql_2.10-1.3.0.jar, CompressionScheme.class
package org.apache.spark.sql.columnar.compression
CompressionScheme.encoder ( org.apache.spark.sql.columnar.NativeColumnType<T> p1 ) [abstract] : Encoder<T>
[mangled: org/apache/spark/sql/columnar/compression/CompressionScheme.encoder:(Lorg/apache/spark/sql/columnar/NativeColumnType;)Lorg/apache/spark/sql/columnar/compression/Encoder;]
spark-sql_2.10-1.3.0.jar, CreatableRelationProvider.class
package org.apache.spark.sql.sources
CreatableRelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, org.apache.spark.sql.SaveMode p2, scala.collection.immutable.Map<String,String> p3, org.apache.spark.sql.DataFrame p4 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/CreatableRelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/sources/BaseRelation;]
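
CreatableRelationProvider is how a 1.3.x data source supports writes: the planner hands it the target SaveMode, the user's OPTIONS map, and the DataFrame to persist. A sketch that only logs and returns a relation describing the written schema (provider name hypothetical):

    import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
    import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider}
    import org.apache.spark.sql.types.StructType

    class ToyProvider extends CreatableRelationProvider {
      override def createRelation(ctx: SQLContext, mode: SaveMode,
          parameters: Map[String, String], data: DataFrame): BaseRelation = {
        // A real provider would write `data` out here, honoring `mode`.
        println(s"mode=$mode, rows=${data.count()}, path=${parameters.getOrElse("path", "?")}")
        new BaseRelation {
          override def sqlContext: SQLContext = ctx
          override def schema: StructType = data.schema
        }
      }
    }
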
spark-sql_2.10-1.3.0.jar, CreateTableUsing.class
package org.apache.spark.sql.sources
CreateTableUsing.allowExisting ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.allowExisting:()Z]
CreateTableUsing.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.canEqual:(Ljava/lang/Object;)Z]
CreateTableUsing.copy ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, boolean temporary, scala.collection.immutable.Map<String,String> options, boolean allowExisting, boolean managedIfNoPath ) : CreateTableUsing
[mangled: org/apache/spark/sql/sources/CreateTableUsing.copy:(Ljava/lang/String;Lscala/Option;Ljava/lang/String;ZLscala/collection/immutable/Map;ZZ)Lorg/apache/spark/sql/sources/CreateTableUsing;]
CreateTableUsing.CreateTableUsing ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, boolean temporary, scala.collection.immutable.Map<String,String> options, boolean allowExisting, boolean managedIfNoPath )
[mangled: org/apache/spark/sql/sources/CreateTableUsing."<init>":(Ljava/lang/String;Lscala/Option;Ljava/lang/String;ZLscala/collection/immutable/Map;ZZ)V]
CreateTableUsing.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.types.StructType>,scala.Function1<String,scala.Function1<Object,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<Object,scala.Function1<Object,CreateTableUsing>>>>>>>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.curried:()Lscala/Function1;]
CreateTableUsing.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.equals:(Ljava/lang/Object;)Z]
CreateTableUsing.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsing.hashCode:()I]
CreateTableUsing.managedIfNoPath ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.managedIfNoPath:()Z]
CreateTableUsing.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.options:()Lscala/collection/immutable/Map;]
CreateTableUsing.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productArity:()I]
CreateTableUsing.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productElement:(I)Ljava/lang/Object;]
CreateTableUsing.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productIterator:()Lscala/collection/Iterator;]
CreateTableUsing.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.productPrefix:()Ljava/lang/String;]
CreateTableUsing.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.provider:()Ljava/lang/String;]
CreateTableUsing.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsing.tableName:()Ljava/lang/String;]
CreateTableUsing.temporary ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsing.temporary:()Z]
CreateTableUsing.tupled ( ) [static] : scala.Function1<scala.Tuple7<String,scala.Option<org.apache.spark.sql.types.StructType>,String,Object,scala.collection.immutable.Map<String,String>,Object,Object>,CreateTableUsing>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.tupled:()Lscala/Function1;]
CreateTableUsing.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/sources/CreateTableUsing.userSpecifiedSchema:()Lscala/Option;]
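
CreateTableUsing is the logical plan behind the data-source DDL; temporary, allowExisting and managedIfNoPath record the statement's flags. A sketch of the 1.3.x surface syntax (table name and path hypothetical):

    sqlContext.sql(
      """CREATE TEMPORARY TABLE points
        |USING org.apache.spark.sql.parquet
        |OPTIONS (path '/tmp/points.parquet')""".stripMargin)
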
spark-sql_2.10-1.3.0.jar, CreateTableUsingAsSelect.class
package org.apache.spark.sql.sources
CreateTableUsingAsSelect.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.canEqual:(Ljava/lang/Object;)Z]
CreateTableUsingAsSelect.child ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.child:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CreateTableUsingAsSelect.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
CreateTableUsingAsSelect.copy ( String tableName, String provider, boolean temporary, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child ) : CreateTableUsingAsSelect
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.copy:(Ljava/lang/String;Ljava/lang/String;ZLorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/sources/CreateTableUsingAsSelect;]
CreateTableUsingAsSelect.CreateTableUsingAsSelect ( String tableName, String provider, boolean temporary, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child )
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect."<init>":(Ljava/lang/String;Ljava/lang/String;ZLorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CreateTableUsingAsSelect.curried ( ) [static] : scala.Function1<String,scala.Function1<String,scala.Function1<Object,scala.Function1<org.apache.spark.sql.SaveMode,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,CreateTableUsingAsSelect>>>>>>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.curried:()Lscala/Function1;]
CreateTableUsingAsSelect.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.equals:(Ljava/lang/Object;)Z]
CreateTableUsingAsSelect.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.hashCode:()I]
CreateTableUsingAsSelect.mode ( ) : org.apache.spark.sql.SaveMode
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.mode:()Lorg/apache/spark/sql/SaveMode;]
CreateTableUsingAsSelect.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.options:()Lscala/collection/immutable/Map;]
CreateTableUsingAsSelect.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.output:()Lscala/collection/Seq;]
CreateTableUsingAsSelect.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productArity:()I]
CreateTableUsingAsSelect.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productElement:(I)Ljava/lang/Object;]
CreateTableUsingAsSelect.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productIterator:()Lscala/collection/Iterator;]
CreateTableUsingAsSelect.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.productPrefix:()Ljava/lang/String;]
CreateTableUsingAsSelect.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.provider:()Ljava/lang/String;]
CreateTableUsingAsSelect.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.tableName:()Ljava/lang/String;]
CreateTableUsingAsSelect.temporary ( ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.temporary:()Z]
CreateTableUsingAsSelect.tupled ( ) [static] : scala.Function1<scala.Tuple6<String,String,Object,org.apache.spark.sql.SaveMode,scala.collection.immutable.Map<String,String>,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,CreateTableUsingAsSelect>
[mangled: org/apache/spark/sql/sources/CreateTableUsingAsSelect.tupled:()Lscala/Function1;]
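
CreateTableUsingAsSelect is the same DDL with an AS SELECT child, carrying a SaveMode where CreateTableUsing carries allowExisting. Continuing the previous sketch:

    sqlContext.sql(
      """CREATE TEMPORARY TABLE points_copy
        |USING org.apache.spark.sql.parquet
        |OPTIONS (path '/tmp/points_copy.parquet')
        |AS SELECT * FROM points""".stripMargin)
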
spark-sql_2.10-1.3.0.jar, CreateTempTableUsing.class
package org.apache.spark.sql.sources
CreateTempTableUsing.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.canEqual:(Ljava/lang/Object;)Z]
CreateTempTableUsing.copy ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options ) : CreateTempTableUsing
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.copy:(Ljava/lang/String;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/CreateTempTableUsing;]
CreateTempTableUsing.CreateTempTableUsing ( String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options )
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing."<init>":(Ljava/lang/String;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)V]
CreateTempTableUsing.curried ( ) [static] : scala.Function1<String,scala.Function1<scala.Option<org.apache.spark.sql.types.StructType>,scala.Function1<String,scala.Function1<scala.collection.immutable.Map<String,String>,CreateTempTableUsing>>>>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.curried:()Lscala/Function1;]
CreateTempTableUsing.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.equals:(Ljava/lang/Object;)Z]
CreateTempTableUsing.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.hashCode:()I]
CreateTempTableUsing.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.options:()Lscala/collection/immutable/Map;]
CreateTempTableUsing.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productArity:()I]
CreateTempTableUsing.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productElement:(I)Ljava/lang/Object;]
CreateTempTableUsing.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productIterator:()Lscala/collection/Iterator;]
CreateTempTableUsing.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.productPrefix:()Ljava/lang/String;]
CreateTempTableUsing.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.provider:()Ljava/lang/String;]
CreateTempTableUsing.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<scala.runtime.Nothing.>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CreateTempTableUsing.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.tableName:()Ljava/lang/String;]
CreateTempTableUsing.tupled ( ) [static] : scala.Function1<scala.Tuple4<String,scala.Option<org.apache.spark.sql.types.StructType>,String,scala.collection.immutable.Map<String,String>>,CreateTempTableUsing>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.tupled:()Lscala/Function1;]
CreateTempTableUsing.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsing.userSpecifiedSchema:()Lscala/Option;]
spark-sql_2.10-1.3.0.jar, CreateTempTableUsingAsSelect.class
package org.apache.spark.sql.sources
CreateTempTableUsingAsSelect.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.canEqual:(Ljava/lang/Object;)Z]
CreateTempTableUsingAsSelect.copy ( String tableName, String provider, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query ) : CreateTempTableUsingAsSelect
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.copy:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Lorg/apache/spark/sql/sources/CreateTempTableUsingAsSelect;]
CreateTempTableUsingAsSelect.CreateTempTableUsingAsSelect ( String tableName, String provider, org.apache.spark.sql.SaveMode mode, scala.collection.immutable.Map<String,String> options, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query )
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect."<init>":(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
CreateTempTableUsingAsSelect.curried ( ) [static] : scala.Function1<String,scala.Function1<String,scala.Function1<org.apache.spark.sql.SaveMode,scala.Function1<scala.collection.immutable.Map<String,String>,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,CreateTempTableUsingAsSelect>>>>>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.curried:()Lscala/Function1;]
CreateTempTableUsingAsSelect.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.equals:(Ljava/lang/Object;)Z]
CreateTempTableUsingAsSelect.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.hashCode:()I]
CreateTempTableUsingAsSelect.mode ( ) : org.apache.spark.sql.SaveMode
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.mode:()Lorg/apache/spark/sql/SaveMode;]
CreateTempTableUsingAsSelect.options ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.options:()Lscala/collection/immutable/Map;]
CreateTempTableUsingAsSelect.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productArity:()I]
CreateTempTableUsingAsSelect.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productElement:(I)Ljava/lang/Object;]
CreateTempTableUsingAsSelect.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productIterator:()Lscala/collection/Iterator;]
CreateTempTableUsingAsSelect.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.productPrefix:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.provider ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.provider:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.query ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.query:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
CreateTempTableUsingAsSelect.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<scala.runtime.Nothing.>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
CreateTempTableUsingAsSelect.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.tableName:()Ljava/lang/String;]
CreateTempTableUsingAsSelect.tupled ( ) [static] : scala.Function1<scala.Tuple5<String,String,org.apache.spark.sql.SaveMode,scala.collection.immutable.Map<String,String>,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>,CreateTempTableUsingAsSelect>
[mangled: org/apache/spark/sql/sources/CreateTempTableUsingAsSelect.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, DataFrame.class
package org.apache.spark.sql
DataFrame.agg ( java.util.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( Column expr, Column... exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lorg/apache/spark/sql/Column;[Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( Column expr, scala.collection.Seq<Column> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lorg/apache/spark/sql/Column;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( scala.collection.immutable.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.agg ( scala.Tuple2<String,String> aggExpr, scala.collection.Seq<scala.Tuple2<String,String>> aggExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.agg:(Lscala/Tuple2;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.apply ( String colName ) : Column
[mangled: org/apache/spark/sql/DataFrame.apply:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
DataFrame.as ( scala.Symbol alias ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.as:(Lscala/Symbol;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.as ( String alias ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.as:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.cache ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.cache:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.cache ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.cache:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.col ( String colName ) : Column
[mangled: org/apache/spark/sql/DataFrame.col:(Ljava/lang/String;)Lorg/apache/spark/sql/Column;]
DataFrame.collect ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.collect:()Ljava/lang/Object;]
DataFrame.collect ( ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.collect:()[Lorg/apache/spark/sql/Row;]
DataFrame.collectAsList ( ) : java.util.List<Row>
[mangled: org/apache/spark/sql/DataFrame.collectAsList:()Ljava/util/List;]
DataFrame.columns ( ) : String[ ]
[mangled: org/apache/spark/sql/DataFrame.columns:()[Ljava/lang/String;]
DataFrame.count ( ) : long
[mangled: org/apache/spark/sql/DataFrame.count:()J]
DataFrame.createJDBCTable ( String url, String table, boolean allowExisting ) : void
[mangled: org/apache/spark/sql/DataFrame.createJDBCTable:(Ljava/lang/String;Ljava/lang/String;Z)V]
DataFrame.DataFrame ( SQLContext sqlContext, catalyst.plans.logical.LogicalPlan logicalPlan )
[mangled: org/apache/spark/sql/DataFrame."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
DataFrame.DataFrame ( SQLContext sqlContext, SQLContext.QueryExecution queryExecution )
[mangled: org/apache/spark/sql/DataFrame."<init>":(Lorg/apache/spark/sql/SQLContext;Lorg/apache/spark/sql/SQLContext$QueryExecution;)V]
DataFrame.distinct ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.distinct:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.dtypes ( ) : scala.Tuple2<String,String>[ ]
[mangled: org/apache/spark/sql/DataFrame.dtypes:()[Lscala/Tuple2;]
DataFrame.except ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.except:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.explain ( ) : void
[mangled: org/apache/spark/sql/DataFrame.explain:()V]
DataFrame.explain ( boolean extended ) : void
[mangled: org/apache/spark/sql/DataFrame.explain:(Z)V]
DataFrame.explode ( scala.collection.Seq<Column> input, scala.Function1<Row,scala.collection.TraversableOnce<A>> f, scala.reflect.api.TypeTags.TypeTag<A> p3 ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.explode:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.explode ( String inputColumn, String outputColumn, scala.Function1<A,scala.collection.TraversableOnce<B>> f, scala.reflect.api.TypeTags.TypeTag<B> p4 ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.explode:(Ljava/lang/String;Ljava/lang/String;Lscala/Function1;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.filter ( Column condition ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.filter:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.filter ( String conditionExpr ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.filter:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.first ( ) : Object
[mangled: org/apache/spark/sql/DataFrame.first:()Ljava/lang/Object;]
DataFrame.first ( ) : Row
[mangled: org/apache/spark/sql/DataFrame.first:()Lorg/apache/spark/sql/Row;]
DataFrame.flatMap ( scala.Function1<Row,scala.collection.TraversableOnce<R>> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.flatMap:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.foreach ( scala.Function1<Row,scala.runtime.BoxedUnit> f ) : void
[mangled: org/apache/spark/sql/DataFrame.foreach:(Lscala/Function1;)V]
DataFrame.foreachPartition ( scala.Function1<scala.collection.Iterator<Row>,scala.runtime.BoxedUnit> f ) : void
[mangled: org/apache/spark/sql/DataFrame.foreachPartition:(Lscala/Function1;)V]
DataFrame.groupBy ( Column... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( scala.collection.Seq<Column> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( String col1, scala.collection.Seq<String> cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.groupBy ( String col1, String... cols ) : GroupedData
[mangled: org/apache/spark/sql/DataFrame.groupBy:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/GroupedData;]
DataFrame.head ( ) : Row
[mangled: org/apache/spark/sql/DataFrame.head:()Lorg/apache/spark/sql/Row;]
DataFrame.head ( int n ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.head:(I)[Lorg/apache/spark/sql/Row;]
DataFrame.insertInto ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.insertInto:(Ljava/lang/String;)V]
DataFrame.insertInto ( String tableName, boolean overwrite ) : void
[mangled: org/apache/spark/sql/DataFrame.insertInto:(Ljava/lang/String;Z)V]
DataFrame.insertIntoJDBC ( String url, String table, boolean overwrite ) : void
[mangled: org/apache/spark/sql/DataFrame.insertIntoJDBC:(Ljava/lang/String;Ljava/lang/String;Z)V]
DataFrame.intersect ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.intersect:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.isLocal ( ) : boolean
[mangled: org/apache/spark/sql/DataFrame.isLocal:()Z]
DataFrame.javaRDD ( ) : org.apache.spark.api.java.JavaRDD<Row>
[mangled: org/apache/spark/sql/DataFrame.javaRDD:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.javaToPython ( ) : org.apache.spark.api.java.JavaRDD<byte[ ]>
[mangled: org/apache/spark/sql/DataFrame.javaToPython:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.join ( DataFrame right ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.join ( DataFrame right, Column joinExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.join ( DataFrame right, Column joinExprs, String joinType ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.join:(Lorg/apache/spark/sql/DataFrame;Lorg/apache/spark/sql/Column;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.limit ( int n ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.limit:(I)Lorg/apache/spark/sql/DataFrame;]
DataFrame.logicalPlan ( ) : catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/DataFrame.logicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
DataFrame.map ( scala.Function1<Row,R> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.map:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.mapPartitions ( scala.Function1<scala.collection.Iterator<Row>,scala.collection.Iterator<R>> f, scala.reflect.ClassTag<R> p2 ) : org.apache.spark.rdd.RDD<R>
[mangled: org/apache/spark/sql/DataFrame.mapPartitions:(Lscala/Function1;Lscala/reflect/ClassTag;)Lorg/apache/spark/rdd/RDD;]
DataFrame.numericColumns ( ) : scala.collection.Seq<catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/DataFrame.numericColumns:()Lscala/collection/Seq;]
DataFrame.orderBy ( Column... sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( scala.collection.Seq<Column> sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( String sortCol, scala.collection.Seq<String> sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.orderBy ( String sortCol, String... sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.orderBy:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.persist:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.persist ( org.apache.spark.storage.StorageLevel newLevel ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.persist ( org.apache.spark.storage.StorageLevel newLevel ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.persist:(Lorg/apache/spark/storage/StorageLevel;)Lorg/apache/spark/sql/RDDApi;]
DataFrame.printSchema ( ) : void
[mangled: org/apache/spark/sql/DataFrame.printSchema:()V]
DataFrame.queryExecution ( ) : SQLContext.QueryExecution
[mangled: org/apache/spark/sql/DataFrame.queryExecution:()Lorg/apache/spark/sql/SQLContext$QueryExecution;]
DataFrame.rdd ( ) : org.apache.spark.rdd.RDD<Row>
[mangled: org/apache/spark/sql/DataFrame.rdd:()Lorg/apache/spark/rdd/RDD;]
DataFrame.registerTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.registerTempTable:(Ljava/lang/String;)V]
DataFrame.repartition ( int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.repartition:(I)Lorg/apache/spark/sql/DataFrame;]
DataFrame.resolve ( String colName ) : catalyst.expressions.NamedExpression
[mangled: org/apache/spark/sql/DataFrame.resolve:(Ljava/lang/String;)Lorg/apache/spark/sql/catalyst/expressions/NamedExpression;]
DataFrame.sample ( boolean withReplacement, double fraction ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sample:(ZD)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sample ( boolean withReplacement, double fraction, long seed ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sample:(ZDJ)Lorg/apache/spark/sql/DataFrame;]
DataFrame.save ( String path ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;)V]
DataFrame.save ( String path, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.save ( String path, String source ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Ljava/lang/String;)V]
DataFrame.save ( String path, String source, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.save ( String source, SaveMode mode, java.util.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Ljava/util/Map;)V]
DataFrame.save ( String source, SaveMode mode, scala.collection.immutable.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.save:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;)V]
DataFrame.saveAsParquetFile ( String path ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsParquetFile:(Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.saveAsTable ( String tableName, String source ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode, java.util.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Ljava/util/Map;)V]
DataFrame.saveAsTable ( String tableName, String source, SaveMode mode, scala.collection.immutable.Map<String,String> options ) : void
[mangled: org/apache/spark/sql/DataFrame.saveAsTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;)V]
DataFrame.schema ( ) : types.StructType
[mangled: org/apache/spark/sql/DataFrame.schema:()Lorg/apache/spark/sql/types/StructType;]
DataFrame.select ( Column... cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( scala.collection.Seq<Column> cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( String col, scala.collection.Seq<String> cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.select ( String col, String... cols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.select:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.selectExpr ( scala.collection.Seq<String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.selectExpr:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.selectExpr ( String... exprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.selectExpr:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.show ( ) : void
[mangled: org/apache/spark/sql/DataFrame.show:()V]
DataFrame.show ( int numRows ) : void
[mangled: org/apache/spark/sql/DataFrame.show:(I)V]
DataFrame.showString ( int numRows ) : String
[mangled: org/apache/spark/sql/DataFrame.showString:(I)Ljava/lang/String;]
DataFrame.sort ( Column... sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:([Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( scala.collection.Seq<Column> sortExprs ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( String sortCol, scala.collection.Seq<String> sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Ljava/lang/String;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sort ( String sortCol, String... sortCols ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.sort:(Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.sqlContext ( ) : SQLContext
[mangled: org/apache/spark/sql/DataFrame.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
DataFrame.take ( int n ) : Object
[mangled: org/apache/spark/sql/DataFrame.take:(I)Ljava/lang/Object;]
DataFrame.take ( int n ) : Row[ ]
[mangled: org/apache/spark/sql/DataFrame.take:(I)[Lorg/apache/spark/sql/Row;]
DataFrame.toDF ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.toDF ( scala.collection.Seq<String> colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.toDF ( String... colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.toDF:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.toJavaRDD ( ) : org.apache.spark.api.java.JavaRDD<Row>
[mangled: org/apache/spark/sql/DataFrame.toJavaRDD:()Lorg/apache/spark/api/java/JavaRDD;]
DataFrame.toJSON ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/DataFrame.toJSON:()Lorg/apache/spark/rdd/RDD;]
DataFrame.toString ( ) : String
[mangled: org/apache/spark/sql/DataFrame.toString:()Ljava/lang/String;]
DataFrame.unionAll ( DataFrame other ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unionAll:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unpersist:()Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:()Lorg/apache/spark/sql/RDDApi;]
DataFrame.unpersist ( boolean blocking ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.unpersist:(Z)Lorg/apache/spark/sql/DataFrame;]
DataFrame.unpersist ( boolean blocking ) : RDDApi
[mangled: org/apache/spark/sql/DataFrame.unpersist:(Z)Lorg/apache/spark/sql/RDDApi;]
DataFrame.where ( Column condition ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.where:(Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.withColumn ( String colName, Column col ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.withColumn:(Ljava/lang/String;Lorg/apache/spark/sql/Column;)Lorg/apache/spark/sql/DataFrame;]
DataFrame.withColumnRenamed ( String existingName, String newName ) : DataFrame
[mangled: org/apache/spark/sql/DataFrame.withColumnRenamed:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
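The DataFrame methods above form the core user-facing query API of 1.3.0; the class does not exist in 1.0.0, so client code compiled against these signatures will not link there. A minimal sketch of typical calls against this surface (the column names, data, and app name are illustrative assumptions):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object DataFrameSketch extends App {
      val sc = new SparkContext(new SparkConf().setAppName("df-sketch").setMaster("local[2]"))
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      val df = sc.parallelize(Seq(("alice", 30), ("bob", 25))).toDF("name", "age")
      df.filter(df("age") > 26)   // filter(Column)
        .select("name")           // select(String, String...)
        .show()                   // show(): void
      df.groupBy("name").agg(Map("age" -> "max")).collect() // agg(immutable.Map)
      sc.stop()
    }
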
spark-sql_2.10-1.3.0.jar, DataFrameHolder.class
package org.apache.spark.sql
DataFrameHolder.andThen ( scala.Function1<DataFrameHolder,A> p1 ) [static] : scala.Function1<DataFrame,A>
[mangled: org/apache/spark/sql/DataFrameHolder.andThen:(Lscala/Function1;)Lscala/Function1;]
DataFrameHolder.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/DataFrameHolder.canEqual:(Ljava/lang/Object;)Z]
DataFrameHolder.compose ( scala.Function1<A,DataFrame> p1 ) [static] : scala.Function1<A,DataFrameHolder>
[mangled: org/apache/spark/sql/DataFrameHolder.compose:(Lscala/Function1;)Lscala/Function1;]
DataFrameHolder.copy ( DataFrame df ) : DataFrameHolder
[mangled: org/apache/spark/sql/DataFrameHolder.copy:(Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/DataFrameHolder;]
DataFrameHolder.DataFrameHolder ( DataFrame df )
[mangled: org/apache/spark/sql/DataFrameHolder."<init>":(Lorg/apache/spark/sql/DataFrame;)V]
DataFrameHolder.df ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.df:()Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/DataFrameHolder.equals:(Ljava/lang/Object;)Z]
DataFrameHolder.hashCode ( ) : int
[mangled: org/apache/spark/sql/DataFrameHolder.hashCode:()I]
DataFrameHolder.productArity ( ) : int
[mangled: org/apache/spark/sql/DataFrameHolder.productArity:()I]
DataFrameHolder.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/DataFrameHolder.productElement:(I)Ljava/lang/Object;]
DataFrameHolder.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/DataFrameHolder.productIterator:()Lscala/collection/Iterator;]
DataFrameHolder.productPrefix ( ) : String
[mangled: org/apache/spark/sql/DataFrameHolder.productPrefix:()Ljava/lang/String;]
DataFrameHolder.toDF ( ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.toDF:()Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.toDF ( scala.collection.Seq<String> colNames ) : DataFrame
[mangled: org/apache/spark/sql/DataFrameHolder.toDF:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
DataFrameHolder.toString ( ) : String
[mangled: org/apache/spark/sql/DataFrameHolder.toString:()Ljava/lang/String;]
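DataFrameHolder is the thin wrapper returned by the toDF implicit conversions in SQLContext.implicits; user code normally only ever calls its toDF methods. A self-contained sketch (the local master and column name are assumptions):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object HolderSketch extends App {
      val sc = new SparkContext(new SparkConf().setAppName("holder").setMaster("local[2]"))
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._
      // The implicit conversion wraps the RDD in a DataFrameHolder;
      // toDF("n") then names the single column and yields the DataFrame.
      val df = sc.parallelize(Seq(1, 2, 3)).map(Tuple1(_)).toDF("n")
      df.show()
      sc.stop()
    }
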
spark-sql_2.10-1.3.0.jar, DateColumnAccessor.class
package org.apache.spark.sql.columnar
DateColumnAccessor.DateColumnAccessor ( java.nio.ByteBuffer buffer )
[mangled: org/apache/spark/sql/columnar/DateColumnAccessor."<init>":(Ljava/nio/ByteBuffer;)V]
spark-sql_2.10-1.3.0.jar, DateColumnBuilder.class
package org.apache.spark.sql.columnar
DateColumnBuilder.DateColumnBuilder ( )
[mangled: org/apache/spark/sql/columnar/DateColumnBuilder."<init>":()V]
spark-sql_2.10-1.3.0.jar, DateColumnStats.class
package org.apache.spark.sql.columnar
DateColumnStats.DateColumnStats ( )
[mangled: org/apache/spark/sql/columnar/DateColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, DDLParser.class
package org.apache.spark.sql.sources
DDLParser.apply ( String input, boolean exceptionOnError ) : scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/sources/DDLParser.apply:(Ljava/lang/String;Z)Lscala/Option;]
DDLParser.DDLParser ( scala.Function1<String,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> parseQuery )
[mangled: org/apache/spark/sql/sources/DDLParser."<init>":(Lscala/Function1;)V]
spark-sql_2.10-1.3.0.jar, Decoder<T>.class
package org.apache.spark.sql.columnar.compression
Decoder<T>.hasNext ( ) [abstract] : boolean
[mangled: org/apache/spark/sql/columnar/compression/Decoder<T>.hasNext:()Z]
Decoder<T>.next ( org.apache.spark.sql.catalyst.expressions.MutableRow p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Decoder<T>.next:(Lorg/apache/spark/sql/catalyst/expressions/MutableRow;I)V]
spark-sql_2.10-1.3.0.jar, DescribeCommand.class
package org.apache.spark.sql.execution
DescribeCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/DescribeCommand.canEqual:(Ljava/lang/Object;)Z]
DescribeCommand.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/DescribeCommand.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
DescribeCommand.copy ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean isExtended ) : DescribeCommand
[mangled: org/apache/spark/sql/execution/DescribeCommand.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Z)Lorg/apache/spark/sql/execution/DescribeCommand;]
DescribeCommand.curried ( ) [static] : scala.Function1<SparkPlan,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<Object,DescribeCommand>>>
[mangled: org/apache/spark/sql/execution/DescribeCommand.curried:()Lscala/Function1;]
DescribeCommand.DescribeCommand ( SparkPlan child, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean isExtended )
[mangled: org/apache/spark/sql/execution/DescribeCommand."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lscala/collection/Seq;Z)V]
DescribeCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/DescribeCommand.equals:(Ljava/lang/Object;)Z]
DescribeCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/DescribeCommand.hashCode:()I]
DescribeCommand.isExtended ( ) : boolean
[mangled: org/apache/spark/sql/execution/DescribeCommand.isExtended:()Z]
DescribeCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/DescribeCommand.output:()Lscala/collection/Seq;]
DescribeCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/DescribeCommand.productArity:()I]
DescribeCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/DescribeCommand.productElement:(I)Ljava/lang/Object;]
DescribeCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/DescribeCommand.productIterator:()Lscala/collection/Iterator;]
DescribeCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/DescribeCommand.productPrefix:()Ljava/lang/String;]
DescribeCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/DescribeCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
DescribeCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<SparkPlan,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,Object>,DescribeCommand>
[mangled: org/apache/spark/sql/execution/DescribeCommand.tupled:()Lscala/Function1;]
package org.apache.spark.sql.sources
DescribeCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.canEqual:(Ljava/lang/Object;)Z]
DescribeCommand.copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan table, boolean isExtended ) : DescribeCommand
[mangled: org/apache/spark/sql/sources/DescribeCommand.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)Lorg/apache/spark/sql/sources/DescribeCommand;]
DescribeCommand.curried ( ) [static] : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<Object,DescribeCommand>>
[mangled: org/apache/spark/sql/sources/DescribeCommand.curried:()Lscala/Function1;]
DescribeCommand.DescribeCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan table, boolean isExtended )
[mangled: org/apache/spark/sql/sources/DescribeCommand."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)V]
DescribeCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.equals:(Ljava/lang/Object;)Z]
DescribeCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/DescribeCommand.hashCode:()I]
DescribeCommand.isExtended ( ) : boolean
[mangled: org/apache/spark/sql/sources/DescribeCommand.isExtended:()Z]
DescribeCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/DescribeCommand.output:()Lscala/collection/Seq;]
DescribeCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/DescribeCommand.productArity:()I]
DescribeCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/DescribeCommand.productElement:(I)Ljava/lang/Object;]
DescribeCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/DescribeCommand.productIterator:()Lscala/collection/Iterator;]
DescribeCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/DescribeCommand.productPrefix:()Ljava/lang/String;]
DescribeCommand.table ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/DescribeCommand.table:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
DescribeCommand.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,Object>,DescribeCommand>
[mangled: org/apache/spark/sql/sources/DescribeCommand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, Distinct.class
package org.apache.spark.sql.execution
Distinct.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Distinct.canEqual:(Ljava/lang/Object;)Z]
Distinct.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Distinct.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Distinct.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Distinct.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
Distinct.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/Distinct.children:()Lscala/collection/immutable/List;]
Distinct.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Distinct.children:()Lscala/collection/Seq;]
Distinct.copy ( boolean partial, SparkPlan child ) : Distinct
[mangled: org/apache/spark/sql/execution/Distinct.copy:(ZLorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Distinct;]
Distinct.curried ( ) [static] : scala.Function1<Object,scala.Function1<SparkPlan,Distinct>>
[mangled: org/apache/spark/sql/execution/Distinct.curried:()Lscala/Function1;]
Distinct.Distinct ( boolean partial, SparkPlan child )
[mangled: org/apache/spark/sql/execution/Distinct."<init>":(ZLorg/apache/spark/sql/execution/SparkPlan;)V]
Distinct.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Distinct.equals:(Ljava/lang/Object;)Z]
Distinct.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/Distinct.execute:()Lorg/apache/spark/rdd/RDD;]
Distinct.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/Distinct.hashCode:()I]
Distinct.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Distinct.output:()Lscala/collection/Seq;]
Distinct.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/Distinct.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
Distinct.partial ( ) : boolean
[mangled: org/apache/spark/sql/execution/Distinct.partial:()Z]
Distinct.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/Distinct.productArity:()I]
Distinct.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/Distinct.productElement:(I)Ljava/lang/Object;]
Distinct.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/Distinct.productIterator:()Lscala/collection/Iterator;]
Distinct.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/Distinct.productPrefix:()Ljava/lang/String;]
Distinct.requiredChildDistribution ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution>
[mangled: org/apache/spark/sql/execution/Distinct.requiredChildDistribution:()Lscala/collection/Seq;]
Distinct.tupled ( ) [static] : scala.Function1<scala.Tuple2<Object,SparkPlan>,Distinct>
[mangled: org/apache/spark/sql/execution/Distinct.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, DriverQuirks.class
package org.apache.spark.sql.jdbc
DriverQuirks.DriverQuirks ( )
[mangled: org/apache/spark/sql/jdbc/DriverQuirks."<init>":()V]
DriverQuirks.get ( String p1 ) [static] : DriverQuirks
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.get:(Ljava/lang/String;)Lorg/apache/spark/sql/jdbc/DriverQuirks;]
DriverQuirks.getCatalystType ( int p1, String p2, int p3, org.apache.spark.sql.types.MetadataBuilder p4 ) [abstract] : org.apache.spark.sql.types.DataType
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.getCatalystType:(ILjava/lang/String;ILorg/apache/spark/sql/types/MetadataBuilder;)Lorg/apache/spark/sql/types/DataType;]
DriverQuirks.getJDBCType ( org.apache.spark.sql.types.DataType p1 ) [abstract] : scala.Tuple2<String,scala.Option<Object>>
[mangled: org/apache/spark/sql/jdbc/DriverQuirks.getJDBCType:(Lorg/apache/spark/sql/types/DataType;)Lscala/Tuple2;]
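DriverQuirks is the 1.3.0 hook for vendor-specific JDBC type mappings. A hedged subclass sketch, assuming the class is accessible from the calling code; returning null / (null, None) defers to Spark's default mapping:

    import org.apache.spark.sql.jdbc.DriverQuirks
    import org.apache.spark.sql.types.{DataType, MetadataBuilder}

    class PassThroughQuirks extends DriverQuirks {
      // No vendor-specific mapping: let Spark's default JDBC mapping apply.
      def getCatalystType(sqlType: Int, typeName: String, size: Int,
                          md: MetadataBuilder): DataType = null
      def getJDBCType(dt: DataType): (String, Option[Int]) = (null, None)
    }
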
spark-sql_2.10-1.3.0.jar, Encoder<T>.class
package org.apache.spark.sql.columnar.compression
Encoder<T>.compress ( java.nio.ByteBuffer p1, java.nio.ByteBuffer p2 ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.compress:(Ljava/nio/ByteBuffer;Ljava/nio/ByteBuffer;)Ljava/nio/ByteBuffer;]
Encoder<T>.gatherCompressibilityStats ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/compression/Encoder<T>.gatherCompressibilityStats:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, EqualTo.class
package org.apache.spark.sql.sources
EqualTo.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.attribute:()Ljava/lang/String;]
EqualTo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/EqualTo.canEqual:(Ljava/lang/Object;)Z]
EqualTo.copy ( String attribute, Object value ) : EqualTo
[mangled: org/apache/spark/sql/sources/EqualTo.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/EqualTo;]
EqualTo.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,EqualTo>>
[mangled: org/apache/spark/sql/sources/EqualTo.curried:()Lscala/Function1;]
EqualTo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/EqualTo.equals:(Ljava/lang/Object;)Z]
EqualTo.EqualTo ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/EqualTo."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
EqualTo.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/EqualTo.hashCode:()I]
EqualTo.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/EqualTo.productArity:()I]
EqualTo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/EqualTo.productElement:(I)Ljava/lang/Object;]
EqualTo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/EqualTo.productIterator:()Lscala/collection/Iterator;]
EqualTo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.productPrefix:()Ljava/lang/String;]
EqualTo.toString ( ) : String
[mangled: org/apache/spark/sql/sources/EqualTo.toString:()Ljava/lang/String;]
EqualTo.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,EqualTo>
[mangled: org/apache/spark/sql/sources/EqualTo.tupled:()Lscala/Function1;]
EqualTo.value ( ) : Object
[mangled: org/apache/spark/sql/sources/EqualTo.value:()Ljava/lang/Object;]
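EqualTo is one of the org.apache.spark.sql.sources filter case classes handed to external data sources. A small sketch of pattern-matching it the way a buildScan-style method would (describeFilters is an illustrative helper, not library API):

    import org.apache.spark.sql.sources.{EqualTo, Filter}

    object FilterSketch extends App {
      // Keep only the equality predicates we can describe; ignore other filters.
      def describeFilters(filters: Array[Filter]): Seq[String] =
        filters.collect { case EqualTo(attribute, value) => s"$attribute = $value" }

      println(describeFilters(Array(EqualTo("name", "alice")))) // WrappedArray(name = alice)
    }
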
spark-sql_2.10-1.3.0.jar, EvaluatePython.class
package org.apache.spark.sql.execution
EvaluatePython.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/EvaluatePython.canEqual:(Ljava/lang/Object;)Z]
EvaluatePython.child ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/execution/EvaluatePython.child:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
EvaluatePython.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/EvaluatePython.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
EvaluatePython.copy ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute ) : EvaluatePython
[mangled: org/apache/spark/sql/execution/EvaluatePython.copy:(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)Lorg/apache/spark/sql/execution/EvaluatePython;]
EvaluatePython.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/EvaluatePython.equals:(Ljava/lang/Object;)Z]
EvaluatePython.EvaluatePython ( PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute )
[mangled: org/apache/spark/sql/execution/EvaluatePython."<init>":(Lorg/apache/spark/sql/execution/PythonUDF;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;)V]
EvaluatePython.fromJava ( Object p1, org.apache.spark.sql.types.DataType p2 ) [static] : Object
[mangled: org/apache/spark/sql/execution/EvaluatePython.fromJava:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Ljava/lang/Object;]
EvaluatePython.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/EvaluatePython.hashCode:()I]
EvaluatePython.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/EvaluatePython.output:()Lscala/collection/Seq;]
EvaluatePython.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/EvaluatePython.productArity:()I]
EvaluatePython.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/EvaluatePython.productElement:(I)Ljava/lang/Object;]
EvaluatePython.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/EvaluatePython.productIterator:()Lscala/collection/Iterator;]
EvaluatePython.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/EvaluatePython.productPrefix:()Ljava/lang/String;]
EvaluatePython.references ( ) : org.apache.spark.sql.catalyst.expressions.AttributeSet
[mangled: org/apache/spark/sql/execution/EvaluatePython.references:()Lorg/apache/spark/sql/catalyst/expressions/AttributeSet;]
EvaluatePython.resultAttribute ( ) : org.apache.spark.sql.catalyst.expressions.AttributeReference
[mangled: org/apache/spark/sql/execution/EvaluatePython.resultAttribute:()Lorg/apache/spark/sql/catalyst/expressions/AttributeReference;]
EvaluatePython.rowToArray ( org.apache.spark.sql.Row p1, scala.collection.Seq<org.apache.spark.sql.types.DataType> p2 ) [static] : Object[ ]
[mangled: org/apache/spark/sql/execution/EvaluatePython.rowToArray:(Lorg/apache/spark/sql/Row;Lscala/collection/Seq;)[Ljava/lang/Object;]
EvaluatePython.toJava ( Object p1, org.apache.spark.sql.types.DataType p2 ) [static] : Object
[mangled: org/apache/spark/sql/execution/EvaluatePython.toJava:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Ljava/lang/Object;]
EvaluatePython.udf ( ) : PythonUDF
[mangled: org/apache/spark/sql/execution/EvaluatePython.udf:()Lorg/apache/spark/sql/execution/PythonUDF;]
spark-sql_2.10-1.3.0.jar, ExamplePoint.class
package org.apache.spark.sql.test
ExamplePoint.ExamplePoint ( double x, double y )
[mangled: org/apache/spark/sql/test/ExamplePoint."<init>":(DD)V]
ExamplePoint.x ( ) : double
[mangled: org/apache/spark/sql/test/ExamplePoint.x:()D]
ExamplePoint.y ( ) : double
[mangled: org/apache/spark/sql/test/ExamplePoint.y:()D]
spark-sql_2.10-1.3.0.jar, Except.class
package org.apache.spark.sql.execution
Except.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Except.canEqual:(Ljava/lang/Object;)Z]
Except.children ( ) : scala.collection.Seq<SparkPlan>
[mangled: org/apache/spark/sql/execution/Except.children:()Lscala/collection/Seq;]
Except.copy ( SparkPlan left, SparkPlan right ) : Except
[mangled: org/apache/spark/sql/execution/Except.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Except;]
Except.curried ( ) [static] : scala.Function1<SparkPlan,scala.Function1<SparkPlan,Except>>
[mangled: org/apache/spark/sql/execution/Except.curried:()Lscala/Function1;]
Except.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Except.equals:(Ljava/lang/Object;)Z]
Except.Except ( SparkPlan left, SparkPlan right )
[mangled: org/apache/spark/sql/execution/Except."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
Except.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/Except.execute:()Lorg/apache/spark/rdd/RDD;]
Except.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/Except.hashCode:()I]
Except.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Except.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Except.left ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Except.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
Except.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Except.output:()Lscala/collection/Seq;]
Except.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/Except.productArity:()I]
Except.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/Except.productElement:(I)Ljava/lang/Object;]
Except.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/Except.productIterator:()Lscala/collection/Iterator;]
Except.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/Except.productPrefix:()Ljava/lang/String;]
Except.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Except.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Except.right ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Except.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
Except.tupled ( ) [static] : scala.Function1<scala.Tuple2<SparkPlan,SparkPlan>,Except>
[mangled: org/apache/spark/sql/execution/Except.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, Exchange.class
package org.apache.spark.sql.execution
Exchange.Exchange..bypassMergeThreshold ( ) : int
[mangled: org/apache/spark/sql/execution/Exchange.org.apache.spark.sql.execution.Exchange..bypassMergeThreshold:()I]
Exchange.sortBasedShuffleOn ( ) : boolean
[mangled: org/apache/spark/sql/execution/Exchange.sortBasedShuffleOn:()Z]
spark-sql_2.10-1.3.0.jar, ExecutedCommand.class
package org.apache.spark.sql.execution
ExecutedCommand.andThen ( scala.Function1<ExecutedCommand,A> p1 ) [static] : scala.Function1<RunnableCommand,A>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
ExecutedCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExecutedCommand.canEqual:(Ljava/lang/Object;)Z]
ExecutedCommand.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/ExecutedCommand.children:()Lscala/collection/immutable/Nil$;]
ExecutedCommand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExecutedCommand.children:()Lscala/collection/Seq;]
ExecutedCommand.cmd ( ) : RunnableCommand
[mangled: org/apache/spark/sql/execution/ExecutedCommand.cmd:()Lorg/apache/spark/sql/execution/RunnableCommand;]
ExecutedCommand.compose ( scala.Function1<A,RunnableCommand> p1 ) [static] : scala.Function1<A,ExecutedCommand>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.compose:(Lscala/Function1;)Lscala/Function1;]
ExecutedCommand.copy ( RunnableCommand cmd ) : ExecutedCommand
[mangled: org/apache/spark/sql/execution/ExecutedCommand.copy:(Lorg/apache/spark/sql/execution/RunnableCommand;)Lorg/apache/spark/sql/execution/ExecutedCommand;]
ExecutedCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExecutedCommand.equals:(Ljava/lang/Object;)Z]
ExecutedCommand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.execute:()Lorg/apache/spark/rdd/RDD;]
ExecutedCommand.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/ExecutedCommand.executeCollect:()[Lorg/apache/spark/sql/Row;]
ExecutedCommand.ExecutedCommand ( RunnableCommand cmd )
[mangled: org/apache/spark/sql/execution/ExecutedCommand."<init>":(Lorg/apache/spark/sql/execution/RunnableCommand;)V]
ExecutedCommand.executeTake ( int limit ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/ExecutedCommand.executeTake:(I)[Lorg/apache/spark/sql/Row;]
ExecutedCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ExecutedCommand.hashCode:()I]
ExecutedCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.output:()Lscala/collection/Seq;]
ExecutedCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productArity:()I]
ExecutedCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productElement:(I)Ljava/lang/Object;]
ExecutedCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productIterator:()Lscala/collection/Iterator;]
ExecutedCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ExecutedCommand.productPrefix:()Ljava/lang/String;]
ExecutedCommand.sideEffectResult ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExecutedCommand.sideEffectResult:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, Expand.class
package org.apache.spark.sql.execution
Expand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Expand.canEqual:(Ljava/lang/Object;)Z]
Expand.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Expand.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Expand.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Expand.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
Expand.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/Expand.children:()Lscala/collection/immutable/List;]
Expand.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/Expand.children:()Lscala/collection/Seq;]
Expand.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression> projections, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child ) : Expand
[mangled: org/apache/spark/sql/execution/Expand.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Expand;]
Expand.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<SparkPlan,Expand>>>
[mangled: org/apache/spark/sql/execution/Expand.curried:()Lscala/Function1;]
Expand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Expand.equals:(Ljava/lang/Object;)Z]
Expand.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/Expand.execute:()Lorg/apache/spark/rdd/RDD;]
Expand.Expand ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression> projections, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child )
[mangled: org/apache/spark/sql/execution/Expand."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
Expand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/Expand.hashCode:()I]
Expand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Expand.output:()Lscala/collection/Seq;]
Expand.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/Expand.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
Expand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/Expand.productArity:()I]
Expand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/Expand.productElement:(I)Ljava/lang/Object;]
Expand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/Expand.productIterator:()Lscala/collection/Iterator;]
Expand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/Expand.productPrefix:()Ljava/lang/String;]
Expand.projections ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>
[mangled: org/apache/spark/sql/execution/Expand.projections:()Lscala/collection/Seq;]
Expand.tupled ( ) [static] : scala.Function1<scala.Tuple3<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SparkPlan>,Expand>
[mangled: org/apache/spark/sql/execution/Expand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ExperimentalMethods.class
package org.apache.spark.sql
ExperimentalMethods.ExperimentalMethods ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/ExperimentalMethods."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
ExperimentalMethods.extraStrategies ( ) : scala.collection.Seq<catalyst.planning.GenericStrategy<execution.SparkPlan>>
[mangled: org/apache/spark/sql/ExperimentalMethods.extraStrategies:()Lscala/collection/Seq;]
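ExperimentalMethods (reached as sqlContext.experimental) exposes the extra-strategies planner hook listed above. A sketch of registering a no-op strategy (NoOpStrategy and StrategyInstaller are illustrative names; extraStrategies is assumed writable, being backed by a var in 1.3.0):

    import org.apache.spark.sql.{SQLContext, Strategy}
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.execution.SparkPlan

    object NoOpStrategy extends Strategy {
      // Return Nil to defer entirely to the built-in strategies.
      def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
    }

    object StrategyInstaller {
      def install(sqlContext: SQLContext): Unit = {
        sqlContext.experimental.extraStrategies = Seq(NoOpStrategy)
      }
    }
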
spark-sql_2.10-1.3.0.jar, ExplainCommand.class
package org.apache.spark.sql.execution
ExplainCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExplainCommand.canEqual:(Ljava/lang/Object;)Z]
ExplainCommand.copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended ) : ExplainCommand
[mangled: org/apache/spark/sql/execution/ExplainCommand.copy:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;Z)Lorg/apache/spark/sql/execution/ExplainCommand;]
ExplainCommand.curried ( ) [static] : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<Object,ExplainCommand>>>
[mangled: org/apache/spark/sql/execution/ExplainCommand.curried:()Lscala/Function1;]
ExplainCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExplainCommand.equals:(Ljava/lang/Object;)Z]
ExplainCommand.ExplainCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan logicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean extended )
[mangled: org/apache/spark/sql/execution/ExplainCommand."<init>":(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lscala/collection/Seq;Z)V]
ExplainCommand.extended ( ) : boolean
[mangled: org/apache/spark/sql/execution/ExplainCommand.extended:()Z]
ExplainCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ExplainCommand.hashCode:()I]
ExplainCommand.logicalPlan ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/execution/ExplainCommand.logicalPlan:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
ExplainCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/ExplainCommand.output:()Lscala/collection/Seq;]
ExplainCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ExplainCommand.productArity:()I]
ExplainCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ExplainCommand.productElement:(I)Ljava/lang/Object;]
ExplainCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ExplainCommand.productIterator:()Lscala/collection/Iterator;]
ExplainCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ExplainCommand.productPrefix:()Ljava/lang/String;]
ExplainCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExplainCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
ExplainCommand.tupled ( ) [static] : scala.Function1<scala.Tuple3<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,Object>,ExplainCommand>
[mangled: org/apache/spark/sql/execution/ExplainCommand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ExternalSort.class
package org.apache.spark.sql.execution
ExternalSort.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.canEqual:(Ljava/lang/Object;)Z]
ExternalSort.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/ExternalSort.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ExternalSort.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/ExternalSort.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
ExternalSort.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/ExternalSort.children:()Lscala/collection/immutable/List;]
ExternalSort.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/ExternalSort.children:()Lscala/collection/Seq;]
ExternalSort.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, boolean global, SparkPlan child ) : ExternalSort
[mangled: org/apache/spark/sql/execution/ExternalSort.copy:(Lscala/collection/Seq;ZLorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/ExternalSort;]
ExternalSort.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,scala.Function1<Object,scala.Function1<SparkPlan,ExternalSort>>>
[mangled: org/apache/spark/sql/execution/ExternalSort.curried:()Lscala/Function1;]
ExternalSort.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.equals:(Ljava/lang/Object;)Z]
ExternalSort.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ExternalSort.execute:()Lorg/apache/spark/rdd/RDD;]
ExternalSort.ExternalSort ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, boolean global, SparkPlan child )
[mangled: org/apache/spark/sql/execution/ExternalSort."<init>":(Lscala/collection/Seq;ZLorg/apache/spark/sql/execution/SparkPlan;)V]
ExternalSort.global ( ) : boolean
[mangled: org/apache/spark/sql/execution/ExternalSort.global:()Z]
ExternalSort.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ExternalSort.hashCode:()I]
ExternalSort.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/ExternalSort.output:()Lscala/collection/Seq;]
ExternalSort.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/ExternalSort.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
ExternalSort.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ExternalSort.productArity:()I]
ExternalSort.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ExternalSort.productElement:(I)Ljava/lang/Object;]
ExternalSort.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ExternalSort.productIterator:()Lscala/collection/Iterator;]
ExternalSort.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ExternalSort.productPrefix:()Ljava/lang/String;]
ExternalSort.requiredChildDistribution ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution>
[mangled: org/apache/spark/sql/execution/ExternalSort.requiredChildDistribution:()Lscala/collection/Seq;]
ExternalSort.sortOrder ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>
[mangled: org/apache/spark/sql/execution/ExternalSort.sortOrder:()Lscala/collection/Seq;]
ExternalSort.tupled ( ) [static] : scala.Function1<scala.Tuple3<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,Object,SparkPlan>,ExternalSort>
[mangled: org/apache/spark/sql/execution/ExternalSort.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, Filter.class
package org.apache.spark.sql.execution
Filter.conditionEvaluator ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/Filter.conditionEvaluator:()Lscala/Function1;]
package org.apache.spark.sql.sources
Filter.Filter ( )
[mangled: org/apache/spark/sql/sources/Filter."<init>":()V]
spark-sql_2.10-1.3.0.jar, GeneralHashedRelation.class
package org.apache.spark.sql.execution.joins
GeneralHashedRelation.GeneralHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>> hashTable )
[mangled: org/apache/spark/sql/execution/joins/GeneralHashedRelation."<init>":(Ljava/util/HashMap;)V]
spark-sql_2.10-1.3.0.jar, Generate.class
package org.apache.spark.sql.execution
Generate.boundGenerator ( ) : org.apache.spark.sql.catalyst.expressions.Generator
[mangled: org/apache/spark/sql/execution/Generate.boundGenerator:()Lorg/apache/spark/sql/catalyst/expressions/Generator;]
Generate.generatorOutput ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Generate.generatorOutput:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, GeneratedAggregate.class
package org.apache.spark.sql.execution
GeneratedAggregate.aggregateExpressions ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.aggregateExpressions:()Lscala/collection/Seq;]
GeneratedAggregate.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.canEqual:(Ljava/lang/Object;)Z]
GeneratedAggregate.child ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.child:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
GeneratedAggregate.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
GeneratedAggregate.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.children:()Lscala/collection/immutable/List;]
GeneratedAggregate.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.children:()Lscala/collection/Seq;]
GeneratedAggregate.copy ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child ) : GeneratedAggregate
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.copy:(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/GeneratedAggregate;]
GeneratedAggregate.curried ( ) [static] : scala.Function1<Object,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>,scala.Function1<SparkPlan,GeneratedAggregate>>>>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.curried:()Lscala/Function1;]
GeneratedAggregate.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.equals:(Ljava/lang/Object;)Z]
GeneratedAggregate.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.execute:()Lorg/apache/spark/rdd/RDD;]
GeneratedAggregate.GeneratedAggregate ( boolean partial, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupingExpressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> aggregateExpressions, SparkPlan child )
[mangled: org/apache/spark/sql/execution/GeneratedAggregate."<init>":(ZLscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
GeneratedAggregate.groupingExpressions ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.groupingExpressions:()Lscala/collection/Seq;]
GeneratedAggregate.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.hashCode:()I]
GeneratedAggregate.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.output:()Lscala/collection/Seq;]
GeneratedAggregate.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
GeneratedAggregate.partial ( ) : boolean
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.partial:()Z]
GeneratedAggregate.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.productArity:()I]
GeneratedAggregate.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.productElement:(I)Ljava/lang/Object;]
GeneratedAggregate.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.productIterator:()Lscala/collection/Iterator;]
GeneratedAggregate.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.productPrefix:()Ljava/lang/String;]
GeneratedAggregate.requiredChildDistribution ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.requiredChildDistribution:()Lscala/collection/Seq;]
GeneratedAggregate.tupled ( ) [static] : scala.Function1<scala.Tuple4<Object,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>,SparkPlan>,GeneratedAggregate>
[mangled: org/apache/spark/sql/execution/GeneratedAggregate.tupled:()Lscala/Function1;]
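
GeneratedAggregate is the code-generated variant of the Aggregate operator and carries the same (partial, groupingExpressions, aggregateExpressions, child) shape. A sketch of triggering it, assuming the spark.sql.codegen flag and a DataFrame df:

    import org.apache.spark.sql.functions._
    sqlContext.setConf("spark.sql.codegen", "true")
    // with codegen enabled, eligible aggregations plan as GeneratedAggregate
    df.groupBy("dept").agg(sum("salary")).explain()
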
spark-sql_2.10-1.3.0.jar, GenericColumnStats.class
package org.apache.spark.sql.columnar
GenericColumnStats.GenericColumnStats ( )
[mangled: org/apache/spark/sql/columnar/GenericColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, GreaterThan.class
package org.apache.spark.sql.sources
GreaterThan.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.attribute:()Ljava/lang/String;]
GreaterThan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThan.canEqual:(Ljava/lang/Object;)Z]
GreaterThan.copy ( String attribute, Object value ) : GreaterThan
[mangled: org/apache/spark/sql/sources/GreaterThan.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/GreaterThan;]
GreaterThan.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,GreaterThan>>
[mangled: org/apache/spark/sql/sources/GreaterThan.curried:()Lscala/Function1;]
GreaterThan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThan.equals:(Ljava/lang/Object;)Z]
GreaterThan.GreaterThan ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/GreaterThan."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
GreaterThan.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThan.hashCode:()I]
GreaterThan.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThan.productArity:()I]
GreaterThan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThan.productElement:(I)Ljava/lang/Object;]
GreaterThan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/GreaterThan.productIterator:()Lscala/collection/Iterator;]
GreaterThan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.productPrefix:()Ljava/lang/String;]
GreaterThan.toString ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThan.toString:()Ljava/lang/String;]
GreaterThan.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,GreaterThan>
[mangled: org/apache/spark/sql/sources/GreaterThan.tupled:()Lscala/Function1;]
GreaterThan.value ( ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThan.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, GreaterThanOrEqual.class
package org.apache.spark.sql.sources
GreaterThanOrEqual.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.attribute:()Ljava/lang/String;]
GreaterThanOrEqual.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.canEqual:(Ljava/lang/Object;)Z]
GreaterThanOrEqual.copy ( String attribute, Object value ) : GreaterThanOrEqual
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/GreaterThanOrEqual;]
GreaterThanOrEqual.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,GreaterThanOrEqual>>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.curried:()Lscala/Function1;]
GreaterThanOrEqual.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.equals:(Ljava/lang/Object;)Z]
GreaterThanOrEqual.GreaterThanOrEqual ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
GreaterThanOrEqual.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.hashCode:()I]
GreaterThanOrEqual.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productArity:()I]
GreaterThanOrEqual.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productElement:(I)Ljava/lang/Object;]
GreaterThanOrEqual.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productIterator:()Lscala/collection/Iterator;]
GreaterThanOrEqual.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.productPrefix:()Ljava/lang/String;]
GreaterThanOrEqual.toString ( ) : String
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.toString:()Ljava/lang/String;]
GreaterThanOrEqual.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,GreaterThanOrEqual>
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.tupled:()Lscala/Function1;]
GreaterThanOrEqual.value ( ) : Object
[mangled: org/apache/spark/sql/sources/GreaterThanOrEqual.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, GroupedData.class
package org.apache.spark.sql
GroupedData.agg ( java.util.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( Column expr, scala.collection.Seq<Column> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lorg/apache/spark/sql/Column;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( scala.collection.immutable.Map<String,String> exprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.agg ( scala.Tuple2<String,String> aggExpr, scala.collection.Seq<scala.Tuple2<String,String>> aggExprs ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.agg:(Lscala/Tuple2;Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
GroupedData.count ( ) : DataFrame
[mangled: org/apache/spark/sql/GroupedData.count:()Lorg/apache/spark/sql/DataFrame;]
GroupedData.GroupedData ( DataFrame df, scala.collection.Seq<catalyst.expressions.Expression> groupingExprs )
[mangled: org/apache/spark/sql/GroupedData."<init>":(Lorg/apache/spark/sql/DataFrame;Lscala/collection/Seq;)V]
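
GroupedData is the handle returned by DataFrame.groupBy; the four agg overloads above accept a java.util.Map, a Scala Map, (String, String) pairs, or Columns. A short tour, assuming a DataFrame df with dept, salary and age columns:

    import org.apache.spark.sql.functions._
    import sqlContext.implicits._

    val byDept = df.groupBy("dept")                     // GroupedData
    byDept.count()                                      // group sizes as a "count" column
    byDept.agg(Map("salary" -> "max", "age" -> "avg"))  // immutable.Map overload
    byDept.agg("salary" -> "max", "age" -> "avg")       // Tuple2 varargs overload
    byDept.agg(max($"salary"), avg($"age"))             // Column varargs overload
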
spark-sql_2.10-1.3.0.jar, HashedRelation.class
package org.apache.spark.sql.execution.joins
HashedRelation.get ( org.apache.spark.sql.Row p1 ) [abstract] : org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashedRelation.get:(Lorg/apache/spark/sql/Row;)Lorg/apache/spark/util/collection/CompactBuffer;]
spark-sql_2.10-1.3.0.jar, HashJoin.class
package org.apache.spark.sql.execution.joins
HashJoin.buildKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildKeys:()Lscala/collection/Seq;]
HashJoin.buildPlan ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.buildSide ( ) [abstract] : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
HashJoin.buildSideKeyGenerator ( ) [abstract] : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/HashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
HashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> p1, HashedRelation p2 ) [abstract] : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
HashJoin.left ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.leftKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.leftKeys:()Lscala/collection/Seq;]
HashJoin.output ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.output:()Lscala/collection/Seq;]
HashJoin.right ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.rightKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.rightKeys:()Lscala/collection/Seq;]
HashJoin.streamedKeys ( ) [abstract] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamedKeys:()Lscala/collection/Seq;]
HashJoin.streamedPlan ( ) [abstract] : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashJoin.streamSideKeyGenerator ( ) [abstract] : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/HashJoin.streamSideKeyGenerator:()Lscala/Function0;]
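
HashedRelation and GeneralHashedRelation above form the build side of the 1.3 hash joins: the build plan's rows are grouped by join key into a HashMap, and HashJoin.hashJoin probes that table with the streamed side. A rough sketch of the pattern these members imply (simplified; the real code uses Spark's internal CompactBuffer and reusable key projections):

    import java.util.HashMap
    import scala.collection.mutable.ArrayBuffer
    import org.apache.spark.sql.Row

    // build phase: group build-side rows by their join key
    def build(rows: Iterator[Row], keyOf: Row => Row): HashMap[Row, ArrayBuffer[Row]] = {
      val table = new HashMap[Row, ArrayBuffer[Row]]()
      for (row <- rows) {
        val key = keyOf(row)
        var matches = table.get(key)
        if (matches == null) { matches = new ArrayBuffer[Row](); table.put(key, matches) }
        matches += row
      }
      table
    }

    // probe phase: stream the other side through the table
    def probe(streamed: Iterator[Row], table: HashMap[Row, ArrayBuffer[Row]],
              keyOf: Row => Row): Iterator[(Row, Row)] =
      streamed.flatMap { s =>
        val matches = table.get(keyOf(s))
        if (matches == null) Iterator.empty else matches.iterator.map(m => (s, m))
      }
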
spark-sql_2.10-1.3.0.jar, HashOuterJoin.class
package org.apache.spark.sql.execution.joins
HashOuterJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.canEqual:(Ljava/lang/Object;)Z]
HashOuterJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.children:()Lscala/collection/Seq;]
HashOuterJoin.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.condition:()Lscala/Option;]
HashOuterJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : HashOuterJoin
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/HashOuterJoin;]
HashOuterJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.catalyst.plans.JoinType,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,HashOuterJoin>>>>>>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.curried:()Lscala/Function1;]
HashOuterJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.equals:(Ljava/lang/Object;)Z]
HashOuterJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.execute:()Lorg/apache/spark/rdd/RDD;]
HashOuterJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.hashCode:()I]
HashOuterJoin.HashOuterJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.catalyst.plans.JoinType joinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/catalyst/plans/JoinType;Lscala/Option;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
HashOuterJoin.joinType ( ) : org.apache.spark.sql.catalyst.plans.JoinType
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.joinType:()Lorg/apache/spark/sql/catalyst/plans/JoinType;]
HashOuterJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
HashOuterJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashOuterJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.leftKeys:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..boundCondition:()Lscala/Function1;]
HashOuterJoin.HashOuterJoin..buildHashTable ( scala.collection.Iterator<org.apache.spark.sql.Row> iter, org.apache.spark.sql.catalyst.expressions.package.Projection keyGenerator ) : java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..buildHashTable:(Lscala/collection/Iterator;Lorg/apache/spark/sql/catalyst/expressions/package$Projection;)Ljava/util/HashMap;]
HashOuterJoin.HashOuterJoin..DUMMY_LIST ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..DUMMY_LIST:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..EMPTY_LIST ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..EMPTY_LIST:()Lscala/collection/Seq;]
HashOuterJoin.HashOuterJoin..fullOuterIterator ( org.apache.spark.sql.Row key, scala.collection.Iterable<org.apache.spark.sql.Row> leftIter, scala.collection.Iterable<org.apache.spark.sql.Row> rightIter, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..fullOuterIterator:(Lorg/apache/spark/sql/Row;Lscala/collection/Iterable;Lscala/collection/Iterable;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;)Lscala/collection/Iterator;]
HashOuterJoin.HashOuterJoin..leftNullRow ( ) : org.apache.spark.sql.catalyst.expressions.GenericRow
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..leftNullRow:()Lorg/apache/spark/sql/catalyst/expressions/GenericRow;]
HashOuterJoin.HashOuterJoin..leftOuterIterator ( org.apache.spark.sql.Row key, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow, scala.collection.Iterable<org.apache.spark.sql.Row> rightIter ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..leftOuterIterator:(Lorg/apache/spark/sql/Row;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;Lscala/collection/Iterable;)Lscala/collection/Iterator;]
HashOuterJoin.HashOuterJoin..rightNullRow ( ) : org.apache.spark.sql.catalyst.expressions.GenericRow
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..rightNullRow:()Lorg/apache/spark/sql/catalyst/expressions/GenericRow;]
HashOuterJoin.HashOuterJoin..rightOuterIterator ( org.apache.spark.sql.Row key, scala.collection.Iterable<org.apache.spark.sql.Row> leftIter, org.apache.spark.sql.catalyst.expressions.JoinedRow joinedRow ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.org.apache.spark.sql.execution.joins.HashOuterJoin..rightOuterIterator:(Lorg/apache/spark/sql/Row;Lscala/collection/Iterable;Lorg/apache/spark/sql/catalyst/expressions/JoinedRow;)Lscala/collection/Iterator;]
HashOuterJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.output:()Lscala/collection/Seq;]
HashOuterJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
HashOuterJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productArity:()I]
HashOuterJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productElement:(I)Ljava/lang/Object;]
HashOuterJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productIterator:()Lscala/collection/Iterator;]
HashOuterJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.productPrefix:()Ljava/lang/String;]
HashOuterJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
HashOuterJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.requiredChildDistribution:()Lscala/collection/Seq;]
HashOuterJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
HashOuterJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
HashOuterJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.rightKeys:()Lscala/collection/Seq;]
HashOuterJoin.tupled ( ) [static] : scala.Function1<scala.Tuple6<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.catalyst.plans.JoinType,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,HashOuterJoin>
[mangled: org/apache/spark/sql/execution/joins/HashOuterJoin.tupled:()Lscala/Function1;]
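
The leftOuterIterator/rightOuterIterator/fullOuterIterator members implement the null-padding side of the outer join: a row with no matches on the other side is joined against the pre-built all-null row (leftNullRow/rightNullRow). The left-outer case, roughly:

    import org.apache.spark.sql.Row

    // simplified shape of leftOuterIterator
    def leftOuter(left: Row, rightMatches: Iterable[Row], rightNullRow: Row): Iterator[(Row, Row)] =
      if (rightMatches.isEmpty) Iterator((left, rightNullRow))
      else rightMatches.iterator.map(r => (left, r))
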
spark-sql_2.10-1.3.0.jar, In.class
package org.apache.spark.sql.sources
In.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/In.attribute:()Ljava/lang/String;]
In.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/In.canEqual:(Ljava/lang/Object;)Z]
In.copy ( String attribute, Object[ ] values ) : In
[mangled: org/apache/spark/sql/sources/In.copy:(Ljava/lang/String;[Ljava/lang/Object;)Lorg/apache/spark/sql/sources/In;]
In.curried ( ) [static] : scala.Function1<String,scala.Function1<Object[ ],In>>
[mangled: org/apache/spark/sql/sources/In.curried:()Lscala/Function1;]
In.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/In.equals:(Ljava/lang/Object;)Z]
In.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/In.hashCode:()I]
In.In ( String attribute, Object[ ] values )
[mangled: org/apache/spark/sql/sources/In."<init>":(Ljava/lang/String;[Ljava/lang/Object;)V]
In.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/In.productArity:()I]
In.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/In.productElement:(I)Ljava/lang/Object;]
In.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/In.productIterator:()Lscala/collection/Iterator;]
In.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/In.productPrefix:()Ljava/lang/String;]
In.toString ( ) : String
[mangled: org/apache/spark/sql/sources/In.toString:()Ljava/lang/String;]
In.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object[ ]>,In>
[mangled: org/apache/spark/sql/sources/In.tupled:()Lscala/Function1;]
In.values ( ) : Object[ ]
[mangled: org/apache/spark/sql/sources/In.values:()[Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, InMemoryColumnarTableScan.class
package org.apache.spark.sql.columnar
InMemoryColumnarTableScan.buildFilter ( ) : scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.buildFilter:()Lscala/PartialFunction;]
InMemoryColumnarTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates, InMemoryRelation relation ) : InMemoryColumnarTableScan
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)Lorg/apache/spark/sql/columnar/InMemoryColumnarTableScan;]
InMemoryColumnarTableScan.InMemoryColumnarTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates, InMemoryRelation relation )
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/columnar/InMemoryRelation;)V]
InMemoryColumnarTableScan.InMemoryColumnarTableScan..inMemoryPartitionPruningEnabled ( ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.org.apache.spark.sql.columnar.InMemoryColumnarTableScan..inMemoryPartitionPruningEnabled:()Z]
InMemoryColumnarTableScan.InMemoryColumnarTableScan..statsFor ( org.apache.spark.sql.catalyst.expressions.Attribute a ) : ColumnStatisticsSchema
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.org.apache.spark.sql.columnar.InMemoryColumnarTableScan..statsFor:(Lorg/apache/spark/sql/catalyst/expressions/Attribute;)Lorg/apache/spark/sql/columnar/ColumnStatisticsSchema;]
InMemoryColumnarTableScan.partitionFilters ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.partitionFilters:()Lscala/collection/Seq;]
InMemoryColumnarTableScan.predicates ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.predicates:()Lscala/collection/Seq;]
InMemoryColumnarTableScan.readBatches ( ) : org.apache.spark.Accumulator<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.readBatches:()Lorg/apache/spark/Accumulator;]
InMemoryColumnarTableScan.readPartitions ( ) : org.apache.spark.Accumulator<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.readPartitions:()Lorg/apache/spark/Accumulator;]
InMemoryColumnarTableScan.relation ( ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryColumnarTableScan.relation:()Lorg/apache/spark/sql/columnar/InMemoryRelation;]
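
buildFilter and statsFor implement batch-level pruning over a cached relation: each column batch records per-column bounds and null counts, and scan predicates are rewritten against those statistics so whole batches can be skipped; readPartitions and readBatches accumulate what was actually scanned. A sketch, assuming the spark.sql.inMemoryColumnarStorage.partitionPruning flag:

    sqlContext.setConf("spark.sql.inMemoryColumnarStorage.partitionPruning", "true")
    import sqlContext.implicits._
    df.cache()
    // batches whose recorded upper bound for id is <= 1000000 can be skipped
    df.filter($"id" > 1000000).count()
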
spark-sql_2.10-1.3.0.jar, InMemoryRelation.class
package org.apache.spark.sql.columnar
InMemoryRelation.batchSize ( ) : int
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.batchSize:()I]
InMemoryRelation.cachedColumnBuffers ( ) : org.apache.spark.rdd.RDD<CachedBatch>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.cachedColumnBuffers:()Lorg/apache/spark/rdd/RDD;]
InMemoryRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.canEqual:(Ljava/lang/Object;)Z]
InMemoryRelation.child ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
InMemoryRelation.children ( ) : scala.collection.Seq<scala.runtime.Nothing.>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.children:()Lscala/collection/Seq;]
InMemoryRelation.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.storage.StorageLevel storageLevel, org.apache.spark.sql.execution.SparkPlan child, scala.Option<String> tableName, org.apache.spark.rdd.RDD<CachedBatch> _cachedColumnBuffers, org.apache.spark.sql.catalyst.plans.logical.Statistics _statistics ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.copy:(Lscala/collection/Seq;ZILorg/apache/spark/storage/StorageLevel;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;)Lorg/apache/spark/sql/columnar/InMemoryRelation;]
InMemoryRelation.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.equals:(Ljava/lang/Object;)Z]
InMemoryRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.hashCode:()I]
InMemoryRelation.InMemoryRelation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, boolean useCompression, int batchSize, org.apache.spark.storage.StorageLevel storageLevel, org.apache.spark.sql.execution.SparkPlan child, scala.Option<String> tableName, org.apache.spark.rdd.RDD<CachedBatch> _cachedColumnBuffers, org.apache.spark.sql.catalyst.plans.logical.Statistics _statistics )
[mangled: org/apache/spark/sql/columnar/InMemoryRelation."<init>":(Lscala/collection/Seq;ZILorg/apache/spark/storage/StorageLevel;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;)V]
InMemoryRelation.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
InMemoryRelation.newInstance ( ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.newInstance:()Lorg/apache/spark/sql/columnar/InMemoryRelation;]
InMemoryRelation.InMemoryRelation..batchStats ( ) : org.apache.spark.Accumulable<scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.Row>,org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.org.apache.spark.sql.columnar.InMemoryRelation..batchStats:()Lorg/apache/spark/Accumulable;]
InMemoryRelation.otherCopyArgs ( ) : scala.collection.Seq<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.otherCopyArgs:()Lscala/collection/Seq;]
InMemoryRelation.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.output:()Lscala/collection/Seq;]
InMemoryRelation.partitionStatistics ( ) : PartitionStatistics
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.partitionStatistics:()Lorg/apache/spark/sql/columnar/PartitionStatistics;]
InMemoryRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.productArity:()I]
InMemoryRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.productElement:(I)Ljava/lang/Object;]
InMemoryRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.productIterator:()Lscala/collection/Iterator;]
InMemoryRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.productPrefix:()Ljava/lang/String;]
InMemoryRelation.recache ( ) : void
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.recache:()V]
InMemoryRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
InMemoryRelation.storageLevel ( ) : org.apache.spark.storage.StorageLevel
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.storageLevel:()Lorg/apache/spark/storage/StorageLevel;]
InMemoryRelation.tableName ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.tableName:()Lscala/Option;]
InMemoryRelation.useCompression ( ) : boolean
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.useCompression:()Z]
InMemoryRelation.withOutput ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> newOutput ) : InMemoryRelation
[mangled: org/apache/spark/sql/columnar/InMemoryRelation.withOutput:(Lscala/collection/Seq;)Lorg/apache/spark/sql/columnar/InMemoryRelation;]
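
InMemoryRelation is what DataFrame.cache and SQLContext.cacheTable plan to: the child SparkPlan is executed once and its rows are stored as column batches of batchSize rows at the given storageLevel, optionally compressed. For example, assuming the standard 1.x columnar-storage conf keys:

    sqlContext.setConf("spark.sql.inMemoryColumnarStorage.batchSize", "10000")
    sqlContext.setConf("spark.sql.inMemoryColumnarStorage.compressed", "true")
    df.registerTempTable("events")
    sqlContext.cacheTable("events")  // lazy: the first scan materializes the batches
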
spark-sql_2.10-1.3.0.jar, InsertableRelation.class
package org.apache.spark.sql.sources
InsertableRelation.insert ( org.apache.spark.sql.DataFrame p1, boolean p2 ) [abstract] : void
[mangled: org/apache/spark/sql/sources/InsertableRelation.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
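
InsertableRelation is the 1.3 data-source hook behind INSERT INTO / INSERT OVERWRITE: a BaseRelation that also mixes in this trait receives the incoming DataFrame plus the overwrite flag. A minimal sketch; MyRelation and its path handling are illustrative only:

    import org.apache.spark.sql.{DataFrame, SQLContext}
    import org.apache.spark.sql.sources.{BaseRelation, InsertableRelation}
    import org.apache.spark.sql.types.StructType

    class MyRelation(val sqlContext: SQLContext, val schema: StructType, path: String)
        extends BaseRelation with InsertableRelation {
      override def insert(data: DataFrame, overwrite: Boolean): Unit = {
        if (overwrite) {
          // a real source would first clear existing data under `path`
        }
        data.rdd.foreachPartition { rows =>
          () // a real source would write this partition's Rows under `path`
        }
      }
    }
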
spark-sql_2.10-1.3.0.jar, InsertIntoDataSource.class
package org.apache.spark.sql.sources
InsertIntoDataSource.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.canEqual:(Ljava/lang/Object;)Z]
InsertIntoDataSource.copy ( LogicalRelation logicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean overwrite ) : InsertIntoDataSource
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.copy:(Lorg/apache/spark/sql/sources/LogicalRelation;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)Lorg/apache/spark/sql/sources/InsertIntoDataSource;]
InsertIntoDataSource.curried ( ) [static] : scala.Function1<LogicalRelation,scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,scala.Function1<Object,InsertIntoDataSource>>>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.curried:()Lscala/Function1;]
InsertIntoDataSource.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.equals:(Ljava/lang/Object;)Z]
InsertIntoDataSource.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.hashCode:()I]
InsertIntoDataSource.InsertIntoDataSource ( LogicalRelation logicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean overwrite )
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource."<init>":(Lorg/apache/spark/sql/sources/LogicalRelation;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Z)V]
InsertIntoDataSource.logicalRelation ( ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.logicalRelation:()Lorg/apache/spark/sql/sources/LogicalRelation;]
InsertIntoDataSource.overwrite ( ) : boolean
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.overwrite:()Z]
InsertIntoDataSource.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productArity:()I]
InsertIntoDataSource.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productElement:(I)Ljava/lang/Object;]
InsertIntoDataSource.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productIterator:()Lscala/collection/Iterator;]
InsertIntoDataSource.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.productPrefix:()Ljava/lang/String;]
InsertIntoDataSource.query ( ) : org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.query:()Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;]
InsertIntoDataSource.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
InsertIntoDataSource.tupled ( ) [static] : scala.Function1<scala.Tuple3<LogicalRelation,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,Object>,InsertIntoDataSource>
[mangled: org/apache/spark/sql/sources/InsertIntoDataSource.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, InsertIntoParquetTable.class
package org.apache.spark.sql.parquet
InsertIntoParquetTable.copy ( ParquetRelation relation, org.apache.spark.sql.execution.SparkPlan child, boolean overwrite ) : InsertIntoParquetTable
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.copy:(Lorg/apache/spark/sql/parquet/ParquetRelation;Lorg/apache/spark/sql/execution/SparkPlan;Z)Lorg/apache/spark/sql/parquet/InsertIntoParquetTable;]
InsertIntoParquetTable.curried ( ) [static] : scala.Function1<ParquetRelation,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<Object,InsertIntoParquetTable>>>
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.curried:()Lscala/Function1;]
InsertIntoParquetTable.InsertIntoParquetTable ( ParquetRelation relation, org.apache.spark.sql.execution.SparkPlan child, boolean overwrite )
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable."<init>":(Lorg/apache/spark/sql/parquet/ParquetRelation;Lorg/apache/spark/sql/execution/SparkPlan;Z)V]
InsertIntoParquetTable.tupled ( ) [static] : scala.Function1<scala.Tuple3<ParquetRelation,org.apache.spark.sql.execution.SparkPlan,Object>,InsertIntoParquetTable>
[mangled: org/apache/spark/sql/parquet/InsertIntoParquetTable.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, IntColumnStats.class
package org.apache.spark.sql.columnar
IntColumnStats.collectedStatistics ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/columnar/IntColumnStats.collectedStatistics:()Lorg/apache/spark/sql/Row;]
IntColumnStats.count ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.count:()I]
IntColumnStats.count_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.count_.eq:(I)V]
IntColumnStats.gatherStats ( org.apache.spark.sql.Row row, int ordinal ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.gatherStats:(Lorg/apache/spark/sql/Row;I)V]
IntColumnStats.lower ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.lower:()I]
IntColumnStats.lower_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.lower_.eq:(I)V]
IntColumnStats.nullCount ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.nullCount:()I]
IntColumnStats.nullCount_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.nullCount_.eq:(I)V]
IntColumnStats.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/columnar/IntColumnStats.sizeInBytes:()J]
IntColumnStats.sizeInBytes_.eq ( long p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.sizeInBytes_.eq:(J)V]
IntColumnStats.upper ( ) : int
[mangled: org/apache/spark/sql/columnar/IntColumnStats.upper:()I]
IntColumnStats.upper_.eq ( int p1 ) : void
[mangled: org/apache/spark/sql/columnar/IntColumnStats.upper_.eq:(I)V]
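
IntColumnStats collects the per-batch statistics consumed by the in-memory pruning above: gatherStats is invoked once per row to tighten the bounds while counting nulls and bytes, and collectedStatistics packages the result as a Row. The update it performs, much simplified:

    // simplified shape of gatherStats for an INT column
    var lower       = Int.MaxValue
    var upper       = Int.MinValue
    var nullCount   = 0
    var sizeInBytes = 0L
    def gather(value: Any): Unit = value match {
      case null   => nullCount += 1
      case v: Int =>
        if (v < lower) lower = v
        if (v > upper) upper = v
        sizeInBytes += 4
    }
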
spark-sql_2.10-1.3.0.jar, IntegerHashSetSerializer.class
package org.apache.spark.sql.execution
IntegerHashSetSerializer.IntegerHashSetSerializer ( )
[mangled: org/apache/spark/sql/execution/IntegerHashSetSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, Intersect.class
package org.apache.spark.sql.execution
Intersect.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Intersect.canEqual:(Ljava/lang/Object;)Z]
Intersect.children ( ) : scala.collection.Seq<SparkPlan>
[mangled: org/apache/spark/sql/execution/Intersect.children:()Lscala/collection/Seq;]
Intersect.copy ( SparkPlan left, SparkPlan right ) : Intersect
[mangled: org/apache/spark/sql/execution/Intersect.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Intersect;]
Intersect.curried ( ) [static] : scala.Function1<SparkPlan,scala.Function1<SparkPlan,Intersect>>
[mangled: org/apache/spark/sql/execution/Intersect.curried:()Lscala/Function1;]
Intersect.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/Intersect.equals:(Ljava/lang/Object;)Z]
Intersect.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/Intersect.execute:()Lorg/apache/spark/rdd/RDD;]
Intersect.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/Intersect.hashCode:()I]
Intersect.Intersect ( SparkPlan left, SparkPlan right )
[mangled: org/apache/spark/sql/execution/Intersect."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
Intersect.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Intersect.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Intersect.left ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Intersect.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
Intersect.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/Intersect.output:()Lscala/collection/Seq;]
Intersect.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/Intersect.productArity:()I]
Intersect.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/Intersect.productElement:(I)Ljava/lang/Object;]
Intersect.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/Intersect.productIterator:()Lscala/collection/Iterator;]
Intersect.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/Intersect.productPrefix:()Ljava/lang/String;]
Intersect.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/Intersect.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
Intersect.right ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/Intersect.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
Intersect.tupled ( ) [static] : scala.Function1<scala.Tuple2<SparkPlan,SparkPlan>,Intersect>
[mangled: org/apache/spark/sql/execution/Intersect.tupled:()Lscala/Function1;]
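
Intersect is the physical operator behind DataFrame.intersect (and INTERSECT in SQL); execute() keeps the left rows that also occur on the right. For example:

    val common = df1.intersect(df2)  // plans as execution.Intersect(left, right)
    common.explain()
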
spark-sql_2.10-1.3.0.jar, IsNotNull.class
package org.apache.spark.sql.sources
IsNotNull.andThen ( scala.Function1<IsNotNull,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/sources/IsNotNull.andThen:(Lscala/Function1;)Lscala/Function1;]
IsNotNull.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.attribute:()Ljava/lang/String;]
IsNotNull.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNotNull.canEqual:(Ljava/lang/Object;)Z]
IsNotNull.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,IsNotNull>
[mangled: org/apache/spark/sql/sources/IsNotNull.compose:(Lscala/Function1;)Lscala/Function1;]
IsNotNull.copy ( String attribute ) : IsNotNull
[mangled: org/apache/spark/sql/sources/IsNotNull.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/sources/IsNotNull;]
IsNotNull.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNotNull.equals:(Ljava/lang/Object;)Z]
IsNotNull.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/IsNotNull.hashCode:()I]
IsNotNull.IsNotNull ( String attribute )
[mangled: org/apache/spark/sql/sources/IsNotNull."<init>":(Ljava/lang/String;)V]
IsNotNull.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/IsNotNull.productArity:()I]
IsNotNull.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/IsNotNull.productElement:(I)Ljava/lang/Object;]
IsNotNull.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/IsNotNull.productIterator:()Lscala/collection/Iterator;]
IsNotNull.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.productPrefix:()Ljava/lang/String;]
IsNotNull.toString ( ) : String
[mangled: org/apache/spark/sql/sources/IsNotNull.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, IsNull.class
package org.apache.spark.sql.sources
IsNull.andThen ( scala.Function1<IsNull,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/sources/IsNull.andThen:(Lscala/Function1;)Lscala/Function1;]
IsNull.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.attribute:()Ljava/lang/String;]
IsNull.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNull.canEqual:(Ljava/lang/Object;)Z]
IsNull.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,IsNull>
[mangled: org/apache/spark/sql/sources/IsNull.compose:(Lscala/Function1;)Lscala/Function1;]
IsNull.copy ( String attribute ) : IsNull
[mangled: org/apache/spark/sql/sources/IsNull.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/sources/IsNull;]
IsNull.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/IsNull.equals:(Ljava/lang/Object;)Z]
IsNull.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/IsNull.hashCode:()I]
IsNull.IsNull ( String attribute )
[mangled: org/apache/spark/sql/sources/IsNull."<init>":(Ljava/lang/String;)V]
IsNull.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/IsNull.productArity:()I]
IsNull.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/IsNull.productElement:(I)Ljava/lang/Object;]
IsNull.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/IsNull.productIterator:()Lscala/collection/Iterator;]
IsNull.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.productPrefix:()Ljava/lang/String;]
IsNull.toString ( ) : String
[mangled: org/apache/spark/sql/sources/IsNull.toString:()Ljava/lang/String;]
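
GreaterThan, GreaterThanOrEqual, In, IsNull and IsNotNull above (together with their EqualTo/LessThan duals) form the small filter ADT that the planner pushes into PrunedFilteredScan.buildScan. A source typically pattern-matches them into its native predicate language, much as JDBCRDD..compileFilter below compiles them to SQL; a sketch, where quote is a hypothetical literal-quoting helper:

    import org.apache.spark.sql.sources._

    def compile(f: Filter, quote: Any => String): Option[String] = f match {
      case GreaterThan(attr, v)        => Some(s"$attr > ${quote(v)}")
      case GreaterThanOrEqual(attr, v) => Some(s"$attr >= ${quote(v)}")
      case In(attr, vs)                => Some(s"$attr IN (${vs.map(quote).mkString(", ")})")
      case IsNull(attr)                => Some(s"$attr IS NULL")
      case IsNotNull(attr)             => Some(s"$attr IS NOT NULL")
      case _                           => None // unhandled filters are still evaluated by Spark
    }
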
spark-sql_2.10-1.3.0.jar, JavaBigDecimalSerializer.class
package org.apache.spark.sql.execution
JavaBigDecimalSerializer.JavaBigDecimalSerializer ( )
[mangled: org/apache/spark/sql/execution/JavaBigDecimalSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, JDBCPartition.class
package org.apache.spark.sql.jdbc
JDBCPartition.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.canEqual:(Ljava/lang/Object;)Z]
JDBCPartition.copy ( String whereClause, int idx ) : JDBCPartition
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.copy:(Ljava/lang/String;I)Lorg/apache/spark/sql/jdbc/JDBCPartition;]
JDBCPartition.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,JDBCPartition>>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.curried:()Lscala/Function1;]
JDBCPartition.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.equals:(Ljava/lang/Object;)Z]
JDBCPartition.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.hashCode:()I]
JDBCPartition.idx ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.idx:()I]
JDBCPartition.index ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.index:()I]
JDBCPartition.JDBCPartition ( String whereClause, int idx )
[mangled: org/apache/spark/sql/jdbc/JDBCPartition."<init>":(Ljava/lang/String;I)V]
JDBCPartition.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productArity:()I]
JDBCPartition.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productElement:(I)Ljava/lang/Object;]
JDBCPartition.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productIterator:()Lscala/collection/Iterator;]
JDBCPartition.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.productPrefix:()Ljava/lang/String;]
JDBCPartition.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.toString:()Ljava/lang/String;]
JDBCPartition.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,JDBCPartition>
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.tupled:()Lscala/Function1;]
JDBCPartition.whereClause ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartition.whereClause:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, JDBCPartitioningInfo.class
package org.apache.spark.sql.jdbc
JDBCPartitioningInfo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.canEqual:(Ljava/lang/Object;)Z]
JDBCPartitioningInfo.column ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.column:()Ljava/lang/String;]
JDBCPartitioningInfo.copy ( String column, long lowerBound, long upperBound, int numPartitions ) : JDBCPartitioningInfo
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.copy:(Ljava/lang/String;JJI)Lorg/apache/spark/sql/jdbc/JDBCPartitioningInfo;]
JDBCPartitioningInfo.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,scala.Function1<Object,scala.Function1<Object,JDBCPartitioningInfo>>>>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.curried:()Lscala/Function1;]
JDBCPartitioningInfo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.equals:(Ljava/lang/Object;)Z]
JDBCPartitioningInfo.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.hashCode:()I]
JDBCPartitioningInfo.JDBCPartitioningInfo ( String column, long lowerBound, long upperBound, int numPartitions )
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo."<init>":(Ljava/lang/String;JJI)V]
JDBCPartitioningInfo.lowerBound ( ) : long
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.lowerBound:()J]
JDBCPartitioningInfo.numPartitions ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.numPartitions:()I]
JDBCPartitioningInfo.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productArity:()I]
JDBCPartitioningInfo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productElement:(I)Ljava/lang/Object;]
JDBCPartitioningInfo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productIterator:()Lscala/collection/Iterator;]
JDBCPartitioningInfo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.productPrefix:()Ljava/lang/String;]
JDBCPartitioningInfo.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.toString:()Ljava/lang/String;]
JDBCPartitioningInfo.tupled ( ) [static] : scala.Function1<scala.Tuple4<String,Object,Object,Object>,JDBCPartitioningInfo>
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.tupled:()Lscala/Function1;]
JDBCPartitioningInfo.upperBound ( ) : long
[mangled: org/apache/spark/sql/jdbc/JDBCPartitioningInfo.upperBound:()J]
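
JDBCPartitioningInfo carries the (column, lowerBound, upperBound, numPartitions) spec that JDBCRelation.columnPartition strides into one WHERE clause per JDBCPartition. For column = "id", lowerBound = 0, upperBound = 100, numPartitions = 4:

    // illustrative stride arithmetic
    val stride = (100L - 0L) / 4  // = 25
    // one clause per partition, roughly:
    //   id < 25
    //   id >= 25 AND id < 50
    //   id >= 50 AND id < 75
    //   id >= 75

so JDBCRDD can issue four independent SELECTs, one per partition.
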
spark-sql_2.10-1.3.0.jar, JDBCRDD.class
package org.apache.spark.sql.jdbc
JDBCRDD.BinaryConversion ( ) : JDBCRDD.BinaryConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BinaryConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BinaryConversion$;]
JDBCRDD.BinaryLongConversion ( ) : JDBCRDD.BinaryLongConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BinaryLongConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BinaryLongConversion$;]
JDBCRDD.BooleanConversion ( ) : JDBCRDD.BooleanConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.BooleanConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$BooleanConversion$;]
JDBCRDD.compute ( org.apache.spark.Partition thePart, org.apache.spark.TaskContext context ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.compute:(Lorg/apache/spark/Partition;Lorg/apache/spark/TaskContext;)Lscala/collection/Iterator;]
JDBCRDD.DateConversion ( ) : JDBCRDD.DateConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DateConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DateConversion$;]
JDBCRDD.DecimalConversion ( ) : JDBCRDD.DecimalConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DecimalConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DecimalConversion$;]
JDBCRDD.DoubleConversion ( ) : JDBCRDD.DoubleConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.DoubleConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$DoubleConversion$;]
JDBCRDD.FloatConversion ( ) : JDBCRDD.FloatConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.FloatConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$FloatConversion$;]
JDBCRDD.getConnector ( String p1, String p2 ) [static] : scala.Function0<java.sql.Connection>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getConnector:(Ljava/lang/String;Ljava/lang/String;)Lscala/Function0;]
JDBCRDD.getConversions ( org.apache.spark.sql.types.StructType schema ) : JDBCRDD.JDBCConversion[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getConversions:(Lorg/apache/spark/sql/types/StructType;)[Lorg/apache/spark/sql/jdbc/JDBCRDD$JDBCConversion;]
JDBCRDD.getPartitions ( ) : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.getPartitions:()[Lorg/apache/spark/Partition;]
JDBCRDD.IntegerConversion ( ) : JDBCRDD.IntegerConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.IntegerConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$IntegerConversion$;]
JDBCRDD.JDBCRDD ( org.apache.spark.SparkContext sc, scala.Function0<java.sql.Connection> getConnection, org.apache.spark.sql.types.StructType schema, String fqTable, String[ ] columns, org.apache.spark.sql.sources.Filter[ ] filters, org.apache.spark.Partition[ ] partitions )
[mangled: org/apache/spark/sql/jdbc/JDBCRDD."<init>":(Lorg/apache/spark/SparkContext;Lscala/Function0;Lorg/apache/spark/sql/types/StructType;Ljava/lang/String;[Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;[Lorg/apache/spark/Partition;)V]
JDBCRDD.LongConversion ( ) : JDBCRDD.LongConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.LongConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$LongConversion$;]
JDBCRDD.JDBCRDD..columnList ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..columnList:()Ljava/lang/String;]
JDBCRDD.JDBCRDD..compileFilter ( org.apache.spark.sql.sources.Filter f ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..compileFilter:(Lorg/apache/spark/sql/sources/Filter;)Ljava/lang/String;]
JDBCRDD.JDBCRDD..getWhereClause ( JDBCPartition part ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.org.apache.spark.sql.jdbc.JDBCRDD..getWhereClause:(Lorg/apache/spark/sql/jdbc/JDBCPartition;)Ljava/lang/String;]
JDBCRDD.resolveTable ( String p1, String p2 ) [static] : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.resolveTable:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/types/StructType;]
JDBCRDD.scanTable ( org.apache.spark.SparkContext p1, org.apache.spark.sql.types.StructType p2, String p3, String p4, String p5, String[ ] p6, org.apache.spark.sql.sources.Filter[ ] p7, org.apache.spark.Partition[ ] p8 ) [static] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.scanTable:(Lorg/apache/spark/SparkContext;Lorg/apache/spark/sql/types/StructType;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;[Lorg/apache/spark/Partition;)Lorg/apache/spark/rdd/RDD;]
JDBCRDD.StringConversion ( ) : JDBCRDD.StringConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.StringConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$StringConversion$;]
JDBCRDD.TimestampConversion ( ) : JDBCRDD.TimestampConversion.
[mangled: org/apache/spark/sql/jdbc/JDBCRDD.TimestampConversion:()Lorg/apache/spark/sql/jdbc/JDBCRDD$TimestampConversion$;]
spark-sql_2.10-1.3.0.jar, JDBCRelation.class
package org.apache.spark.sql.jdbc
JDBCRelation.buildScan ( String[ ] requiredColumns, org.apache.spark.sql.sources.Filter[ ] filters ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.buildScan:([Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/rdd/RDD;]
JDBCRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.canEqual:(Ljava/lang/Object;)Z]
JDBCRelation.columnPartition ( JDBCPartitioningInfo p1 ) [static] : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.columnPartition:(Lorg/apache/spark/sql/jdbc/JDBCPartitioningInfo;)[Lorg/apache/spark/Partition;]
JDBCRelation.copy ( String url, String table, org.apache.spark.Partition[ ] parts, org.apache.spark.sql.SQLContext sqlContext ) : JDBCRelation
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.copy:(Ljava/lang/String;Ljava/lang/String;[Lorg/apache/spark/Partition;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/jdbc/JDBCRelation;]
JDBCRelation.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.equals:(Ljava/lang/Object;)Z]
JDBCRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.hashCode:()I]
JDBCRelation.JDBCRelation ( String url, String table, org.apache.spark.Partition[ ] parts, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/jdbc/JDBCRelation."<init>":(Ljava/lang/String;Ljava/lang/String;[Lorg/apache/spark/Partition;Lorg/apache/spark/sql/SQLContext;)V]
JDBCRelation.parts ( ) : org.apache.spark.Partition[ ]
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.parts:()[Lorg/apache/spark/Partition;]
JDBCRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productArity:()I]
JDBCRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productElement:(I)Ljava/lang/Object;]
JDBCRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productIterator:()Lscala/collection/Iterator;]
JDBCRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.productPrefix:()Ljava/lang/String;]
JDBCRelation.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
JDBCRelation.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
JDBCRelation.table ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.table:()Ljava/lang/String;]
JDBCRelation.toString ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.toString:()Ljava/lang/String;]
JDBCRelation.url ( ) : String
[mangled: org/apache/spark/sql/jdbc/JDBCRelation.url:()Ljava/lang/String;]
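The JDBCRelation members above are internal plumbing of the 1.3.0 JDBC data source; client code normally reaches them through SQLContext rather than constructing the relation directly. A minimal sketch of that public route (the connection URL, database, and table name are placeholders, not part of this report):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object JdbcSourceExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("jdbc-example").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)

  // Spark 1.3.x: load a table through the "jdbc" data source. Internally this
  // builds a JDBCRelation whose buildScan(requiredColumns, filters) is listed
  // above; the schema comes from JDBCRDD.resolveTable.
  val df = sqlContext.load("jdbc", Map(
    "url"     -> "jdbc:postgresql://localhost:5432/testdb", // placeholder
    "dbtable" -> "people"))                                 // placeholder

  df.printSchema()
  df.filter("age < 30").show() // pushed down as a sources.LessThan where possible
  sc.stop()
}
```

None of these entry points exist in 1.0.0, which is why the calls above fail to link against the older jar.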
spark-sql_2.10-1.3.0.jar, JSONRelation.class
package org.apache.spark.sql.json
JSONRelation.buildScan ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/json/JSONRelation.buildScan:()Lorg/apache/spark/rdd/RDD;]
JSONRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/json/JSONRelation.canEqual:(Ljava/lang/Object;)Z]
JSONRelation.copy ( String path, double samplingRatio, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, org.apache.spark.sql.SQLContext sqlContext ) : JSONRelation
[mangled: org/apache/spark/sql/json/JSONRelation.copy:(Ljava/lang/String;DLscala/Option;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/json/JSONRelation;]
JSONRelation.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/json/JSONRelation.equals:(Ljava/lang/Object;)Z]
JSONRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/json/JSONRelation.hashCode:()I]
JSONRelation.insert ( org.apache.spark.sql.DataFrame data, boolean overwrite ) : void
[mangled: org/apache/spark/sql/json/JSONRelation.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
JSONRelation.JSONRelation ( String path, double samplingRatio, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/json/JSONRelation."<init>":(Ljava/lang/String;DLscala/Option;Lorg/apache/spark/sql/SQLContext;)V]
JSONRelation.JSONRelation..baseRDD ( ) : org.apache.spark.rdd.RDD<String>
[mangled: org/apache/spark/sql/json/JSONRelation.org.apache.spark.sql.json.JSONRelation..baseRDD:()Lorg/apache/spark/rdd/RDD;]
JSONRelation.path ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.path:()Ljava/lang/String;]
JSONRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/json/JSONRelation.productArity:()I]
JSONRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/json/JSONRelation.productElement:(I)Ljava/lang/Object;]
JSONRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/json/JSONRelation.productIterator:()Lscala/collection/Iterator;]
JSONRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.productPrefix:()Ljava/lang/String;]
JSONRelation.samplingRatio ( ) : double
[mangled: org/apache/spark/sql/json/JSONRelation.samplingRatio:()D]
JSONRelation.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/json/JSONRelation.schema:()Lorg/apache/spark/sql/types/StructType;]
JSONRelation.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/json/JSONRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
JSONRelation.toString ( ) : String
[mangled: org/apache/spark/sql/json/JSONRelation.toString:()Ljava/lang/String;]
JSONRelation.userSpecifiedSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/json/JSONRelation.userSpecifiedSchema:()Lscala/Option;]
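The JSONRelation constructor parameters above (samplingRatio, userSpecifiedSchema) surface through the JSON entry points on SQLContext: the input is sampled to infer a schema unless one is supplied. A minimal sketch (the path is a placeholder):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object JsonSourceExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("json-example").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)

  // jsonFile() reads newline-delimited JSON; schema inference runs over a
  // sample of the input (JSONRelation.samplingRatio above).
  val people = sqlContext.jsonFile("/tmp/people.json") // placeholder path
  people.registerTempTable("people")
  sqlContext.sql("SELECT name FROM people").show()
  sc.stop()
}
```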
spark-sql_2.10-1.3.0.jar, KryoResourcePool.class
package org.apache.spark.sql.execution
KryoResourcePool.KryoResourcePool ( int size )
[mangled: org/apache/spark/sql/execution/KryoResourcePool."<init>":(I)V]
spark-sql_2.10-1.3.0.jar, LeftSemiJoinBNL.class
package org.apache.spark.sql.execution.joins
LeftSemiJoinBNL.broadcast ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.broadcast:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.canEqual:(Ljava/lang/Object;)Z]
LeftSemiJoinBNL.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.children:()Lscala/collection/Seq;]
LeftSemiJoinBNL.condition ( ) : scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.condition:()Lscala/Option;]
LeftSemiJoinBNL.copy ( org.apache.spark.sql.execution.SparkPlan streamed, org.apache.spark.sql.execution.SparkPlan broadcast, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition ) : LeftSemiJoinBNL
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.copy:(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;)Lorg/apache/spark/sql/execution/joins/LeftSemiJoinBNL;]
LeftSemiJoinBNL.curried ( ) [static] : scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>,LeftSemiJoinBNL>>>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.curried:()Lscala/Function1;]
LeftSemiJoinBNL.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.equals:(Ljava/lang/Object;)Z]
LeftSemiJoinBNL.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.execute:()Lorg/apache/spark/rdd/RDD;]
LeftSemiJoinBNL.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.hashCode:()I]
LeftSemiJoinBNL.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinBNL.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.LeftSemiJoinBNL ( org.apache.spark.sql.execution.SparkPlan streamed, org.apache.spark.sql.execution.SparkPlan broadcast, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition )
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL."<init>":(Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;Lscala/Option;)V]
LeftSemiJoinBNL.LeftSemiJoinBNL..boundCondition ( ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.org.apache.spark.sql.execution.joins.LeftSemiJoinBNL..boundCondition:()Lscala/Function1;]
LeftSemiJoinBNL.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.output:()Lscala/collection/Seq;]
LeftSemiJoinBNL.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
LeftSemiJoinBNL.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productArity:()I]
LeftSemiJoinBNL.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productElement:(I)Ljava/lang/Object;]
LeftSemiJoinBNL.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productIterator:()Lscala/collection/Iterator;]
LeftSemiJoinBNL.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.productPrefix:()Ljava/lang/String;]
LeftSemiJoinBNL.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinBNL.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.streamed ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.streamed:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinBNL.tupled ( ) [static] : scala.Function1<scala.Tuple3<org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan,scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>>,LeftSemiJoinBNL>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, LeftSemiJoinHash.class
package org.apache.spark.sql.execution.joins
LeftSemiJoinHash.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.buildSide ( ) : package.BuildRight.
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildRight$;]
LeftSemiJoinHash.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
LeftSemiJoinHash.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
LeftSemiJoinHash.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.canEqual:(Ljava/lang/Object;)Z]
LeftSemiJoinHash.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.children:()Lscala/collection/Seq;]
LeftSemiJoinHash.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : LeftSemiJoinHash
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/LeftSemiJoinHash;]
LeftSemiJoinHash.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,LeftSemiJoinHash>>>>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.curried:()Lscala/Function1;]
LeftSemiJoinHash.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.equals:(Ljava/lang/Object;)Z]
LeftSemiJoinHash.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.execute:()Lorg/apache/spark/rdd/RDD;]
LeftSemiJoinHash.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.hashCode:()I]
LeftSemiJoinHash.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
LeftSemiJoinHash.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinHash.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.leftKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.LeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
LeftSemiJoinHash.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.output:()Lscala/collection/Seq;]
LeftSemiJoinHash.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productArity:()I]
LeftSemiJoinHash.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productElement:(I)Ljava/lang/Object;]
LeftSemiJoinHash.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productIterator:()Lscala/collection/Iterator;]
LeftSemiJoinHash.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.productPrefix:()Ljava/lang/String;]
LeftSemiJoinHash.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.requiredChildDistribution:()Lscala/collection/immutable/List;]
LeftSemiJoinHash.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.requiredChildDistribution:()Lscala/collection/Seq;]
LeftSemiJoinHash.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
LeftSemiJoinHash.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.rightKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamedKeys:()Lscala/collection/Seq;]
LeftSemiJoinHash.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
LeftSemiJoinHash.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.streamSideKeyGenerator:()Lscala/Function0;]
LeftSemiJoinHash.tupled ( ) [static] : scala.Function1<scala.Tuple4<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,LeftSemiJoinHash>
[mangled: org/apache/spark/sql/execution/joins/LeftSemiJoinHash.tupled:()Lscala/Function1;]
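LeftSemiJoinHash and LeftSemiJoinBNL are the two physical operators the 1.3.0 planner can choose for a LEFT SEMI JOIN: the hash-based variant when the condition yields equi-join keys, the broadcast nested-loop variant otherwise. A sketch that exercises the hash path (table names and contents are illustrative):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object SemiJoinExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("semijoin").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  sc.parallelize(Seq((1, 100), (2, 200))).toDF("id", "customer_id")
    .registerTempTable("orders")
  sc.parallelize(Seq((100, "ann"))).toDF("id", "name")
    .registerTempTable("customers")

  // Equi-join keys => LeftSemiJoinHash; a non-equality predicate in the ON
  // clause falls back to LeftSemiJoinBNL.
  val matched = sqlContext.sql(
    "SELECT o.id FROM orders o LEFT SEMI JOIN customers c ON o.customer_id = c.id")
  matched.explain() // prints the chosen physical join operator
  matched.show()
  sc.stop()
}
```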
spark-sql_2.10-1.3.0.jar, LessThan.class
package org.apache.spark.sql.sources
LessThan.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.attribute:()Ljava/lang/String;]
LessThan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThan.canEqual:(Ljava/lang/Object;)Z]
LessThan.copy ( String attribute, Object value ) : LessThan
[mangled: org/apache/spark/sql/sources/LessThan.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/LessThan;]
LessThan.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,LessThan>>
[mangled: org/apache/spark/sql/sources/LessThan.curried:()Lscala/Function1;]
LessThan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThan.equals:(Ljava/lang/Object;)Z]
LessThan.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LessThan.hashCode:()I]
LessThan.LessThan ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/LessThan."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
LessThan.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LessThan.productArity:()I]
LessThan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LessThan.productElement:(I)Ljava/lang/Object;]
LessThan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LessThan.productIterator:()Lscala/collection/Iterator;]
LessThan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.productPrefix:()Ljava/lang/String;]
LessThan.toString ( ) : String
[mangled: org/apache/spark/sql/sources/LessThan.toString:()Ljava/lang/String;]
LessThan.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,LessThan>
[mangled: org/apache/spark/sql/sources/LessThan.tupled:()Lscala/Function1;]
LessThan.value ( ) : Object
[mangled: org/apache/spark/sql/sources/LessThan.value:()Ljava/lang/Object;]
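LessThan belongs to the public filter algebra that 1.3.0 hands to data sources implementing PrunedFilteredScan (the buildScan(requiredColumns, filters) signature listed under JDBCRelation above). A toy relation showing how a pushed-down LessThan arrives at a source; the relation itself is illustrative, built only from the listed API:

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, Filter, LessThan, PrunedFilteredScan}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

// A toy relation over an in-memory range of integers.
class RangeRelation(val sqlContext: SQLContext) extends BaseRelation with PrunedFilteredScan {
  override def schema: StructType = StructType(StructField("n", IntegerType) :: Nil)

  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    // Honor a pushed-down LessThan("n", v) if present; filters a source
    // ignores are re-evaluated by Spark after the scan, so this stays correct.
    val upper = filters.collectFirst { case LessThan("n", v: Int) => v }.getOrElse(100)
    sqlContext.sparkContext.parallelize(0 until upper).map(Row(_))
  }
}

// Usage: sqlContext.baseRelationToDataFrame(new RangeRelation(sqlContext)).filter("n < 10")
```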
spark-sql_2.10-1.3.0.jar, LessThanOrEqual.class
package org.apache.spark.sql.sources
LessThanOrEqual.attribute ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.attribute:()Ljava/lang/String;]
LessThanOrEqual.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.canEqual:(Ljava/lang/Object;)Z]
LessThanOrEqual.copy ( String attribute, Object value ) : LessThanOrEqual
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.copy:(Ljava/lang/String;Ljava/lang/Object;)Lorg/apache/spark/sql/sources/LessThanOrEqual;]
LessThanOrEqual.curried ( ) [static] : scala.Function1<String,scala.Function1<Object,LessThanOrEqual>>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.curried:()Lscala/Function1;]
LessThanOrEqual.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.equals:(Ljava/lang/Object;)Z]
LessThanOrEqual.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.hashCode:()I]
LessThanOrEqual.LessThanOrEqual ( String attribute, Object value )
[mangled: org/apache/spark/sql/sources/LessThanOrEqual."<init>":(Ljava/lang/String;Ljava/lang/Object;)V]
LessThanOrEqual.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productArity:()I]
LessThanOrEqual.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productElement:(I)Ljava/lang/Object;]
LessThanOrEqual.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productIterator:()Lscala/collection/Iterator;]
LessThanOrEqual.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.productPrefix:()Ljava/lang/String;]
LessThanOrEqual.toString ( ) : String
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.toString:()Ljava/lang/String;]
LessThanOrEqual.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,Object>,LessThanOrEqual>
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.tupled:()Lscala/Function1;]
LessThanOrEqual.value ( ) : Object
[mangled: org/apache/spark/sql/sources/LessThanOrEqual.value:()Ljava/lang/Object;]
spark-sql_2.10-1.3.0.jar, Limit.class
package org.apache.spark.sql.execution
Limit.copy ( int limit, SparkPlan child ) : Limit
[mangled: org/apache/spark/sql/execution/Limit.copy:(ILorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/Limit;]
Limit.curried ( ) [static] : scala.Function1<Object,scala.Function1<SparkPlan,Limit>>
[mangled: org/apache/spark/sql/execution/Limit.curried:()Lscala/Function1;]
Limit.Limit ( int limit, SparkPlan child )
[mangled: org/apache/spark/sql/execution/Limit."<init>":(ILorg/apache/spark/sql/execution/SparkPlan;)V]
Limit.tupled ( ) [static] : scala.Function1<scala.Tuple2<Object,SparkPlan>,Limit>
[mangled: org/apache/spark/sql/execution/Limit.tupled:()Lscala/Function1;]
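Limit is the physical operator behind both SQL's LIMIT clause and DataFrame.limit(n). A sketch:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object LimitExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("limit").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  val df = sc.parallelize(1 to 1000).toDF("n")
  df.limit(10).explain() // the physical plan contains the Limit operator above
  df.limit(10).show()
  sc.stop()
}
```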
spark-sql_2.10-1.3.0.jar, LocalTableScan.class
package org.apache.spark.sql.execution
LocalTableScan.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LocalTableScan.canEqual:(Ljava/lang/Object;)Z]
LocalTableScan.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LocalTableScan.children:()Lscala/collection/immutable/Nil$;]
LocalTableScan.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LocalTableScan.children:()Lscala/collection/Seq;]
LocalTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows ) : LocalTableScan
[mangled: org/apache/spark/sql/execution/LocalTableScan.copy:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/LocalTableScan;]
LocalTableScan.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.Row>,LocalTableScan>>
[mangled: org/apache/spark/sql/execution/LocalTableScan.curried:()Lscala/Function1;]
LocalTableScan.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LocalTableScan.equals:(Ljava/lang/Object;)Z]
LocalTableScan.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LocalTableScan.execute:()Lorg/apache/spark/rdd/RDD;]
LocalTableScan.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/LocalTableScan.executeCollect:()[Lorg/apache/spark/sql/Row;]
LocalTableScan.executeTake ( int limit ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/LocalTableScan.executeTake:(I)[Lorg/apache/spark/sql/Row;]
LocalTableScan.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LocalTableScan.hashCode:()I]
LocalTableScan.LocalTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows )
[mangled: org/apache/spark/sql/execution/LocalTableScan."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;)V]
LocalTableScan.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LocalTableScan.output:()Lscala/collection/Seq;]
LocalTableScan.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LocalTableScan.productArity:()I]
LocalTableScan.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LocalTableScan.productElement:(I)Ljava/lang/Object;]
LocalTableScan.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LocalTableScan.productIterator:()Lscala/collection/Iterator;]
LocalTableScan.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LocalTableScan.productPrefix:()Ljava/lang/String;]
LocalTableScan.rows ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LocalTableScan.rows:()Lscala/collection/Seq;]
LocalTableScan.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.collection.Seq<org.apache.spark.sql.Row>>,LocalTableScan>
[mangled: org/apache/spark/sql/execution/LocalTableScan.tupled:()Lscala/Function1;]
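LocalTableScan runs over rows that already sit in the driver, which is why it can provide executeCollect and executeTake without launching a distributed job. In 1.3.0 a DataFrame built from a local Seq is typically planned this way; a sketch, assuming the 1.3.x implicits:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object LocalTableExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("local-table").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  // Driver-local data; take()/collect() can short-circuit the cluster.
  val df = Seq(("a", 1), ("b", 2)).toDF("key", "value")
  df.explain() // typically shows a LocalTableScan leaf
  println(df.take(1).mkString)
  sc.stop()
}
```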
spark-sql_2.10-1.3.0.jar, LogicalLocalTable.class
package org.apache.spark.sql.execution
LogicalLocalTable.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.canEqual:(Ljava/lang/Object;)Z]
LogicalLocalTable.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.children:()Lscala/collection/immutable/Nil$;]
LogicalLocalTable.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.children:()Lscala/collection/Seq;]
LogicalLocalTable.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows, org.apache.spark.sql.SQLContext sqlContext ) : LogicalLocalTable
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/LogicalLocalTable;]
LogicalLocalTable.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.equals:(Ljava/lang/Object;)Z]
LogicalLocalTable.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.hashCode:()I]
LogicalLocalTable.LogicalLocalTable ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.Row> rows, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/execution/LogicalLocalTable."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/SQLContext;)V]
LogicalLocalTable.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalLocalTable.newInstance ( ) : LogicalLocalTable
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.newInstance:()Lorg/apache/spark/sql/execution/LogicalLocalTable;]
LogicalLocalTable.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.output:()Lscala/collection/Seq;]
LogicalLocalTable.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productArity:()I]
LogicalLocalTable.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productElement:(I)Ljava/lang/Object;]
LogicalLocalTable.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productIterator:()Lscala/collection/Iterator;]
LogicalLocalTable.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.productPrefix:()Ljava/lang/String;]
LogicalLocalTable.rows ( ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.rows:()Lscala/collection/Seq;]
LogicalLocalTable.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalLocalTable.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/execution/LogicalLocalTable.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
spark-sql_2.10-1.3.0.jar, LogicalRDD.class
package org.apache.spark.sql.execution
LogicalRDD.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.canEqual:(Ljava/lang/Object;)Z]
LogicalRDD.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/LogicalRDD.children:()Lscala/collection/immutable/Nil$;]
LogicalRDD.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/LogicalRDD.children:()Lscala/collection/Seq;]
LogicalRDD.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd, org.apache.spark.sql.SQLContext sqlContext ) : LogicalRDD
[mangled: org/apache/spark/sql/execution/LogicalRDD.copy:(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/execution/LogicalRDD;]
LogicalRDD.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.equals:(Ljava/lang/Object;)Z]
LogicalRDD.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalRDD.hashCode:()I]
LogicalRDD.LogicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/execution/LogicalRDD."<init>":(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/SQLContext;)V]
LogicalRDD.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/execution/LogicalRDD.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalRDD.newInstance ( ) : LogicalRDD
[mangled: org/apache/spark/sql/execution/LogicalRDD.newInstance:()Lorg/apache/spark/sql/execution/LogicalRDD;]
LogicalRDD.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/LogicalRDD.output:()Lscala/collection/Seq;]
LogicalRDD.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/LogicalRDD.productArity:()I]
LogicalRDD.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/LogicalRDD.productElement:(I)Ljava/lang/Object;]
LogicalRDD.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/LogicalRDD.productIterator:()Lscala/collection/Iterator;]
LogicalRDD.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/LogicalRDD.productPrefix:()Ljava/lang/String;]
LogicalRDD.rdd ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/LogicalRDD.rdd:()Lorg/apache/spark/rdd/RDD;]
LogicalRDD.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : boolean
[mangled: org/apache/spark/sql/execution/LogicalRDD.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalRDD.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/execution/LogicalRDD.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
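LogicalRDD wraps an existing RDD[Row] plus a schema as a logical-plan leaf; its newInstance() re-generates attribute ids so the same RDD can appear more than once in a plan (e.g., a self-join). A sketch:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object LogicalRddExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("logical-rdd").setMaster("local[*]"))
  val sqlContext = new SQLContext(sc)

  val rows = sc.parallelize(Seq(Row("a", 1), Row("b", 2)))
  val schema = StructType(Seq(
    StructField("key", StringType),
    StructField("value", IntegerType)))

  // createDataFrame(RDD[Row], StructType) wraps the RDD in a LogicalRDD node.
  val df = sqlContext.createDataFrame(rows, schema)
  df.explain(true) // the analyzed plan shows the wrapped relation
  sc.stop()
}
```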
spark-sql_2.10-1.3.0.jar, LogicalRelation.class
package org.apache.spark.sql.sources
LogicalRelation.andThen ( scala.Function1<LogicalRelation,A> p1 ) [static] : scala.Function1<BaseRelation,A>
[mangled: org/apache/spark/sql/sources/LogicalRelation.andThen:(Lscala/Function1;)Lscala/Function1;]
LogicalRelation.attributeMap ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/LogicalRelation.attributeMap:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
LogicalRelation.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.canEqual:(Ljava/lang/Object;)Z]
LogicalRelation.compose ( scala.Function1<A,BaseRelation> p1 ) [static] : scala.Function1<A,LogicalRelation>
[mangled: org/apache/spark/sql/sources/LogicalRelation.compose:(Lscala/Function1;)Lscala/Function1;]
LogicalRelation.copy ( BaseRelation relation ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.copy:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/sources/LogicalRelation;]
LogicalRelation.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.equals:(Ljava/lang/Object;)Z]
LogicalRelation.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/LogicalRelation.hashCode:()I]
LogicalRelation.LogicalRelation ( BaseRelation relation )
[mangled: org/apache/spark/sql/sources/LogicalRelation."<init>":(Lorg/apache/spark/sql/sources/BaseRelation;)V]
LogicalRelation.newInstance ( ) : org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.newInstance:()Lorg/apache/spark/sql/catalyst/analysis/MultiInstanceRelation;]
LogicalRelation.newInstance ( ) : LogicalRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.newInstance:()Lorg/apache/spark/sql/sources/LogicalRelation;]
LogicalRelation.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/sources/LogicalRelation.output:()Lscala/collection/Seq;]
LogicalRelation.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/LogicalRelation.productArity:()I]
LogicalRelation.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/LogicalRelation.productElement:(I)Ljava/lang/Object;]
LogicalRelation.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/LogicalRelation.productIterator:()Lscala/collection/Iterator;]
LogicalRelation.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/LogicalRelation.productPrefix:()Ljava/lang/String;]
LogicalRelation.relation ( ) : BaseRelation
[mangled: org/apache/spark/sql/sources/LogicalRelation.relation:()Lorg/apache/spark/sql/sources/BaseRelation;]
LogicalRelation.sameResult ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan otherPlan ) : boolean
[mangled: org/apache/spark/sql/sources/LogicalRelation.sameResult:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)Z]
LogicalRelation.simpleString ( ) : String
[mangled: org/apache/spark/sql/sources/LogicalRelation.simpleString:()Ljava/lang/String;]
LogicalRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/sources/LogicalRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
spark-sql_2.10-1.3.0.jar, LongHashSetSerializer.class
package org.apache.spark.sql.execution
LongHashSetSerializer.LongHashSetSerializer ( )
[mangled: org/apache/spark/sql/execution/LongHashSetSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, MySQLQuirks.class
package org.apache.spark.sql.jdbc
MySQLQuirks.MySQLQuirks ( )
[mangled: org/apache/spark/sql/jdbc/MySQLQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, NanoTime.class
package org.apache.spark.sql.parquet.timestamp
NanoTime.getJulianDay ( ) : int
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.getJulianDay:()I]
NanoTime.getTimeOfDayNanos ( ) : long
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.getTimeOfDayNanos:()J]
NanoTime.NanoTime ( )
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime."<init>":()V]
NanoTime.set ( int julianDay, long timeOfDayNanos ) : NanoTime
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.set:(IJ)Lorg/apache/spark/sql/parquet/timestamp/NanoTime;]
NanoTime.toBinary ( ) : parquet.io.api.Binary
[mangled: org/apache/spark/sql/parquet/timestamp/NanoTime.toBinary:()Lparquet/io/api/Binary;]
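NanoTime models Parquet's INT96 timestamp encoding: a Julian day number plus nanoseconds within that day, serialized by toBinary as a 12-byte value. A round-trip sketch built only from the signatures listed above (Julian day 2440588 corresponds to 1970-01-01):

```scala
import org.apache.spark.sql.parquet.timestamp.NanoTime

object NanoTimeExample extends App {
  // Julian day 2440588 = 1970-01-01; 12 hours expressed in nanoseconds.
  val nt = new NanoTime().set(2440588, 12L * 3600 * 1000 * 1000 * 1000)
  assert(nt.getJulianDay == 2440588)
  assert(nt.getTimeOfDayNanos == 43200000000000L)
  val binary = nt.toBinary // the 12-byte INT96 value Parquet stores
}
```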
spark-sql_2.10-1.3.0.jar, NativeColumnType<T>.class
package org.apache.spark.sql.columnar
NativeColumnType<T>.dataType ( ) : T
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.dataType:()Lorg/apache/spark/sql/types/NativeType;]
NativeColumnType<T>.NativeColumnType ( T dataType, int typeId, int defaultSize )
[mangled: org/apache/spark/sql/columnar/NativeColumnType<T>.org.apache.spark.sql.columnar.NativeColumnType:(Lorg/apache/spark/sql/types/NativeType;II)V]
spark-sql_2.10-1.3.0.jar, NoQuirks.class
package org.apache.spark.sql.jdbc
NoQuirks.NoQuirks ( )
[mangled: org/apache/spark/sql/jdbc/NoQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, Not.class
package org.apache.spark.sql.sources
Not.andThen ( scala.Function1<Not,A> p1 ) [static] : scala.Function1<Filter,A>
[mangled: org/apache/spark/sql/sources/Not.andThen:(Lscala/Function1;)Lscala/Function1;]
Not.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Not.canEqual:(Ljava/lang/Object;)Z]
Not.child ( ) : Filter
[mangled: org/apache/spark/sql/sources/Not.child:()Lorg/apache/spark/sql/sources/Filter;]
Not.compose ( scala.Function1<A,Filter> p1 ) [static] : scala.Function1<A,Not>
[mangled: org/apache/spark/sql/sources/Not.compose:(Lscala/Function1;)Lscala/Function1;]
Not.copy ( Filter child ) : Not
[mangled: org/apache/spark/sql/sources/Not.copy:(Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/Not;]
Not.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Not.equals:(Ljava/lang/Object;)Z]
Not.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/Not.hashCode:()I]
Not.Not ( Filter child )
[mangled: org/apache/spark/sql/sources/Not."<init>":(Lorg/apache/spark/sql/sources/Filter;)V]
Not.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/Not.productArity:()I]
Not.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/Not.productElement:(I)Ljava/lang/Object;]
Not.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/Not.productIterator:()Lscala/collection/Iterator;]
Not.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/Not.productPrefix:()Ljava/lang/String;]
Not.toString ( ) : String
[mangled: org/apache/spark/sql/sources/Not.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, NullableColumnBuilder.class
package org.apache.spark.sql.columnar
NullableColumnBuilder.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
NullableColumnBuilder.buildNonNulls ( ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.buildNonNulls:()Ljava/nio/ByteBuffer;]
NullableColumnBuilder.nullCount ( ) [abstract] : int
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.nullCount:()I]
NullableColumnBuilder.nullCount_.eq ( int p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.nullCount_.eq:(I)V]
NullableColumnBuilder.nulls ( ) [abstract] : java.nio.ByteBuffer
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.nulls:()Ljava/nio/ByteBuffer;]
NullableColumnBuilder.nulls_.eq ( java.nio.ByteBuffer p1 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.nulls_.eq:(Ljava/nio/ByteBuffer;)V]
NullableColumnBuilder.NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.Row p1, int p2 ) [abstract] : void
[mangled: org/apache/spark/sql/columnar/NullableColumnBuilder.org.apache.spark.sql.columnar.NullableColumnBuilder..super.appendFrom:(Lorg/apache/spark/sql/Row;I)V]
spark-sql_2.10-1.3.0.jar, OpenHashSetSerializer.class
package org.apache.spark.sql.execution
OpenHashSetSerializer.OpenHashSetSerializer ( )
[mangled: org/apache/spark/sql/execution/OpenHashSetSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, Or.class
package org.apache.spark.sql.sources
Or.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Or.canEqual:(Ljava/lang/Object;)Z]
Or.copy ( Filter left, Filter right ) : Or
[mangled: org/apache/spark/sql/sources/Or.copy:(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/sql/sources/Or;]
Or.curried ( ) [static] : scala.Function1<Filter,scala.Function1<Filter,Or>>
[mangled: org/apache/spark/sql/sources/Or.curried:()Lscala/Function1;]
Or.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/Or.equals:(Ljava/lang/Object;)Z]
Or.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/Or.hashCode:()I]
Or.left ( ) : Filter
[mangled: org/apache/spark/sql/sources/Or.left:()Lorg/apache/spark/sql/sources/Filter;]
Or.Or ( Filter left, Filter right )
[mangled: org/apache/spark/sql/sources/Or."<init>":(Lorg/apache/spark/sql/sources/Filter;Lorg/apache/spark/sql/sources/Filter;)V]
Or.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/Or.productArity:()I]
Or.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/Or.productElement:(I)Ljava/lang/Object;]
Or.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/Or.productIterator:()Lscala/collection/Iterator;]
Or.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/Or.productPrefix:()Ljava/lang/String;]
Or.right ( ) : Filter
[mangled: org/apache/spark/sql/sources/Or.right:()Lorg/apache/spark/sql/sources/Filter;]
Or.toString ( ) : String
[mangled: org/apache/spark/sql/sources/Or.toString:()Ljava/lang/String;]
Or.tupled ( ) [static] : scala.Function1<scala.Tuple2<Filter,Filter>,Or>
[mangled: org/apache/spark/sql/sources/Or.tupled:()Lscala/Function1;]
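Not and Or make the source filters a small composable algebra, and because they are case classes, pushdown handling reduces to pattern matching. A self-contained sketch using only the types listed in this report:

```scala
import org.apache.spark.sql.sources.{Filter, LessThan, Not, Or}

object FilterAlgebraExample extends App {
  // WHERE NOT (age < 18 OR score < 50), as a data-source filter tree.
  val f: Filter = Not(Or(LessThan("age", 18), LessThan("score", 50)))

  // Case-class structure makes traversal a recursive pattern match.
  def describe(f: Filter): String = f match {
    case LessThan(a, v) => s"$a < $v"
    case Not(child)     => s"NOT (${describe(child)})"
    case Or(l, r)       => s"(${describe(l)}) OR (${describe(r)})"
    case _              => "unsupported"
  }

  println(describe(f)) // NOT ((age < 18) OR (score < 50))
}
```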
spark-sql_2.10-1.3.0.jar, OutputFaker.class
package org.apache.spark.sql.execution
OutputFaker.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/OutputFaker.canEqual:(Ljava/lang/Object;)Z]
OutputFaker.child ( ) : SparkPlan
[mangled: org/apache/spark/sql/execution/OutputFaker.child:()Lorg/apache/spark/sql/execution/SparkPlan;]
OutputFaker.children ( ) : scala.collection.immutable.List<SparkPlan>
[mangled: org/apache/spark/sql/execution/OutputFaker.children:()Lscala/collection/immutable/List;]
OutputFaker.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/OutputFaker.children:()Lscala/collection/Seq;]
OutputFaker.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child ) : OutputFaker
[mangled: org/apache/spark/sql/execution/OutputFaker.copy:(Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/OutputFaker;]
OutputFaker.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<SparkPlan,OutputFaker>>
[mangled: org/apache/spark/sql/execution/OutputFaker.curried:()Lscala/Function1;]
OutputFaker.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/OutputFaker.equals:(Ljava/lang/Object;)Z]
OutputFaker.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/OutputFaker.execute:()Lorg/apache/spark/rdd/RDD;]
OutputFaker.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/OutputFaker.hashCode:()I]
OutputFaker.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/OutputFaker.output:()Lscala/collection/Seq;]
OutputFaker.OutputFaker ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, SparkPlan child )
[mangled: org/apache/spark/sql/execution/OutputFaker."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
OutputFaker.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/OutputFaker.productArity:()I]
OutputFaker.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/OutputFaker.productElement:(I)Ljava/lang/Object;]
OutputFaker.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/OutputFaker.productIterator:()Lscala/collection/Iterator;]
OutputFaker.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/OutputFaker.productPrefix:()Ljava/lang/String;]
OutputFaker.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SparkPlan>,OutputFaker>
[mangled: org/apache/spark/sql/execution/OutputFaker.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ParquetRelation.class
package org.apache.spark.sql.parquet
ParquetRelation.attributeMap ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetRelation.attributeMap:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
ParquetRelation.conf ( ) : scala.Option<org.apache.hadoop.conf.Configuration>
[mangled: org/apache/spark/sql/parquet/ParquetRelation.conf:()Lscala/Option;]
ParquetRelation.copy ( String path, scala.Option<org.apache.hadoop.conf.Configuration> conf, org.apache.spark.sql.SQLContext sqlContext, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> partitioningAttributes ) : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.copy:(Ljava/lang/String;Lscala/Option;Lorg/apache/spark/sql/SQLContext;Lscala/collection/Seq;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.create ( String p1, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan p2, org.apache.hadoop.conf.Configuration p3, org.apache.spark.sql.SQLContext p4 ) [static] : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.create:(Ljava/lang/String;Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.createEmpty ( String p1, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p2, boolean p3, org.apache.hadoop.conf.Configuration p4, org.apache.spark.sql.SQLContext p5 ) [static] : ParquetRelation
[mangled: org/apache/spark/sql/parquet/ParquetRelation.createEmpty:(Ljava/lang/String;Lscala/collection/Seq;ZLorg/apache/hadoop/conf/Configuration;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/parquet/ParquetRelation;]
ParquetRelation.ParquetRelation ( String path, scala.Option<org.apache.hadoop.conf.Configuration> conf, org.apache.spark.sql.SQLContext sqlContext, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> partitioningAttributes )
[mangled: org/apache/spark/sql/parquet/ParquetRelation."<init>":(Ljava/lang/String;Lscala/Option;Lorg/apache/spark/sql/SQLContext;Lscala/collection/Seq;)V]
ParquetRelation.partitioningAttributes ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetRelation.partitioningAttributes:()Lscala/collection/Seq;]
ParquetRelation.shortParquetCompressionCodecNames ( ) [static] : scala.collection.immutable.Map<String,parquet.hadoop.metadata.CompressionCodecName>
[mangled: org/apache/spark/sql/parquet/ParquetRelation.shortParquetCompressionCodecNames:()Lscala/collection/immutable/Map;]
ParquetRelation.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
ParquetRelation.statistics ( ) : org.apache.spark.sql.catalyst.plans.logical.Statistics
[mangled: org/apache/spark/sql/parquet/ParquetRelation.statistics:()Lorg/apache/spark/sql/catalyst/plans/logical/Statistics;]
spark-sql_2.10-1.3.0.jar, ParquetRelation2.class
package org.apache.spark.sql.parquet
ParquetRelation2.buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> predicates ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.buildScan:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/rdd/RDD;]
ParquetRelation2.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.canEqual:(Ljava/lang/Object;)Z]
ParquetRelation2.copy ( scala.collection.Seq<String> paths, scala.collection.immutable.Map<String,String> parameters, scala.Option<org.apache.spark.sql.types.StructType> maybeSchema, scala.Option<PartitionSpec> maybePartitionSpec, org.apache.spark.sql.SQLContext sqlContext ) : ParquetRelation2
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.copy:(Lscala/collection/Seq;Lscala/collection/immutable/Map;Lscala/Option;Lscala/Option;Lorg/apache/spark/sql/SQLContext;)Lorg/apache/spark/sql/parquet/ParquetRelation2;]
ParquetRelation2.DEFAULT_PARTITION_NAME ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.DEFAULT_PARTITION_NAME:()Ljava/lang/String;]
ParquetRelation2.equals ( Object other ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.equals:(Ljava/lang/Object;)Z]
ParquetRelation2.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.hashCode:()I]
ParquetRelation2.insert ( org.apache.spark.sql.DataFrame data, boolean overwrite ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.insert:(Lorg/apache/spark/sql/DataFrame;Z)V]
ParquetRelation2.isPartitioned ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.isPartitioned:()Z]
ParquetRelation2.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.isTraceEnabled:()Z]
ParquetRelation2.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.log:()Lorg/slf4j/Logger;]
ParquetRelation2.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logDebug:(Lscala/Function0;)V]
ParquetRelation2.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logError:(Lscala/Function0;)V]
ParquetRelation2.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logInfo:(Lscala/Function0;)V]
ParquetRelation2.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logName ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logName:()Ljava/lang/String;]
ParquetRelation2.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logTrace:(Lscala/Function0;)V]
ParquetRelation2.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logWarning:(Lscala/Function0;)V]
ParquetRelation2.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
ParquetRelation2.maybePartitionSpec ( ) : scala.Option<PartitionSpec>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.maybePartitionSpec:()Lscala/Option;]
ParquetRelation2.maybeSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.maybeSchema:()Lscala/Option;]
ParquetRelation2.MERGE_SCHEMA ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.MERGE_SCHEMA:()Ljava/lang/String;]
ParquetRelation2.newJobContext ( org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.mapreduce.JobID jobId ) : org.apache.hadoop.mapreduce.JobContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newJobContext:(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/mapreduce/JobID;)Lorg/apache/hadoop/mapreduce/JobContext;]
ParquetRelation2.newTaskAttemptContext ( org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID attemptId ) : org.apache.hadoop.mapreduce.TaskAttemptContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newTaskAttemptContext:(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/mapreduce/TaskAttemptID;)Lorg/apache/hadoop/mapreduce/TaskAttemptContext;]
ParquetRelation2.newTaskAttemptID ( String jtIdentifier, int jobId, boolean isMap, int taskId, int attemptId ) : org.apache.hadoop.mapreduce.TaskAttemptID
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.newTaskAttemptID:(Ljava/lang/String;IZII)Lorg/apache/hadoop/mapreduce/TaskAttemptID;]
ParquetRelation2.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
ParquetRelation2.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
ParquetRelation2.ParquetRelation2..defaultPartitionName ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..defaultPartitionName:()Ljava/lang/String;]
ParquetRelation2.ParquetRelation2..isSummaryFile ( org.apache.hadoop.fs.Path file ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..isSummaryFile:(Lorg/apache/hadoop/fs/Path;)Z]
ParquetRelation2.ParquetRelation2..maybeMetastoreSchema ( ) : scala.Option<org.apache.spark.sql.types.StructType>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..maybeMetastoreSchema:()Lscala/Option;]
ParquetRelation2.ParquetRelation2..metadataCache ( ) : ParquetRelation2.MetadataCache
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..metadataCache:()Lorg/apache/spark/sql/parquet/ParquetRelation2$MetadataCache;]
ParquetRelation2.ParquetRelation2..shouldMergeSchemas ( ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.org.apache.spark.sql.parquet.ParquetRelation2..shouldMergeSchemas:()Z]
ParquetRelation2.parameters ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.parameters:()Lscala/collection/immutable/Map;]
ParquetRelation2.ParquetRelation2 ( scala.collection.Seq<String> paths, scala.collection.immutable.Map<String,String> parameters, scala.Option<org.apache.spark.sql.types.StructType> maybeSchema, scala.Option<PartitionSpec> maybePartitionSpec, org.apache.spark.sql.SQLContext sqlContext )
[mangled: org/apache/spark/sql/parquet/ParquetRelation2."<init>":(Lscala/collection/Seq;Lscala/collection/immutable/Map;Lscala/Option;Lscala/Option;Lorg/apache/spark/sql/SQLContext;)V]
ParquetRelation2.partitionColumns ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitionColumns:()Lorg/apache/spark/sql/types/StructType;]
ParquetRelation2.partitions ( ) : scala.collection.Seq<Partition>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitions:()Lscala/collection/Seq;]
ParquetRelation2.partitionSpec ( ) : PartitionSpec
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.partitionSpec:()Lorg/apache/spark/sql/parquet/PartitionSpec;]
ParquetRelation2.paths ( ) : scala.collection.Seq<String>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.paths:()Lscala/collection/Seq;]
ParquetRelation2.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productArity:()I]
ParquetRelation2.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productElement:(I)Ljava/lang/Object;]
ParquetRelation2.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productIterator:()Lscala/collection/Iterator;]
ParquetRelation2.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.productPrefix:()Ljava/lang/String;]
ParquetRelation2.schema ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.schema:()Lorg/apache/spark/sql/types/StructType;]
ParquetRelation2.sizeInBytes ( ) : long
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sizeInBytes:()J]
ParquetRelation2.sparkContext ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sparkContext:()Lorg/apache/spark/SparkContext;]
ParquetRelation2.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
ParquetRelation2.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetRelation2.toString:()Ljava/lang/String;]
spark-sql_2.10-1.3.0.jar, ParquetTableScan.class
package org.apache.spark.sql.parquet
ParquetTableScan.attributes ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.attributes:()Lscala/collection/Seq;]
ParquetTableScan.columnPruningPred ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.columnPruningPred:()Lscala/collection/Seq;]
ParquetTableScan.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, ParquetRelation relation, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> columnPruningPred ) : ParquetTableScan
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.copy:(Lscala/collection/Seq;Lorg/apache/spark/sql/parquet/ParquetRelation;Lscala/collection/Seq;)Lorg/apache/spark/sql/parquet/ParquetTableScan;]
ParquetTableScan.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<ParquetRelation,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,ParquetTableScan>>>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.curried:()Lscala/Function1;]
ParquetTableScan.ParquetTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes, ParquetRelation relation, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> columnPruningPred )
[mangled: org/apache/spark/sql/parquet/ParquetTableScan."<init>":(Lscala/collection/Seq;Lorg/apache/spark/sql/parquet/ParquetRelation;Lscala/collection/Seq;)V]
ParquetTableScan.requestedPartitionOrdinals ( ) : scala.Tuple2<Object,Object>[ ]
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.requestedPartitionOrdinals:()[Lscala/Tuple2;]
ParquetTableScan.tupled ( ) [static] : scala.Function1<scala.Tuple3<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,ParquetRelation,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>>,ParquetTableScan>
[mangled: org/apache/spark/sql/parquet/ParquetTableScan.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ParquetTest.class
package org.apache.spark.sql.parquet
ParquetTest.configuration ( ) [abstract] : org.apache.hadoop.conf.Configuration
[mangled: org/apache/spark/sql/parquet/ParquetTest.configuration:()Lorg/apache/hadoop/conf/Configuration;]
ParquetTest.makeParquetFile ( org.apache.spark.sql.DataFrame p1, java.io.File p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.makeParquetFile:(Lorg/apache/spark/sql/DataFrame;Ljava/io/File;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.makeParquetFile ( scala.collection.Seq<T> p1, java.io.File p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.makeParquetFile:(Lscala/collection/Seq;Ljava/io/File;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.makePartitionDir ( java.io.File p1, String p2, scala.collection.Seq<scala.Tuple2<String,Object>> p3 ) [abstract] : java.io.File
[mangled: org/apache/spark/sql/parquet/ParquetTest.makePartitionDir:(Ljava/io/File;Ljava/lang/String;Lscala/collection/Seq;)Ljava/io/File;]
ParquetTest.sqlContext ( ) [abstract] : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/parquet/ParquetTest.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
ParquetTest.withParquetDataFrame ( scala.collection.Seq<T> p1, scala.Function1<org.apache.spark.sql.DataFrame,scala.runtime.BoxedUnit> p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetDataFrame:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withParquetFile ( scala.collection.Seq<T> p1, scala.Function1<String,scala.runtime.BoxedUnit> p2, scala.reflect.ClassTag<T> p3, scala.reflect.api.TypeTags.TypeTag<T> p4 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetFile:(Lscala/collection/Seq;Lscala/Function1;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withParquetTable ( scala.collection.Seq<T> p1, String p2, scala.Function0<scala.runtime.BoxedUnit> p3, scala.reflect.ClassTag<T> p4, scala.reflect.api.TypeTags.TypeTag<T> p5 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withParquetTable:(Lscala/collection/Seq;Ljava/lang/String;Lscala/Function0;Lscala/reflect/ClassTag;Lscala/reflect/api/TypeTags$TypeTag;)V]
ParquetTest.withSQLConf ( scala.collection.Seq<scala.Tuple2<String,String>> p1, scala.Function0<scala.runtime.BoxedUnit> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withSQLConf:(Lscala/collection/Seq;Lscala/Function0;)V]
ParquetTest.withTempDir ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> p1 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempDir:(Lscala/Function1;)V]
ParquetTest.withTempPath ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> p1 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempPath:(Lscala/Function1;)V]
ParquetTest.withTempTable ( String p1, scala.Function0<scala.runtime.BoxedUnit> p2 ) [abstract] : void
[mangled: org/apache/spark/sql/parquet/ParquetTest.withTempTable:(Ljava/lang/String;Lscala/Function0;)V]
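Note: the ParquetTest methods above form a mix-in test harness. A minimal sketch of a 1.3.0-style suite using it; ScalaTest's FunSuite and the SparkContext are assumptions supplied by the test environment, and at the source level the trait is package-private to org.apache.spark.sql in 1.3.0 (binary clients see it as public, which is why it appears in this report). Code like this links against 1.3.0 but not 1.0.0, where the trait is absent:

    package org.apache.spark.sql.hypothetical  // inside the sql package tree, since the trait is private[sql]

    import org.apache.spark.SparkContext
    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.parquet.ParquetTest
    import org.scalatest.FunSuite

    class ParquetRoundTripSuite extends FunSuite with ParquetTest {
      def sc: SparkContext = ???  // assumed to be provided by the shared test harness
      lazy val sqlContext: SQLContext = new SQLContext(sc)

      test("round trip") {
        // Registers a temp table backed by a throwaway Parquet file, then drops it.
        withParquetTable((1 to 4).map(i => (i, i.toString)), "t") {
          assert(sqlContext.sql("SELECT _2 FROM t WHERE _1 = 1").count() == 1)
        }
      }
    }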
spark-sql_2.10-1.3.0.jar, ParquetTypeInfo.class
package org.apache.spark.sql.parquet
ParquetTypeInfo.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.canEqual:(Ljava/lang/Object;)Z]
ParquetTypeInfo.copy ( parquet.schema.PrimitiveType.PrimitiveTypeName primitiveType, scala.Option<parquet.schema.OriginalType> originalType, scala.Option<parquet.schema.DecimalMetadata> decimalMetadata, scala.Option<Object> length ) : ParquetTypeInfo
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.copy:(Lparquet/schema/PrimitiveType$PrimitiveTypeName;Lscala/Option;Lscala/Option;Lscala/Option;)Lorg/apache/spark/sql/parquet/ParquetTypeInfo;]
ParquetTypeInfo.curried ( ) [static] : scala.Function1<parquet.schema.PrimitiveType.PrimitiveTypeName,scala.Function1<scala.Option<parquet.schema.OriginalType>,scala.Function1<scala.Option<parquet.schema.DecimalMetadata>,scala.Function1<scala.Option<Object>,ParquetTypeInfo>>>>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.curried:()Lscala/Function1;]
ParquetTypeInfo.decimalMetadata ( ) : scala.Option<parquet.schema.DecimalMetadata>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.decimalMetadata:()Lscala/Option;]
ParquetTypeInfo.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.equals:(Ljava/lang/Object;)Z]
ParquetTypeInfo.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.hashCode:()I]
ParquetTypeInfo.length ( ) : scala.Option<Object>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.length:()Lscala/Option;]
ParquetTypeInfo.originalType ( ) : scala.Option<parquet.schema.OriginalType>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.originalType:()Lscala/Option;]
ParquetTypeInfo.ParquetTypeInfo ( parquet.schema.PrimitiveType.PrimitiveTypeName primitiveType, scala.Option<parquet.schema.OriginalType> originalType, scala.Option<parquet.schema.DecimalMetadata> decimalMetadata, scala.Option<Object> length )
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo."<init>":(Lparquet/schema/PrimitiveType$PrimitiveTypeName;Lscala/Option;Lscala/Option;Lscala/Option;)V]
ParquetTypeInfo.primitiveType ( ) : parquet.schema.PrimitiveType.PrimitiveTypeName
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.primitiveType:()Lparquet/schema/PrimitiveType$PrimitiveTypeName;]
ParquetTypeInfo.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productArity:()I]
ParquetTypeInfo.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productElement:(I)Ljava/lang/Object;]
ParquetTypeInfo.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productIterator:()Lscala/collection/Iterator;]
ParquetTypeInfo.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.productPrefix:()Ljava/lang/String;]
ParquetTypeInfo.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.toString:()Ljava/lang/String;]
ParquetTypeInfo.tupled ( ) [static] : scala.Function1<scala.Tuple4<parquet.schema.PrimitiveType.PrimitiveTypeName,scala.Option<parquet.schema.OriginalType>,scala.Option<parquet.schema.DecimalMetadata>,scala.Option<Object>>,ParquetTypeInfo>
[mangled: org/apache/spark/sql/parquet/ParquetTypeInfo.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, Partition.class
package org.apache.spark.sql.parquet
Partition.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/Partition.canEqual:(Ljava/lang/Object;)Z]
Partition.copy ( org.apache.spark.sql.Row values, String path ) : Partition
[mangled: org/apache/spark/sql/parquet/Partition.copy:(Lorg/apache/spark/sql/Row;Ljava/lang/String;)Lorg/apache/spark/sql/parquet/Partition;]
Partition.curried ( ) [static] : scala.Function1<org.apache.spark.sql.Row,scala.Function1<String,Partition>>
[mangled: org/apache/spark/sql/parquet/Partition.curried:()Lscala/Function1;]
Partition.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/Partition.equals:(Ljava/lang/Object;)Z]
Partition.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/Partition.hashCode:()I]
Partition.Partition ( org.apache.spark.sql.Row values, String path )
[mangled: org/apache/spark/sql/parquet/Partition."<init>":(Lorg/apache/spark/sql/Row;Ljava/lang/String;)V]
Partition.path ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.path:()Ljava/lang/String;]
Partition.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/Partition.productArity:()I]
Partition.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/Partition.productElement:(I)Ljava/lang/Object;]
Partition.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/Partition.productIterator:()Lscala/collection/Iterator;]
Partition.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.productPrefix:()Ljava/lang/String;]
Partition.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/Partition.toString:()Ljava/lang/String;]
Partition.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.Row,String>,Partition>
[mangled: org/apache/spark/sql/parquet/Partition.tupled:()Lscala/Function1;]
Partition.values ( ) : org.apache.spark.sql.Row
[mangled: org/apache/spark/sql/parquet/Partition.values:()Lorg/apache/spark/sql/Row;]
spark-sql_2.10-1.3.0.jar, PartitionSpec.class
package org.apache.spark.sql.parquet
PartitionSpec.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/PartitionSpec.canEqual:(Ljava/lang/Object;)Z]
PartitionSpec.copy ( org.apache.spark.sql.types.StructType partitionColumns, scala.collection.Seq<Partition> partitions ) : PartitionSpec
[mangled: org/apache/spark/sql/parquet/PartitionSpec.copy:(Lorg/apache/spark/sql/types/StructType;Lscala/collection/Seq;)Lorg/apache/spark/sql/parquet/PartitionSpec;]
PartitionSpec.curried ( ) [static] : scala.Function1<org.apache.spark.sql.types.StructType,scala.Function1<scala.collection.Seq<Partition>,PartitionSpec>>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.curried:()Lscala/Function1;]
PartitionSpec.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/parquet/PartitionSpec.equals:(Ljava/lang/Object;)Z]
PartitionSpec.hashCode ( ) : int
[mangled: org/apache/spark/sql/parquet/PartitionSpec.hashCode:()I]
PartitionSpec.partitionColumns ( ) : org.apache.spark.sql.types.StructType
[mangled: org/apache/spark/sql/parquet/PartitionSpec.partitionColumns:()Lorg/apache/spark/sql/types/StructType;]
PartitionSpec.partitions ( ) : scala.collection.Seq<Partition>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.partitions:()Lscala/collection/Seq;]
PartitionSpec.PartitionSpec ( org.apache.spark.sql.types.StructType partitionColumns, scala.collection.Seq<Partition> partitions )
[mangled: org/apache/spark/sql/parquet/PartitionSpec."<init>":(Lorg/apache/spark/sql/types/StructType;Lscala/collection/Seq;)V]
PartitionSpec.productArity ( ) : int
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productArity:()I]
PartitionSpec.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productElement:(I)Ljava/lang/Object;]
PartitionSpec.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productIterator:()Lscala/collection/Iterator;]
PartitionSpec.productPrefix ( ) : String
[mangled: org/apache/spark/sql/parquet/PartitionSpec.productPrefix:()Ljava/lang/String;]
PartitionSpec.toString ( ) : String
[mangled: org/apache/spark/sql/parquet/PartitionSpec.toString:()Ljava/lang/String;]
PartitionSpec.tupled ( ) [static] : scala.Function1<scala.Tuple2<org.apache.spark.sql.types.StructType,scala.collection.Seq<Partition>>,PartitionSpec>
[mangled: org/apache/spark/sql/parquet/PartitionSpec.tupled:()Lscala/Function1;]
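Note: Partition and PartitionSpec are plain case classes, so the generated members above (apply/copy/curried/tupled) behave as usual. A minimal sketch of building a spec by hand; the column name and path are illustrative:

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.parquet.{Partition, PartitionSpec}
    import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

    // One integer partition column `a`, with a single discovered directory a=1.
    val partitionColumns = StructType(Seq(StructField("a", IntegerType, nullable = false)))
    val spec = PartitionSpec(
      partitionColumns,
      Seq(Partition(Row(1), "hdfs://nn/warehouse/t/a=1")))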
spark-sql_2.10-1.3.0.jar, PartitionStatistics.class
package org.apache.spark.sql.columnar
PartitionStatistics.forAttribute ( ) : org.apache.spark.sql.catalyst.expressions.AttributeMap<ColumnStatisticsSchema>
[mangled: org/apache/spark/sql/columnar/PartitionStatistics.forAttribute:()Lorg/apache/spark/sql/catalyst/expressions/AttributeMap;]
PartitionStatistics.PartitionStatistics ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> tableSchema )
[mangled: org/apache/spark/sql/columnar/PartitionStatistics."<init>":(Lscala/collection/Seq;)V]
PartitionStatistics.schema ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/columnar/PartitionStatistics.schema:()Lscala/collection/Seq;]
spark-sql_2.10-1.3.0.jar, PhysicalRDD.class
package org.apache.spark.sql.execution
PhysicalRDD.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PhysicalRDD.canEqual:(Ljava/lang/Object;)Z]
PhysicalRDD.children ( ) : scala.collection.immutable.Nil.
[mangled: org/apache/spark/sql/execution/PhysicalRDD.children:()Lscala/collection/immutable/Nil$;]
PhysicalRDD.children ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/PhysicalRDD.children:()Lscala/collection/Seq;]
PhysicalRDD.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd ) : PhysicalRDD
[mangled: org/apache/spark/sql/execution/PhysicalRDD.copy:(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/execution/PhysicalRDD;]
PhysicalRDD.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,scala.Function1<org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>,PhysicalRDD>>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.curried:()Lscala/Function1;]
PhysicalRDD.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PhysicalRDD.equals:(Ljava/lang/Object;)Z]
PhysicalRDD.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.execute:()Lorg/apache/spark/rdd/RDD;]
PhysicalRDD.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/PhysicalRDD.hashCode:()I]
PhysicalRDD.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.output:()Lscala/collection/Seq;]
PhysicalRDD.PhysicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> rdd )
[mangled: org/apache/spark/sql/execution/PhysicalRDD."<init>":(Lscala/collection/Seq;Lorg/apache/spark/rdd/RDD;)V]
PhysicalRDD.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productArity:()I]
PhysicalRDD.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productElement:(I)Ljava/lang/Object;]
PhysicalRDD.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productIterator:()Lscala/collection/Iterator;]
PhysicalRDD.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/PhysicalRDD.productPrefix:()Ljava/lang/String;]
PhysicalRDD.rdd ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.rdd:()Lorg/apache/spark/rdd/RDD;]
PhysicalRDD.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>>,PhysicalRDD>
[mangled: org/apache/spark/sql/execution/PhysicalRDD.tupled:()Lscala/Function1;]
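Note: PhysicalRDD is the leaf node that adapts an ordinary RDD[Row] into the physical plan. A minimal construction sketch against the 1.3.0 signature above (internal API; the attribute name is illustrative):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
    import org.apache.spark.sql.execution.PhysicalRDD
    import org.apache.spark.sql.types.StringType

    // Wrap an existing RDD of single-string rows as a scannable plan node.
    def leafPlan(rows: RDD[Row]): PhysicalRDD = {
      val output: Seq[Attribute] = Seq(AttributeReference("value", StringType)())
      PhysicalRDD(output, rows)
    }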
spark-sql_2.10-1.3.0.jar, PostgresQuirks.class
package org.apache.spark.sql.jdbc
PostgresQuirks.PostgresQuirks ( )
[mangled: org/apache/spark/sql/jdbc/PostgresQuirks."<init>":()V]
spark-sql_2.10-1.3.0.jar, PreWriteCheck.class
package org.apache.spark.sql.sources
PreWriteCheck.andThen ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcDJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcDJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcFJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcFJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcID.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcID.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcIF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcIF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcII.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcII.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcIJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcIJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcJJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcJJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVD.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVF.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVI.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcVJ.sp ( scala.Function1<scala.runtime.BoxedUnit,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcVJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZD.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZF.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZI.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.andThen.mcZJ.sp ( scala.Function1<Object,A> g ) : scala.Function1<Object,A>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.andThen.mcZJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.apply ( Object v1 ) : Object
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply:(Ljava/lang/Object;)Ljava/lang/Object;]
PreWriteCheck.apply ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply:(Lorg/apache/spark/sql/catalyst/plans/logical/LogicalPlan;)V]
PreWriteCheck.apply.mcDD.sp ( double v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDD.sp:(D)D]
PreWriteCheck.apply.mcDF.sp ( float v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDF.sp:(F)D]
PreWriteCheck.apply.mcDI.sp ( int v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDI.sp:(I)D]
PreWriteCheck.apply.mcDJ.sp ( long v1 ) : double
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcDJ.sp:(J)D]
PreWriteCheck.apply.mcFD.sp ( double v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFD.sp:(D)F]
PreWriteCheck.apply.mcFF.sp ( float v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFF.sp:(F)F]
PreWriteCheck.apply.mcFI.sp ( int v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFI.sp:(I)F]
PreWriteCheck.apply.mcFJ.sp ( long v1 ) : float
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcFJ.sp:(J)F]
PreWriteCheck.apply.mcID.sp ( double v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcID.sp:(D)I]
PreWriteCheck.apply.mcIF.sp ( float v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcIF.sp:(F)I]
PreWriteCheck.apply.mcII.sp ( int v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcII.sp:(I)I]
PreWriteCheck.apply.mcIJ.sp ( long v1 ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcIJ.sp:(J)I]
PreWriteCheck.apply.mcJD.sp ( double v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJD.sp:(D)J]
PreWriteCheck.apply.mcJF.sp ( float v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJF.sp:(F)J]
PreWriteCheck.apply.mcJI.sp ( int v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJI.sp:(I)J]
PreWriteCheck.apply.mcJJ.sp ( long v1 ) : long
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcJJ.sp:(J)J]
PreWriteCheck.apply.mcVD.sp ( double v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVD.sp:(D)V]
PreWriteCheck.apply.mcVF.sp ( float v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVF.sp:(F)V]
PreWriteCheck.apply.mcVI.sp ( int v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVI.sp:(I)V]
PreWriteCheck.apply.mcVJ.sp ( long v1 ) : void
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcVJ.sp:(J)V]
PreWriteCheck.apply.mcZD.sp ( double v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZD.sp:(D)Z]
PreWriteCheck.apply.mcZF.sp ( float v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZF.sp:(F)Z]
PreWriteCheck.apply.mcZI.sp ( int v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZI.sp:(I)Z]
PreWriteCheck.apply.mcZJ.sp ( long v1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.apply.mcZJ.sp:(J)Z]
PreWriteCheck.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.canEqual:(Ljava/lang/Object;)Z]
PreWriteCheck.catalog ( ) : org.apache.spark.sql.catalyst.analysis.Catalog
[mangled: org/apache/spark/sql/sources/PreWriteCheck.catalog:()Lorg/apache/spark/sql/catalyst/analysis/Catalog;]
PreWriteCheck.compose ( scala.Function1<A,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcDJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcDJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcFJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcFJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcID.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcID.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcIF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcIF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcII.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcII.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcIJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcIJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcJJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcJJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcVJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,scala.runtime.BoxedUnit>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcVJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZD.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZD.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZF.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZF.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZI.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZI.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.compose.mcZJ.sp ( scala.Function1<A,Object> g ) : scala.Function1<A,Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.compose.mcZJ.sp:(Lscala/Function1;)Lscala/Function1;]
PreWriteCheck.copy ( org.apache.spark.sql.catalyst.analysis.Catalog catalog ) : PreWriteCheck
[mangled: org/apache/spark/sql/sources/PreWriteCheck.copy:(Lorg/apache/spark/sql/catalyst/analysis/Catalog;)Lorg/apache/spark/sql/sources/PreWriteCheck;]
PreWriteCheck.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/PreWriteCheck.equals:(Ljava/lang/Object;)Z]
PreWriteCheck.failAnalysis ( String msg ) : scala.runtime.Nothing.
[mangled: org/apache/spark/sql/sources/PreWriteCheck.failAnalysis:(Ljava/lang/String;)Lscala/runtime/Nothing$;]
PreWriteCheck.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.hashCode:()I]
PreWriteCheck.PreWriteCheck ( org.apache.spark.sql.catalyst.analysis.Catalog catalog )
[mangled: org/apache/spark/sql/sources/PreWriteCheck."<init>":(Lorg/apache/spark/sql/catalyst/analysis/Catalog;)V]
PreWriteCheck.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productArity:()I]
PreWriteCheck.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productElement:(I)Ljava/lang/Object;]
PreWriteCheck.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productIterator:()Lscala/collection/Iterator;]
PreWriteCheck.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/PreWriteCheck.productPrefix:()Ljava/lang/String;]
PreWriteCheck.toString ( ) : String
[mangled: org/apache/spark/sql/sources/PreWriteCheck.toString:()Ljava/lang/String;]
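Note: the long run of andThen/compose/apply specializations above is just Scala's Function1 machinery; PreWriteCheck is a case class implementing Function1<LogicalPlan, BoxedUnit>. A minimal sketch of invoking the rule directly (it raises an analysis error via failAnalysis on an invalid write; the type is package-private to org.apache.spark.sql at the source level in 1.3.0):

    import org.apache.spark.sql.catalyst.analysis.Catalog
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.sources.PreWriteCheck

    // Apply the pre-write sanity rule to an analyzed plan.
    def checkWrite(catalog: Catalog, plan: LogicalPlan): Unit =
      PreWriteCheck(catalog)(plan)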
spark-sql_2.10-1.3.0.jar, Project.class
package org.apache.spark.sql.execution
Project.buildProjection ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/Project.buildProjection:()Lscala/Function0;]
spark-sql_2.10-1.3.0.jar, PrunedFilteredScan.class
package org.apache.spark.sql.sources
PrunedFilteredScan.buildScan ( String[ ] p1, Filter[ ] p2 ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/PrunedFilteredScan.buildScan:([Ljava/lang/String;[Lorg/apache/spark/sql/sources/Filter;)Lorg/apache/spark/rdd/RDD;]
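Note: PrunedFilteredScan is the scan contract a 1.3.0 external data source implements to receive column pruning and pushed-down filters. A minimal sketch; the empty scan body is a placeholder for real data access:

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.sources.{BaseRelation, Filter, PrunedFilteredScan}
    import org.apache.spark.sql.types.StructType

    class MyRelation(val sqlContext: SQLContext, mySchema: StructType)
      extends BaseRelation with PrunedFilteredScan {

      override def schema: StructType = mySchema

      // A real source would read only `requiredColumns` and evaluate `filters` at the scan.
      override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] =
        sqlContext.sparkContext.emptyRDD[Row]
    }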
spark-sql_2.10-1.3.0.jar, PythonUDF.class
package org.apache.spark.sql.execution
PythonUDF.accumulator ( ) : org.apache.spark.Accumulator<java.util.List<byte[ ]>>
[mangled: org/apache/spark/sql/execution/PythonUDF.accumulator:()Lorg/apache/spark/Accumulator;]
PythonUDF.broadcastVars ( ) : java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>
[mangled: org/apache/spark/sql/execution/PythonUDF.broadcastVars:()Ljava/util/List;]
PythonUDF.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PythonUDF.canEqual:(Ljava/lang/Object;)Z]
PythonUDF.children ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/PythonUDF.children:()Lscala/collection/Seq;]
PythonUDF.command ( ) : byte[ ]
[mangled: org/apache/spark/sql/execution/PythonUDF.command:()[B]
PythonUDF.copy ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children ) : PythonUDF
[mangled: org/apache/spark/sql/execution/PythonUDF.copy:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/PythonUDF;]
PythonUDF.curried ( ) [static] : scala.Function1<String,scala.Function1<byte[ ],scala.Function1<java.util.Map<String,String>,scala.Function1<java.util.List<String>,scala.Function1<String,scala.Function1<java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,scala.Function1<org.apache.spark.Accumulator<java.util.List<byte[ ]>>,scala.Function1<org.apache.spark.sql.types.DataType,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,PythonUDF>>>>>>>>>
[mangled: org/apache/spark/sql/execution/PythonUDF.curried:()Lscala/Function1;]
PythonUDF.dataType ( ) : org.apache.spark.sql.types.DataType
[mangled: org/apache/spark/sql/execution/PythonUDF.dataType:()Lorg/apache/spark/sql/types/DataType;]
PythonUDF.envVars ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/execution/PythonUDF.envVars:()Ljava/util/Map;]
PythonUDF.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/PythonUDF.equals:(Ljava/lang/Object;)Z]
PythonUDF.eval ( org.apache.spark.sql.Row input ) : Object
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/Row;)Ljava/lang/Object;]
PythonUDF.eval ( org.apache.spark.sql.Row input ) : scala.runtime.Nothing.
[mangled: org/apache/spark/sql/execution/PythonUDF.eval:(Lorg/apache/spark/sql/Row;)Lscala/runtime/Nothing$;]
PythonUDF.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/PythonUDF.hashCode:()I]
PythonUDF.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/execution/PythonUDF.isTraceEnabled:()Z]
PythonUDF.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/execution/PythonUDF.log:()Lorg/slf4j/Logger;]
PythonUDF.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logDebug:(Lscala/Function0;)V]
PythonUDF.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
PythonUDF.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logError:(Lscala/Function0;)V]
PythonUDF.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
PythonUDF.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logInfo:(Lscala/Function0;)V]
PythonUDF.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
PythonUDF.logName ( ) : String
[mangled: org/apache/spark/sql/execution/PythonUDF.logName:()Ljava/lang/String;]
PythonUDF.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logTrace:(Lscala/Function0;)V]
PythonUDF.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
PythonUDF.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logWarning:(Lscala/Function0;)V]
PythonUDF.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
PythonUDF.name ( ) : String
[mangled: org/apache/spark/sql/execution/PythonUDF.name:()Ljava/lang/String;]
PythonUDF.nullable ( ) : boolean
[mangled: org/apache/spark/sql/execution/PythonUDF.nullable:()Z]
PythonUDF.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/execution/PythonUDF.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
PythonUDF.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/execution/PythonUDF.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
PythonUDF.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/PythonUDF.productArity:()I]
PythonUDF.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/PythonUDF.productElement:(I)Ljava/lang/Object;]
PythonUDF.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/PythonUDF.productIterator:()Lscala/collection/Iterator;]
PythonUDF.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/PythonUDF.productPrefix:()Ljava/lang/String;]
PythonUDF.pythonExec ( ) : String
[mangled: org/apache/spark/sql/execution/PythonUDF.pythonExec:()Ljava/lang/String;]
PythonUDF.pythonIncludes ( ) : java.util.List<String>
[mangled: org/apache/spark/sql/execution/PythonUDF.pythonIncludes:()Ljava/util/List;]
PythonUDF.PythonUDF ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, org.apache.spark.sql.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children )
[mangled: org/apache/spark/sql/execution/PythonUDF."<init>":(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;Lscala/collection/Seq;)V]
PythonUDF.toString ( ) : String
[mangled: org/apache/spark/sql/execution/PythonUDF.toString:()Ljava/lang/String;]
PythonUDF.tupled ( ) [static] : scala.Function1<scala.Tuple9<String,byte[ ],java.util.Map<String,String>,java.util.List<String>,String,java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,org.apache.spark.Accumulator<java.util.List<byte[ ]>>,org.apache.spark.sql.types.DataType,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>>,PythonUDF>
[mangled: org/apache/spark/sql/execution/PythonUDF.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, RefreshTable.class
package org.apache.spark.sql.sources
RefreshTable.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/RefreshTable.canEqual:(Ljava/lang/Object;)Z]
RefreshTable.copy ( String databaseName, String tableName ) : RefreshTable
[mangled: org/apache/spark/sql/sources/RefreshTable.copy:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/sources/RefreshTable;]
RefreshTable.curried ( ) [static] : scala.Function1<String,scala.Function1<String,RefreshTable>>
[mangled: org/apache/spark/sql/sources/RefreshTable.curried:()Lscala/Function1;]
RefreshTable.databaseName ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.databaseName:()Ljava/lang/String;]
RefreshTable.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/RefreshTable.equals:(Ljava/lang/Object;)Z]
RefreshTable.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/RefreshTable.hashCode:()I]
RefreshTable.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/RefreshTable.productArity:()I]
RefreshTable.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/RefreshTable.productElement:(I)Ljava/lang/Object;]
RefreshTable.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/RefreshTable.productIterator:()Lscala/collection/Iterator;]
RefreshTable.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.productPrefix:()Ljava/lang/String;]
RefreshTable.RefreshTable ( String databaseName, String tableName )
[mangled: org/apache/spark/sql/sources/RefreshTable."<init>":(Ljava/lang/String;Ljava/lang/String;)V]
RefreshTable.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/RefreshTable.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
RefreshTable.tableName ( ) : String
[mangled: org/apache/spark/sql/sources/RefreshTable.tableName:()Ljava/lang/String;]
RefreshTable.tupled ( ) [static] : scala.Function1<scala.Tuple2<String,String>,RefreshTable>
[mangled: org/apache/spark/sql/sources/RefreshTable.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, RelationProvider.class
package org.apache.spark.sql.sources
RelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/RelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/BaseRelation;]
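Note: RelationProvider is the entry point the 1.3.0 resolver instantiates for a data source; by convention the class is named DefaultSource so that a bare package name resolves to it. A minimal sketch (the single-column schema is illustrative):

    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.sources.{BaseRelation, RelationProvider}
    import org.apache.spark.sql.types.{StringType, StructField, StructType}

    class DefaultSource extends RelationProvider {
      override def createRelation(
          ctx: SQLContext,
          parameters: Map[String, String]): BaseRelation =
        new BaseRelation {
          override def sqlContext: SQLContext = ctx
          override def schema: StructType =
            StructType(Seq(StructField("value", StringType)))
        }
    }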
spark-sql_2.10-1.3.0.jar, ResolvedDataSource.class
package org.apache.spark.sql.sources
ResolvedDataSource.apply ( org.apache.spark.sql.SQLContext p1, scala.Option<org.apache.spark.sql.types.StructType> p2, String p3, scala.collection.immutable.Map<String,String> p4 ) [static] : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.apply:(Lorg/apache/spark/sql/SQLContext;Lscala/Option;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.apply ( org.apache.spark.sql.SQLContext p1, String p2, org.apache.spark.sql.SaveMode p3, scala.collection.immutable.Map<String,String> p4, org.apache.spark.sql.DataFrame p5 ) [static] : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.apply:(Lorg/apache/spark/sql/SQLContext;Ljava/lang/String;Lorg/apache/spark/sql/SaveMode;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/DataFrame;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.canEqual:(Ljava/lang/Object;)Z]
ResolvedDataSource.copy ( Class<?> provider, BaseRelation relation ) : ResolvedDataSource
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.copy:(Ljava/lang/Class;Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/sources/ResolvedDataSource;]
ResolvedDataSource.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.equals:(Ljava/lang/Object;)Z]
ResolvedDataSource.hashCode ( ) : int
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.hashCode:()I]
ResolvedDataSource.lookupDataSource ( String p1 ) [static] : Class<?>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.lookupDataSource:(Ljava/lang/String;)Ljava/lang/Class;]
ResolvedDataSource.productArity ( ) : int
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productArity:()I]
ResolvedDataSource.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productElement:(I)Ljava/lang/Object;]
ResolvedDataSource.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productIterator:()Lscala/collection/Iterator;]
ResolvedDataSource.productPrefix ( ) : String
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.productPrefix:()Ljava/lang/String;]
ResolvedDataSource.provider ( ) : Class<?>
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.provider:()Ljava/lang/Class;]
ResolvedDataSource.relation ( ) : BaseRelation
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.relation:()Lorg/apache/spark/sql/sources/BaseRelation;]
ResolvedDataSource.ResolvedDataSource ( Class<?> provider, BaseRelation relation )
[mangled: org/apache/spark/sql/sources/ResolvedDataSource."<init>":(Ljava/lang/Class;Lorg/apache/spark/sql/sources/BaseRelation;)V]
ResolvedDataSource.toString ( ) : String
[mangled: org/apache/spark/sql/sources/ResolvedDataSource.toString:()Ljava/lang/String;]
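Note: ResolvedDataSource carries the resolved (provider class, relation) pair, and the static members above do the lookup and instantiation. A minimal sketch of the lookup step; the provider string is illustrative, and the companion is package-private to org.apache.spark.sql at the source level in 1.3.0:

    import org.apache.spark.sql.sources.ResolvedDataSource

    // Resolves either a fully qualified provider class or a package name
    // whose DefaultSource class implements the provider interface.
    val providerClass: Class[_] =
      ResolvedDataSource.lookupDataSource("org.apache.spark.sql.parquet")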
spark-sql_2.10-1.3.0.jar, RowRecordMaterializer.class
package org.apache.spark.sql.parquet
RowRecordMaterializer.RowRecordMaterializer ( parquet.schema.MessageType parquetSchema, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes )
[mangled: org/apache/spark/sql/parquet/RowRecordMaterializer."<init>":(Lparquet/schema/MessageType;Lscala/collection/Seq;)V]
spark-sql_2.10-1.3.0.jar, RowWriteSupport.class
package org.apache.spark.sql.parquet
RowWriteSupport.attributes ( ) : org.apache.spark.sql.catalyst.expressions.Attribute[ ]
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes:()[Lorg/apache/spark/sql/catalyst/expressions/Attribute;]
RowWriteSupport.attributes_.eq ( org.apache.spark.sql.catalyst.expressions.Attribute[ ] p1 ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.attributes_.eq:([Lorg/apache/spark/sql/catalyst/expressions/Attribute;)V]
RowWriteSupport.getSchema ( org.apache.hadoop.conf.Configuration p1 ) [static] : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.getSchema:(Lorg/apache/hadoop/conf/Configuration;)Lscala/collection/Seq;]
RowWriteSupport.logName ( ) : String
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.logName:()Ljava/lang/String;]
RowWriteSupport.setSchema ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> p1, org.apache.hadoop.conf.Configuration p2 ) [static] : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.setSchema:(Lscala/collection/Seq;Lorg/apache/hadoop/conf/Configuration;)V]
RowWriteSupport.SPARK_ROW_SCHEMA ( ) [static] : String
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.SPARK_ROW_SCHEMA:()Ljava/lang/String;]
RowWriteSupport.write ( org.apache.spark.sql.Row record ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.write:(Lorg/apache/spark/sql/Row;)V]
RowWriteSupport.writeArray ( org.apache.spark.sql.types.ArrayType schema, scala.collection.Seq<Object> array ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeArray:(Lorg/apache/spark/sql/types/ArrayType;Lscala/collection/Seq;)V]
RowWriteSupport.writeDecimal ( org.apache.spark.sql.types.Decimal decimal, int precision ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeDecimal:(Lorg/apache/spark/sql/types/Decimal;I)V]
RowWriteSupport.writeMap ( org.apache.spark.sql.types.MapType schema, scala.collection.immutable.Map<?,Object> map ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeMap:(Lorg/apache/spark/sql/types/MapType;Lscala/collection/immutable/Map;)V]
RowWriteSupport.writePrimitive ( org.apache.spark.sql.types.DataType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writePrimitive:(Lorg/apache/spark/sql/types/DataType;Ljava/lang/Object;)V]
RowWriteSupport.writeStruct ( org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.Row struct ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeStruct:(Lorg/apache/spark/sql/types/StructType;Lorg/apache/spark/sql/Row;)V]
RowWriteSupport.writeTimestamp ( java.sql.Timestamp ts ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeTimestamp:(Ljava/sql/Timestamp;)V]
RowWriteSupport.writeValue ( org.apache.spark.sql.types.DataType schema, Object value ) : void
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writeValue:(Lorg/apache/spark/sql/types/DataType;Ljava/lang/Object;)V]
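Note: the two static helpers above are how a 1.3.0 Parquet write job ships its schema to tasks: setSchema serializes the attributes into the job Configuration and getSchema reads them back. A minimal round-trip sketch (the companion object is package-private to org.apache.spark.sql.parquet at the source level):

    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.sql.catalyst.expressions.Attribute
    import org.apache.spark.sql.parquet.RowWriteSupport

    // Stash the write schema in the Hadoop conf, then recover it task-side.
    def roundTrip(schema: Seq[Attribute]): Seq[Attribute] = {
      val conf = new Configuration()
      RowWriteSupport.setSchema(schema, conf)
      RowWriteSupport.getSchema(conf)
    }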
spark-sql_2.10-1.3.0.jar, RunnableCommand.class
package org.apache.spark.sql.execution
RunnableCommand.run ( org.apache.spark.sql.SQLContext p1 ) [abstract] : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/RunnableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
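Note: this single abstract method is the whole contract for eagerly executed commands: the planner hands the session's SQLContext to run and wraps the returned rows. A minimal sketch of a hypothetical implementation; RefreshTable above and SetCommand below follow the same shape (the trait may be package-private to org.apache.spark.sql at the source level in 1.3.0):

    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.execution.RunnableCommand

    // A do-nothing command returning no rows.
    case class NoopCommand() extends RunnableCommand {
      override def run(sqlContext: SQLContext): Seq[Row] = Seq.empty
    }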
spark-sql_2.10-1.3.0.jar, SaveMode.class
package org.apache.spark.sql
SaveMode.valueOf ( String name ) [static] : SaveMode
[mangled: org/apache/spark/sql/SaveMode.valueOf:(Ljava/lang/String;)Lorg/apache/spark/sql/SaveMode;]
SaveMode.values ( ) [static] : SaveMode[ ]
[mangled: org/apache/spark/sql/SaveMode.values:()[Lorg/apache/spark/sql/SaveMode;]
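Note: SaveMode is a plain Java enum, so valueOf and values are the compiler-generated members. A short sketch:

    import org.apache.spark.sql.SaveMode

    // Parse a mode by constant name and enumerate all constants.
    val mode: SaveMode = SaveMode.valueOf("Overwrite")
    val allModes: Array[SaveMode] = SaveMode.values()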
spark-sql_2.10-1.3.0.jar, ScalaBigDecimalSerializer.class
package org.apache.spark.sql.execution
ScalaBigDecimalSerializer.ScalaBigDecimalSerializer ( )
[mangled: org/apache/spark/sql/execution/ScalaBigDecimalSerializer."<init>":()V]
spark-sql_2.10-1.3.0.jar, SchemaRelationProvider.class
package org.apache.spark.sql.sources
SchemaRelationProvider.createRelation ( org.apache.spark.sql.SQLContext p1, scala.collection.immutable.Map<String,String> p2, org.apache.spark.sql.types.StructType p3 ) [abstract] : BaseRelation
[mangled: org/apache/spark/sql/sources/SchemaRelationProvider.createRelation:(Lorg/apache/spark/sql/SQLContext;Lscala/collection/immutable/Map;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/sources/BaseRelation;]
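Note: SchemaRelationProvider is the variant used when the caller supplies the schema (e.g. a CREATE TEMPORARY TABLE statement with an explicit column list) rather than letting the source infer it. A minimal sketch mirroring the RelationProvider example above; a real source would define one or the other (or both) in its own package:

    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.sources.{BaseRelation, SchemaRelationProvider}
    import org.apache.spark.sql.types.StructType

    class DefaultSource extends SchemaRelationProvider {
      override def createRelation(
          ctx: SQLContext,
          parameters: Map[String, String],
          userSchema: StructType): BaseRelation =
        new BaseRelation {
          override def sqlContext: SQLContext = ctx
          override def schema: StructType = userSchema
        }
    }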
spark-sql_2.10-1.3.0.jar, SetCommand.class
package org.apache.spark.sql.execution
SetCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/SetCommand.canEqual:(Ljava/lang/Object;)Z]
SetCommand.copy ( scala.Option<scala.Tuple2<String,scala.Option<String>>> kv, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output ) : SetCommand
[mangled: org/apache/spark/sql/execution/SetCommand.copy:(Lscala/Option;Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/SetCommand;]
SetCommand.curried ( ) [static] : scala.Function1<scala.Option<scala.Tuple2<String,scala.Option<String>>>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>,SetCommand>>
[mangled: org/apache/spark/sql/execution/SetCommand.curried:()Lscala/Function1;]
SetCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/SetCommand.equals:(Ljava/lang/Object;)Z]
SetCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/SetCommand.hashCode:()I]
SetCommand.kv ( ) : scala.Option<scala.Tuple2<String,scala.Option<String>>>
[mangled: org/apache/spark/sql/execution/SetCommand.kv:()Lscala/Option;]
SetCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/SetCommand.output:()Lscala/collection/Seq;]
SetCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/SetCommand.productArity:()I]
SetCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/SetCommand.productElement:(I)Ljava/lang/Object;]
SetCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/SetCommand.productIterator:()Lscala/collection/Iterator;]
SetCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/SetCommand.productPrefix:()Ljava/lang/String;]
SetCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/SetCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
SetCommand.SetCommand ( scala.Option<scala.Tuple2<String,scala.Option<String>>> kv, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output )
[mangled: org/apache/spark/sql/execution/SetCommand."<init>":(Lscala/Option;Lscala/collection/Seq;)V]
SetCommand.tupled ( ) [static] : scala.Function1<scala.Tuple2<scala.Option<scala.Tuple2<String,scala.Option<String>>>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>>,SetCommand>
[mangled: org/apache/spark/sql/execution/SetCommand.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, ShowTablesCommand.class
package org.apache.spark.sql.execution
ShowTablesCommand.andThen ( scala.Function1<ShowTablesCommand,A> p1 ) [static] : scala.Function1<scala.Option<String>,A>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
ShowTablesCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.canEqual:(Ljava/lang/Object;)Z]
ShowTablesCommand.compose ( scala.Function1<A,scala.Option<String>> p1 ) [static] : scala.Function1<A,ShowTablesCommand>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.compose:(Lscala/Function1;)Lscala/Function1;]
ShowTablesCommand.copy ( scala.Option<String> databaseName ) : ShowTablesCommand
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.copy:(Lscala/Option;)Lorg/apache/spark/sql/execution/ShowTablesCommand;]
ShowTablesCommand.databaseName ( ) : scala.Option<String>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.databaseName:()Lscala/Option;]
ShowTablesCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.equals:(Ljava/lang/Object;)Z]
ShowTablesCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.hashCode:()I]
ShowTablesCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.output:()Lscala/collection/Seq;]
ShowTablesCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productArity:()I]
ShowTablesCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productElement:(I)Ljava/lang/Object;]
ShowTablesCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productIterator:()Lscala/collection/Iterator;]
ShowTablesCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.productPrefix:()Ljava/lang/String;]
ShowTablesCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/ShowTablesCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
ShowTablesCommand.ShowTablesCommand ( scala.Option<String> databaseName )
[mangled: org/apache/spark/sql/execution/ShowTablesCommand."<init>":(Lscala/Option;)V]
spark-sql_2.10-1.3.0.jar, ShuffledHashJoin.class
package org.apache.spark.sql.execution.joins
ShuffledHashJoin.buildKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.buildPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.buildSide ( ) : package.BuildSide
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildSide:()Lorg/apache/spark/sql/execution/joins/package$BuildSide;]
ShuffledHashJoin.buildSideKeyGenerator ( ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.buildSideKeyGenerator:()Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
ShuffledHashJoin.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.canEqual:(Ljava/lang/Object;)Z]
ShuffledHashJoin.children ( ) : scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.children:()Lscala/collection/Seq;]
ShuffledHashJoin.copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right ) : ShuffledHashJoin
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.copy:(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/joins/ShuffledHashJoin;]
ShuffledHashJoin.curried ( ) [static] : scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.Function1<package.BuildSide,scala.Function1<org.apache.spark.sql.execution.SparkPlan,scala.Function1<org.apache.spark.sql.execution.SparkPlan,ShuffledHashJoin>>>>>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.curried:()Lscala/Function1;]
ShuffledHashJoin.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.equals:(Ljava/lang/Object;)Z]
ShuffledHashJoin.execute ( ) : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.execute:()Lorg/apache/spark/rdd/RDD;]
ShuffledHashJoin.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.hashCode:()I]
ShuffledHashJoin.hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row> streamIter, HashedRelation hashedRelation ) : scala.collection.Iterator<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.hashJoin:(Lscala/collection/Iterator;Lorg/apache/spark/sql/execution/joins/HashedRelation;)Lscala/collection/Iterator;]
ShuffledHashJoin.left ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.left:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ShuffledHashJoin.left ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.left:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.leftKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.leftKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.output:()Lscala/collection/Seq;]
ShuffledHashJoin.outputPartitioning ( ) : org.apache.spark.sql.catalyst.plans.physical.Partitioning
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.outputPartitioning:()Lorg/apache/spark/sql/catalyst/plans/physical/Partitioning;]
ShuffledHashJoin.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productArity:()I]
ShuffledHashJoin.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productElement:(I)Ljava/lang/Object;]
ShuffledHashJoin.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productIterator:()Lscala/collection/Iterator;]
ShuffledHashJoin.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.productPrefix:()Ljava/lang/String;]
ShuffledHashJoin.requiredChildDistribution ( ) : scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.requiredChildDistribution:()Lscala/collection/immutable/List;]
ShuffledHashJoin.requiredChildDistribution ( ) : scala.collection.Seq
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.requiredChildDistribution:()Lscala/collection/Seq;]
ShuffledHashJoin.right ( ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.right:()Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
ShuffledHashJoin.right ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.right:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.rightKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.rightKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.ShuffledHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, package.BuildSide buildSide, org.apache.spark.sql.execution.SparkPlan left, org.apache.spark.sql.execution.SparkPlan right )
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin."<init>":(Lscala/collection/Seq;Lscala/collection/Seq;Lorg/apache/spark/sql/execution/joins/package$BuildSide;Lorg/apache/spark/sql/execution/SparkPlan;Lorg/apache/spark/sql/execution/SparkPlan;)V]
ShuffledHashJoin.streamedKeys ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamedKeys:()Lscala/collection/Seq;]
ShuffledHashJoin.streamedPlan ( ) : org.apache.spark.sql.execution.SparkPlan
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamedPlan:()Lorg/apache/spark/sql/execution/SparkPlan;]
ShuffledHashJoin.streamSideKeyGenerator ( ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.streamSideKeyGenerator:()Lscala/Function0;]
ShuffledHashJoin.tupled ( ) [static] : scala.Function1<scala.Tuple5<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>,package.BuildSide,org.apache.spark.sql.execution.SparkPlan,org.apache.spark.sql.execution.SparkPlan>,ShuffledHashJoin>
[mangled: org/apache/spark/sql/execution/joins/ShuffledHashJoin.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, SparkPlan.class
package org.apache.spark.sql.execution
SparkPlan.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/execution/SparkPlan.codegenEnabled:()Z]
SparkPlan.executeCollect ( ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeCollect:()[Lorg/apache/spark/sql/Row;]
SparkPlan.executeTake ( int n ) : org.apache.spark.sql.Row[ ]
[mangled: org/apache/spark/sql/execution/SparkPlan.executeTake:(I)[Lorg/apache/spark/sql/Row;]
SparkPlan.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/execution/SparkPlan.isTraceEnabled:()Z]
SparkPlan.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/execution/SparkPlan.log:()Lorg/slf4j/Logger;]
SparkPlan.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logDebug:(Lscala/Function0;)V]
SparkPlan.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
SparkPlan.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logError:(Lscala/Function0;)V]
SparkPlan.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
SparkPlan.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logInfo:(Lscala/Function0;)V]
SparkPlan.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
SparkPlan.logName ( ) : String
[mangled: org/apache/spark/sql/execution/SparkPlan.logName:()Ljava/lang/String;]
SparkPlan.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logTrace:(Lscala/Function0;)V]
SparkPlan.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
SparkPlan.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logWarning:(Lscala/Function0;)V]
SparkPlan.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
SparkPlan.makeCopy ( Object[ ] newArgs ) : org.apache.spark.sql.catalyst.trees.TreeNode
[mangled: org/apache/spark/sql/execution/SparkPlan.makeCopy:([Ljava/lang/Object;)Lorg/apache/spark/sql/catalyst/trees/TreeNode;]
SparkPlan.makeCopy ( Object[ ] newArgs ) : SparkPlan
[mangled: org/apache/spark/sql/execution/SparkPlan.makeCopy:([Ljava/lang/Object;)Lorg/apache/spark/sql/execution/SparkPlan;]
SparkPlan.newMutableProjection ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> inputSchema ) : scala.Function0<org.apache.spark.sql.catalyst.expressions.package.MutableProjection>
[mangled: org/apache/spark/sql/execution/SparkPlan.newMutableProjection:(Lscala/collection/Seq;Lscala/collection/Seq;)Lscala/Function0;]
SparkPlan.newOrdering ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> order, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> inputSchema ) : scala.math.Ordering<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/SparkPlan.newOrdering:(Lscala/collection/Seq;Lscala/collection/Seq;)Lscala/math/Ordering;]
SparkPlan.newPredicate ( org.apache.spark.sql.catalyst.expressions.Expression expression, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> inputSchema ) : scala.Function1<org.apache.spark.sql.Row,Object>
[mangled: org/apache/spark/sql/execution/SparkPlan.newPredicate:(Lorg/apache/spark/sql/catalyst/expressions/Expression;Lscala/collection/Seq;)Lscala/Function1;]
SparkPlan.newProjection ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> inputSchema ) : org.apache.spark.sql.catalyst.expressions.package.Projection
[mangled: org/apache/spark/sql/execution/SparkPlan.newProjection:(Lscala/collection/Seq;Lscala/collection/Seq;)Lorg/apache/spark/sql/catalyst/expressions/package$Projection;]
SparkPlan.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/execution/SparkPlan.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
SparkPlan.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/execution/SparkPlan.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
SparkPlan.sparkContext ( ) : org.apache.spark.SparkContext
[mangled: org/apache/spark/sql/execution/SparkPlan.sparkContext:()Lorg/apache/spark/SparkContext;]
SparkPlan.sqlContext ( ) : org.apache.spark.sql.SQLContext
[mangled: org/apache/spark/sql/execution/SparkPlan.sqlContext:()Lorg/apache/spark/sql/SQLContext;]
spark-sql_2.10-1.3.0.jar, SparkSQLParser.class
package org.apache.spark.sql
SparkSQLParser.AS ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.AS:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.CACHE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.CACHE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.CLEAR ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.CLEAR:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.IN ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.IN:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.LAZY ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.LAZY:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SparkSQLParser..others ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..others:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..set ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..set:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..SetCommandParser ( ) : SparkSQLParser.SetCommandParser.
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..SetCommandParser:()Lorg/apache/spark/sql/SparkSQLParser$SetCommandParser$;]
SparkSQLParser.SparkSQLParser..show ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..show:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SparkSQLParser..uncache ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.org.apache.spark.sql.SparkSQLParser..uncache:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.SET ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.SET:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SHOW ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.SHOW:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.SparkSQLParser ( scala.Function1<String,catalyst.plans.logical.LogicalPlan> fallback )
[mangled: org/apache/spark/sql/SparkSQLParser."<init>":(Lscala/Function1;)V]
SparkSQLParser.start ( ) : scala.util.parsing.combinator.Parsers.Parser<catalyst.plans.logical.LogicalPlan>
[mangled: org/apache/spark/sql/SparkSQLParser.start:()Lscala/util/parsing/combinator/Parsers$Parser;]
SparkSQLParser.TABLE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.TABLE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.TABLES ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.TABLES:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
SparkSQLParser.UNCACHE ( ) : catalyst.AbstractSparkSQLParser.Keyword
[mangled: org/apache/spark/sql/SparkSQLParser.UNCACHE:()Lorg/apache/spark/sql/catalyst/AbstractSparkSQLParser$Keyword;]
spark-sql_2.10-1.3.0.jar, SparkStrategies.class
package org.apache.spark.sql.execution
SparkStrategies.DDLStrategy ( ) : SparkStrategies.DDLStrategy.
[mangled: org/apache/spark/sql/execution/SparkStrategies.DDLStrategy:()Lorg/apache/spark/sql/execution/SparkStrategies$DDLStrategy$;]
SparkStrategies.HashAggregation ( ) : SparkStrategies.HashAggregation.
[mangled: org/apache/spark/sql/execution/SparkStrategies.HashAggregation:()Lorg/apache/spark/sql/execution/SparkStrategies$HashAggregation$;]
SparkStrategies.InMemoryScans ( ) : SparkStrategies.InMemoryScans.
[mangled: org/apache/spark/sql/execution/SparkStrategies.InMemoryScans:()Lorg/apache/spark/sql/execution/SparkStrategies$InMemoryScans$;]
SparkStrategies.LeftSemiJoin ( ) : SparkStrategies.LeftSemiJoin.
[mangled: org/apache/spark/sql/execution/SparkStrategies.LeftSemiJoin:()Lorg/apache/spark/sql/execution/SparkStrategies$LeftSemiJoin$;]
spark-sql_2.10-1.3.0.jar, SQLConf.class
package org.apache.spark.sql
SQLConf.autoBroadcastJoinThreshold ( ) : int
[mangled: org/apache/spark/sql/SQLConf.autoBroadcastJoinThreshold:()I]
SQLConf.broadcastTimeout ( ) : int
[mangled: org/apache/spark/sql/SQLConf.broadcastTimeout:()I]
SQLConf.codegenEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.codegenEnabled:()Z]
SQLConf.columnNameOfCorruptRecord ( ) : String
[mangled: org/apache/spark/sql/SQLConf.columnNameOfCorruptRecord:()Ljava/lang/String;]
SQLConf.dataFrameEagerAnalysis ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.dataFrameEagerAnalysis:()Z]
SQLConf.defaultDataSourceName ( ) : String
[mangled: org/apache/spark/sql/SQLConf.defaultDataSourceName:()Ljava/lang/String;]
SQLConf.defaultSizeInBytes ( ) : long
[mangled: org/apache/spark/sql/SQLConf.defaultSizeInBytes:()J]
SQLConf.dialect ( ) : String
[mangled: org/apache/spark/sql/SQLConf.dialect:()Ljava/lang/String;]
SQLConf.externalSortEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.externalSortEnabled:()Z]
SQLConf.getAllConfs ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/SQLConf.getAllConfs:()Lscala/collection/immutable/Map;]
SQLConf.getConf ( String key ) : String
[mangled: org/apache/spark/sql/SQLConf.getConf:(Ljava/lang/String;)Ljava/lang/String;]
SQLConf.getConf ( String key, String defaultValue ) : String
[mangled: org/apache/spark/sql/SQLConf.getConf:(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;]
SQLConf.inMemoryPartitionPruning ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.inMemoryPartitionPruning:()Z]
SQLConf.isParquetBinaryAsString ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.isParquetBinaryAsString:()Z]
SQLConf.isParquetINT96AsTimestamp ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.isParquetINT96AsTimestamp:()Z]
SQLConf.numShufflePartitions ( ) : int
[mangled: org/apache/spark/sql/SQLConf.numShufflePartitions:()I]
SQLConf.parquetCompressionCodec ( ) : String
[mangled: org/apache/spark/sql/SQLConf.parquetCompressionCodec:()Ljava/lang/String;]
SQLConf.parquetFilterPushDown ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.parquetFilterPushDown:()Z]
SQLConf.parquetUseDataSourceApi ( ) : boolean
[mangled: org/apache/spark/sql/SQLConf.parquetUseDataSourceApi:()Z]
SQLConf.setConf ( java.util.Properties props ) : void
[mangled: org/apache/spark/sql/SQLConf.setConf:(Ljava/util/Properties;)V]
SQLConf.setConf ( String key, String value ) : void
[mangled: org/apache/spark/sql/SQLConf.setConf:(Ljava/lang/String;Ljava/lang/String;)V]
SQLConf.SQLConf ( )
[mangled: org/apache/spark/sql/SQLConf."<init>":()V]
spark-sql_2.10-1.3.0.jar, SQLContext.class
package org.apache.spark.sql
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.applySchemaToPythonRDD ( org.apache.spark.rdd.RDD<Object[ ]> rdd, String schemaString ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.applySchemaToPythonRDD:(Lorg/apache/spark/rdd/RDD;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.baseRelationToDataFrame ( sources.BaseRelation baseRelation ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.baseRelationToDataFrame:(Lorg/apache/spark/sql/sources/BaseRelation;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.cacheManager ( ) : CacheManager
[mangled: org/apache/spark/sql/SQLContext.cacheManager:()Lorg/apache/spark/sql/CacheManager;]
SQLContext.checkAnalysis ( ) : catalyst.analysis.CheckAnalysis
[mangled: org/apache/spark/sql/SQLContext.checkAnalysis:()Lorg/apache/spark/sql/catalyst/analysis/CheckAnalysis;]
SQLContext.clearCache ( ) : void
[mangled: org/apache/spark/sql/SQLContext.clearCache:()V]
SQLContext.conf ( ) : SQLConf
[mangled: org/apache/spark/sql/SQLContext.conf:()Lorg/apache/spark/sql/SQLConf;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, java.util.List<String> columns ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Ljava/util/List;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.api.java.JavaRDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<?> rdd, Class<?> beanClass ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Ljava/lang/Class;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<A> rdd, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( org.apache.spark.rdd.RDD<Row> rowRDD, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createDataFrame ( scala.collection.Seq<A> data, scala.reflect.api.TypeTags.TypeTag<A> p2 ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createDataFrame:(Lscala/collection/Seq;Lscala/reflect/api/TypeTags$TypeTag;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.createExternalTable ( String tableName, String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.createExternalTable:(Ljava/lang/String;Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.ddlParser ( ) : sources.DDLParser
[mangled: org/apache/spark/sql/SQLContext.ddlParser:()Lorg/apache/spark/sql/sources/DDLParser;]
SQLContext.dropTempTable ( String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.dropTempTable:(Ljava/lang/String;)V]
SQLContext.emptyDataFrame ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.emptyDataFrame:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.emptyResult ( ) : org.apache.spark.rdd.RDD<Row>
[mangled: org/apache/spark/sql/SQLContext.emptyResult:()Lorg/apache/spark/rdd/RDD;]
SQLContext.experimental ( ) : ExperimentalMethods
[mangled: org/apache/spark/sql/SQLContext.experimental:()Lorg/apache/spark/sql/ExperimentalMethods;]
SQLContext.functionRegistry ( ) : catalyst.analysis.FunctionRegistry
[mangled: org/apache/spark/sql/SQLContext.functionRegistry:()Lorg/apache/spark/sql/catalyst/analysis/FunctionRegistry;]
SQLContext.getAllConfs ( ) : scala.collection.immutable.Map<String,String>
[mangled: org/apache/spark/sql/SQLContext.getAllConfs:()Lscala/collection/immutable/Map;]
SQLContext.getConf ( String key ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;)Ljava/lang/String;]
SQLContext.getConf ( String key, String defaultValue ) : String
[mangled: org/apache/spark/sql/SQLContext.getConf:(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;]
SQLContext.getSchema ( Class<?> beanClass ) : scala.collection.Seq<catalyst.expressions.AttributeReference>
[mangled: org/apache/spark/sql/SQLContext.getSchema:(Ljava/lang/Class;)Lscala/collection/Seq;]
SQLContext.implicits ( ) : SQLContext.implicits.
[mangled: org/apache/spark/sql/SQLContext.implicits:()Lorg/apache/spark/sql/SQLContext$implicits$;]
SQLContext.isCached ( String tableName ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isCached:(Ljava/lang/String;)Z]
SQLContext.isTraceEnabled ( ) : boolean
[mangled: org/apache/spark/sql/SQLContext.isTraceEnabled:()Z]
SQLContext.jdbc ( String url, String table ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String columnName, long lowerBound, long upperBound, int numPartitions ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;JJI)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jdbc ( String url, String table, String[ ] theParts ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jdbc:(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonFile ( String path, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonFile:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.api.java.JavaRDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/api/java/JavaRDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, double samplingRatio ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;D)Lorg/apache/spark/sql/DataFrame;]
SQLContext.jsonRDD ( org.apache.spark.rdd.RDD<String> json, types.StructType schema ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.jsonRDD:(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String path, String source ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, java.util.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Ljava/util/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, types.StructType schema, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lorg/apache/spark/sql/types/StructType;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.load ( String source, scala.collection.immutable.Map<String,String> options ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.load:(Ljava/lang/String;Lscala/collection/immutable/Map;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.log ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.log:()Lorg/slf4j/Logger;]
SQLContext.logDebug ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;)V]
SQLContext.logDebug ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logDebug:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logError ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;)V]
SQLContext.logError ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logError:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logInfo ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;)V]
SQLContext.logInfo ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logInfo:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logName ( ) : String
[mangled: org/apache/spark/sql/SQLContext.logName:()Ljava/lang/String;]
SQLContext.logTrace ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;)V]
SQLContext.logTrace ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logTrace:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.logWarning ( scala.Function0<String> msg ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;)V]
SQLContext.logWarning ( scala.Function0<String> msg, Throwable throwable ) : void
[mangled: org/apache/spark/sql/SQLContext.logWarning:(Lscala/Function0;Ljava/lang/Throwable;)V]
SQLContext.optimizer ( ) : catalyst.optimizer.Optimizer
[mangled: org/apache/spark/sql/SQLContext.optimizer:()Lorg/apache/spark/sql/catalyst/optimizer/Optimizer;]
SQLContext.org.apache.spark.Logging..log_ ( ) : org.slf4j.Logger
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log_:()Lorg/slf4j/Logger;]
SQLContext.org.apache.spark.Logging..log__.eq ( org.slf4j.Logger p1 ) : void
[mangled: org/apache/spark/sql/SQLContext.org.apache.spark.Logging..log__.eq:(Lorg/slf4j/Logger;)V]
SQLContext.parquetFile ( scala.collection.Seq<String> paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:(Lscala/collection/Seq;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parquetFile ( String... paths ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.parquetFile:([Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.parseDataType ( String dataTypeString ) : types.DataType
[mangled: org/apache/spark/sql/SQLContext.parseDataType:(Ljava/lang/String;)Lorg/apache/spark/sql/types/DataType;]
SQLContext.registerDataFrameAsTable ( DataFrame df, String tableName ) : void
[mangled: org/apache/spark/sql/SQLContext.registerDataFrameAsTable:(Lorg/apache/spark/sql/DataFrame;Ljava/lang/String;)V]
SQLContext.setConf ( java.util.Properties props ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/util/Properties;)V]
SQLContext.setConf ( String key, String value ) : void
[mangled: org/apache/spark/sql/SQLContext.setConf:(Ljava/lang/String;Ljava/lang/String;)V]
SQLContext.sql ( String sqlText ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.SQLContext ( org.apache.spark.api.java.JavaSparkContext sparkContext )
[mangled: org/apache/spark/sql/SQLContext."<init>":(Lorg/apache/spark/api/java/JavaSparkContext;)V]
SQLContext.sqlParser ( ) : SparkSQLParser
[mangled: org/apache/spark/sql/SQLContext.sqlParser:()Lorg/apache/spark/sql/SparkSQLParser;]
SQLContext.table ( String tableName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.table:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.tableNames ( ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:()[Ljava/lang/String;]
SQLContext.tableNames ( String databaseName ) : String[ ]
[mangled: org/apache/spark/sql/SQLContext.tableNames:(Ljava/lang/String;)[Ljava/lang/String;]
SQLContext.tables ( ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:()Lorg/apache/spark/sql/DataFrame;]
SQLContext.tables ( String databaseName ) : DataFrame
[mangled: org/apache/spark/sql/SQLContext.tables:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame;]
SQLContext.udf ( ) : UDFRegistration
[mangled: org/apache/spark/sql/SQLContext.udf:()Lorg/apache/spark/sql/UDFRegistration;]
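Taken together, the SQLContext entries above account for much of the client-facing gap between the two versions. A minimal, hypothetical sketch of client code compiled against 1.3.0: both calls below resolve to DataFrame-returning descriptors listed above, which 1.0.0 lacks, so the same bytecode run against 1.0.0 fails to link (NoClassDefFoundError for DataFrame itself, or NoSuchMethodError for the methods).

```java
import org.apache.spark.SparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

// Hypothetical client; compiles only against spark-sql 1.3.0.
public class TablesClient {
    public static void main(String[] args) {
        SQLContext sqlContext = new SQLContext(new SparkContext("local", "compat-demo"));
        // Resolves to SQLContext.tables:()Lorg/apache/spark/sql/DataFrame; (1.3.0-only).
        DataFrame tables = sqlContext.tables();
        // Resolves to SQLContext.sql:(Ljava/lang/String;)Lorg/apache/spark/sql/DataFrame; (1.3.0-only).
        DataFrame result = sqlContext.sql("SHOW TABLES");
        System.out.println(tables.count() + " tables; query returned " + result.count() + " rows");
    }
}
```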
spark-sql_2.10-1.3.0.jar, TableScan.class
package org.apache.spark.sql.sources
TableScan.buildScan ( ) [abstract] : org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/sources/TableScan.buildScan:()Lorg/apache/spark/rdd/RDD;]
spark-sql_2.10-1.3.0.jar, TakeOrdered.class
package org.apache.spark.sql.execution
TakeOrdered.copy ( int limit, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, SparkPlan child ) : TakeOrdered
[mangled: org/apache/spark/sql/execution/TakeOrdered.copy:(ILscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/TakeOrdered;]
TakeOrdered.curried ( ) [static] : scala.Function1<Object,scala.Function1<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,scala.Function1<SparkPlan,TakeOrdered>>>
[mangled: org/apache/spark/sql/execution/TakeOrdered.curried:()Lscala/Function1;]
TakeOrdered.ord ( ) : org.apache.spark.sql.catalyst.expressions.RowOrdering
[mangled: org/apache/spark/sql/execution/TakeOrdered.ord:()Lorg/apache/spark/sql/catalyst/expressions/RowOrdering;]
TakeOrdered.TakeOrdered ( int limit, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> sortOrder, SparkPlan child )
[mangled: org/apache/spark/sql/execution/TakeOrdered."<init>":(ILscala/collection/Seq;Lorg/apache/spark/sql/execution/SparkPlan;)V]
TakeOrdered.tupled ( ) [static] : scala.Function1<scala.Tuple3<Object,scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>,SparkPlan>,TakeOrdered>
[mangled: org/apache/spark/sql/execution/TakeOrdered.tupled:()Lscala/Function1;]
spark-sql_2.10-1.3.0.jar, TestGroupWriteSupport.class
package org.apache.spark.sql.parquet
TestGroupWriteSupport.TestGroupWriteSupport ( parquet.schema.MessageType schema )
[mangled: org/apache/spark/sql/parquet/TestGroupWriteSupport."<init>":(Lparquet/schema/MessageType;)V]
spark-sql_2.10-1.3.0.jar, TimestampColumnAccessor.class
package org.apache.spark.sql.columnar
TimestampColumnAccessor.TimestampColumnAccessor ( java.nio.ByteBuffer buffer )
[mangled: org/apache/spark/sql/columnar/TimestampColumnAccessor."<init>":(Ljava/nio/ByteBuffer;)V]
spark-sql_2.10-1.3.0.jar, TimestampColumnBuilder.class
package org.apache.spark.sql.columnar
TimestampColumnBuilder.TimestampColumnBuilder ( )
[mangled: org/apache/spark/sql/columnar/TimestampColumnBuilder."<init>":()V]
spark-sql_2.10-1.3.0.jar, TimestampColumnStats.class
package org.apache.spark.sql.columnar
TimestampColumnStats.TimestampColumnStats ( )
[mangled: org/apache/spark/sql/columnar/TimestampColumnStats."<init>":()V]
spark-sql_2.10-1.3.0.jar, UDFRegistration.class
package org.apache.spark.sql
UDFRegistration.UDFRegistration ( SQLContext sqlContext )
[mangled: org/apache/spark/sql/UDFRegistration."<init>":(Lorg/apache/spark/sql/SQLContext;)V]
spark-sql_2.10-1.3.0.jar, UncacheTableCommand.class
package org.apache.spark.sql.execution
UncacheTableCommand.andThen ( scala.Function1<UncacheTableCommand,A> p1 ) [static] : scala.Function1<String,A>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.andThen:(Lscala/Function1;)Lscala/Function1;]
UncacheTableCommand.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.canEqual:(Ljava/lang/Object;)Z]
UncacheTableCommand.compose ( scala.Function1<A,String> p1 ) [static] : scala.Function1<A,UncacheTableCommand>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.compose:(Lscala/Function1;)Lscala/Function1;]
UncacheTableCommand.copy ( String tableName ) : UncacheTableCommand
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.copy:(Ljava/lang/String;)Lorg/apache/spark/sql/execution/UncacheTableCommand;]
UncacheTableCommand.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.equals:(Ljava/lang/Object;)Z]
UncacheTableCommand.hashCode ( ) : int
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.hashCode:()I]
UncacheTableCommand.output ( ) : scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.output:()Lscala/collection/Seq;]
UncacheTableCommand.productArity ( ) : int
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productArity:()I]
UncacheTableCommand.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productElement:(I)Ljava/lang/Object;]
UncacheTableCommand.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productIterator:()Lscala/collection/Iterator;]
UncacheTableCommand.productPrefix ( ) : String
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.productPrefix:()Ljava/lang/String;]
UncacheTableCommand.run ( org.apache.spark.sql.SQLContext sqlContext ) : scala.collection.Seq<org.apache.spark.sql.Row>
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.run:(Lorg/apache/spark/sql/SQLContext;)Lscala/collection/Seq;]
UncacheTableCommand.tableName ( ) : String
[mangled: org/apache/spark/sql/execution/UncacheTableCommand.tableName:()Ljava/lang/String;]
UncacheTableCommand.UncacheTableCommand ( String tableName )
[mangled: org/apache/spark/sql/execution/UncacheTableCommand."<init>":(Ljava/lang/String;)V]
spark-sql_2.10-1.3.0.jar, Union.class
package org.apache.spark.sql.execution
Union.andThen ( scala.Function1<Union,A> p1 ) [static] : scala.Function1<scala.collection.Seq<SparkPlan>,A>
[mangled: org/apache/spark/sql/execution/Union.andThen:(Lscala/Function1;)Lscala/Function1;]
Union.compose ( scala.Function1<A,scala.collection.Seq<SparkPlan>> p1 ) [static] : scala.Function1<A,Union>
[mangled: org/apache/spark/sql/execution/Union.compose:(Lscala/Function1;)Lscala/Function1;]
Union.copy ( scala.collection.Seq<SparkPlan> children ) : Union
[mangled: org/apache/spark/sql/execution/Union.copy:(Lscala/collection/Seq;)Lorg/apache/spark/sql/execution/Union;]
Union.Union ( scala.collection.Seq<SparkPlan> children )
[mangled: org/apache/spark/sql/execution/Union."<init>":(Lscala/collection/Seq;)V]
spark-sql_2.10-1.3.0.jar, UniqueKeyHashedRelation.class
package org.apache.spark.sql.execution.joins
UniqueKeyHashedRelation.UniqueKeyHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.sql.Row> hashTable )
[mangled: org/apache/spark/sql/execution/joins/UniqueKeyHashedRelation."<init>":(Ljava/util/HashMap;)V]
spark-sql_2.10-1.3.0.jar, UserDefinedFunction.class
package org.apache.spark.sql
UserDefinedFunction.apply ( scala.collection.Seq<Column> exprs ) : Column
[mangled: org/apache/spark/sql/UserDefinedFunction.apply:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
UserDefinedFunction.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedFunction.canEqual:(Ljava/lang/Object;)Z]
UserDefinedFunction.copy ( Object f, types.DataType dataType ) : UserDefinedFunction
[mangled: org/apache/spark/sql/UserDefinedFunction.copy:(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/UserDefinedFunction;]
UserDefinedFunction.curried ( ) [static] : scala.Function1<Object,scala.Function1<types.DataType,UserDefinedFunction>>
[mangled: org/apache/spark/sql/UserDefinedFunction.curried:()Lscala/Function1;]
UserDefinedFunction.dataType ( ) : types.DataType
[mangled: org/apache/spark/sql/UserDefinedFunction.dataType:()Lorg/apache/spark/sql/types/DataType;]
UserDefinedFunction.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedFunction.equals:(Ljava/lang/Object;)Z]
UserDefinedFunction.f ( ) : Object
[mangled: org/apache/spark/sql/UserDefinedFunction.f:()Ljava/lang/Object;]
UserDefinedFunction.hashCode ( ) : int
[mangled: org/apache/spark/sql/UserDefinedFunction.hashCode:()I]
UserDefinedFunction.productArity ( ) : int
[mangled: org/apache/spark/sql/UserDefinedFunction.productArity:()I]
UserDefinedFunction.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/UserDefinedFunction.productElement:(I)Ljava/lang/Object;]
UserDefinedFunction.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/UserDefinedFunction.productIterator:()Lscala/collection/Iterator;]
UserDefinedFunction.productPrefix ( ) : String
[mangled: org/apache/spark/sql/UserDefinedFunction.productPrefix:()Ljava/lang/String;]
UserDefinedFunction.toString ( ) : String
[mangled: org/apache/spark/sql/UserDefinedFunction.toString:()Ljava/lang/String;]
UserDefinedFunction.tupled ( ) [static] : scala.Function1<scala.Tuple2<Object,types.DataType>,UserDefinedFunction>
[mangled: org/apache/spark/sql/UserDefinedFunction.tupled:()Lscala/Function1;]
UserDefinedFunction.UserDefinedFunction ( Object f, types.DataType dataType )
[mangled: org/apache/spark/sql/UserDefinedFunction."<init>":(Ljava/lang/Object;Lorg/apache/spark/sql/types/DataType;)V]
spark-sql_2.10-1.3.0.jar, UserDefinedPythonFunction.class
package org.apache.spark.sql
UserDefinedPythonFunction.accumulator ( ) : org.apache.spark.Accumulator<java.util.List<byte[ ]>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.accumulator:()Lorg/apache/spark/Accumulator;]
UserDefinedPythonFunction.apply ( scala.collection.Seq<Column> exprs ) : Column
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.apply:(Lscala/collection/Seq;)Lorg/apache/spark/sql/Column;]
UserDefinedPythonFunction.broadcastVars ( ) : java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.broadcastVars:()Ljava/util/List;]
UserDefinedPythonFunction.canEqual ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.canEqual:(Ljava/lang/Object;)Z]
UserDefinedPythonFunction.command ( ) : byte[ ]
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.command:()[B]
UserDefinedPythonFunction.copy ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, types.DataType dataType ) : UserDefinedPythonFunction
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.copy:(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;)Lorg/apache/spark/sql/UserDefinedPythonFunction;]
UserDefinedPythonFunction.curried ( ) [static] : scala.Function1<String,scala.Function1<byte[ ],scala.Function1<java.util.Map<String,String>,scala.Function1<java.util.List<String>,scala.Function1<String,scala.Function1<java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,scala.Function1<org.apache.spark.Accumulator<java.util.List<byte[ ]>>,scala.Function1<types.DataType,UserDefinedPythonFunction>>>>>>>>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.curried:()Lscala/Function1;]
UserDefinedPythonFunction.dataType ( ) : types.DataType
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.dataType:()Lorg/apache/spark/sql/types/DataType;]
UserDefinedPythonFunction.envVars ( ) : java.util.Map<String,String>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.envVars:()Ljava/util/Map;]
UserDefinedPythonFunction.equals ( Object p1 ) : boolean
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.equals:(Ljava/lang/Object;)Z]
UserDefinedPythonFunction.hashCode ( ) : int
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.hashCode:()I]
UserDefinedPythonFunction.name ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.name:()Ljava/lang/String;]
UserDefinedPythonFunction.productArity ( ) : int
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productArity:()I]
UserDefinedPythonFunction.productElement ( int p1 ) : Object
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productElement:(I)Ljava/lang/Object;]
UserDefinedPythonFunction.productIterator ( ) : scala.collection.Iterator<Object>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productIterator:()Lscala/collection/Iterator;]
UserDefinedPythonFunction.productPrefix ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.productPrefix:()Ljava/lang/String;]
UserDefinedPythonFunction.pythonExec ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.pythonExec:()Ljava/lang/String;]
UserDefinedPythonFunction.pythonIncludes ( ) : java.util.List<String>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.pythonIncludes:()Ljava/util/List;]
UserDefinedPythonFunction.toString ( ) : String
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.toString:()Ljava/lang/String;]
UserDefinedPythonFunction.tupled ( ) [static] : scala.Function1<scala.Tuple8<String,byte[ ],java.util.Map<String,String>,java.util.List<String>,String,java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>,org.apache.spark.Accumulator<java.util.List<byte[ ]>>,types.DataType>,UserDefinedPythonFunction>
[mangled: org/apache/spark/sql/UserDefinedPythonFunction.tupled:()Lscala/Function1;]
UserDefinedPythonFunction.UserDefinedPythonFunction ( String name, byte[ ] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, org.apache.spark.Accumulator<java.util.List<byte[ ]>> accumulator, types.DataType dataType )
[mangled: org/apache/spark/sql/UserDefinedPythonFunction."<init>":(Ljava/lang/String;[BLjava/util/Map;Ljava/util/List;Ljava/lang/String;Ljava/util/List;Lorg/apache/spark/Accumulator;Lorg/apache/spark/sql/types/DataType;)V]
Problems with Data Types, High Severity (151)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql
[+] CachedData (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
CachedData ( catalyst.plans.logical.LogicalPlan, columnar.InMemoryRelation ) - This constructor is from 'CachedData' class.
cachedRepresentation ( ) - This method is from 'CachedData' class.
canEqual ( java.lang.Object ) - This method is from 'CachedData' class.
copy ( catalyst.plans.logical.LogicalPlan, columnar.InMemoryRelation ) - This method is from 'CachedData' class.
curried ( ) - This method is from 'CachedData' class.
equals ( java.lang.Object ) - This method is from 'CachedData' class.
hashCode ( ) - This method is from 'CachedData' class.
plan ( ) - This method is from 'CachedData' class.
productArity ( ) - This method is from 'CachedData' class.
productElement ( int ) - This method is from 'CachedData' class.
productIterator ( ) - This method is from 'CachedData' class.
...
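The NoClassDefFoundError effect noted in the table above surfaces lazily: the JVM only resolves CachedData at the first instruction that touches it. A minimal sketch, assuming code built against 1.3.0 running with 1.0.0 on the classpath (the demo class is hypothetical):

```java
// Hypothetical demo of lazy class resolution.
public class RemovedClassDemo {
    public static void main(String[] args) {
        try {
            // Explicit lookup by name; a direct compiled reference to
            // CachedData would throw NoClassDefFoundError here instead.
            Class.forName("org.apache.spark.sql.CachedData");
            System.out.println("CachedData present (1.3.0 behaviour)");
        } catch (ClassNotFoundException e) {
            System.out.println("CachedData missing (1.0.0 behaviour): " + e);
        }
    }
}
```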
[+] CacheManager (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (9)
CacheManager ( SQLContext ) - This constructor is from 'CacheManager' class.
cacheQuery ( DataFrame, scala.Option<java.lang.String>, org.apache.spark.storage.StorageLevel ) - This method is from 'CacheManager' class.
cacheTable ( java.lang.String ) - This method is from 'CacheManager' class.
clearCache ( ) - This method is from 'CacheManager' class.
invalidateCache ( catalyst.plans.logical.LogicalPlan ) - This method is from 'CacheManager' class.
isCached ( java.lang.String ) - This method is from 'CacheManager' class.
tryUncacheQuery ( DataFrame, boolean ) - This method is from 'CacheManager' class.
uncacheTable ( java.lang.String ) - This method is from 'CacheManager' class.
useCachedData ( catalyst.plans.logical.LogicalPlan ) - This method is from 'CacheManager' class.
[+] Column (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
and ( Column ) - This method is from 'Column' class.
apply ( java.lang.String ) - This method is from 'Column' class.
apply ( catalyst.expressions.Expression ) - This method is from 'Column' class.
as ( java.lang.String ) - This method is from 'Column' class.
as ( scala.Symbol ) - This method is from 'Column' class.
asc ( ) - This method is from 'Column' class.
cast ( java.lang.String ) - This method is from 'Column' class.
cast ( types.DataType ) - This method is from 'Column' class.
Column ( java.lang.String ) - This constructor is from 'Column' class.
Column ( catalyst.expressions.Expression ) - This constructor is from 'Column' class.
contains ( java.lang.Object ) - This method is from 'Column' class.
...
[+] ColumnName (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (1)
ColumnName ( java.lang.String ) - This constructor is from 'ColumnName' class.
[+] DataFrame (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
agg ( java.util.Map<java.lang.String,java.lang.String> ) - This method is from 'DataFrame' class.
agg ( Column, Column... ) - This method is from 'DataFrame' class.
agg ( Column, scala.collection.Seq<Column> ) - This method is from 'DataFrame' class.
agg ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - This method is from 'DataFrame' class.
agg ( scala.Tuple2<java.lang.String,java.lang.String>, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> ) - This method is from 'DataFrame' class.
apply ( java.lang.String ) - This method is from 'DataFrame' class.
as ( java.lang.String ) - This method is from 'DataFrame' class.
as ( scala.Symbol ) - This method is from 'DataFrame' class.
cache ( ) - This method is from 'DataFrame' class.
cache ( ) - This method is from 'DataFrame' class.
col ( java.lang.String ) - This method is from 'DataFrame' class.
...
[+] DataFrameHolder (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<DataFrameHolder,A> ) - This method is from 'DataFrameHolder' class.
canEqual ( java.lang.Object ) - This method is from 'DataFrameHolder' class.
compose ( scala.Function1<A,DataFrame> ) - This method is from 'DataFrameHolder' class.
copy ( DataFrame ) - This method is from 'DataFrameHolder' class.
DataFrameHolder ( DataFrame ) - This constructor is from 'DataFrameHolder' class.
df ( ) - This method is from 'DataFrameHolder' class.
equals ( java.lang.Object ) - This method is from 'DataFrameHolder' class.
hashCode ( ) - This method is from 'DataFrameHolder' class.
productArity ( ) - This method is from 'DataFrameHolder' class.
productElement ( int ) - This method is from 'DataFrameHolder' class.
productIterator ( ) - This method is from 'DataFrameHolder' class.
...
[+] ExperimentalMethods (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
ExperimentalMethods ( SQLContext ) - This constructor is from 'ExperimentalMethods' class.
extraStrategies ( ) - This method is from 'ExperimentalMethods' class.
[+] GroupedData (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (6)
agg ( java.util.Map<java.lang.String,java.lang.String> ) - This method is from 'GroupedData' class.
agg ( Column, scala.collection.Seq<Column> ) - This method is from 'GroupedData' class.
agg ( scala.collection.immutable.Map<java.lang.String,java.lang.String> ) - This method is from 'GroupedData' class.
agg ( scala.Tuple2<java.lang.String,java.lang.String>, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> ) - This method is from 'GroupedData' class.
count ( ) - This method is from 'GroupedData' class.
GroupedData ( DataFrame, scala.collection.Seq<catalyst.expressions.Expression> ) - This constructor is from 'GroupedData' class.
[+] SaveMode (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (2)
valueOf ( java.lang.String ) - This method is from 'SaveMode' class.
values ( ) - This method is from 'SaveMode' class.
[+] SparkSQLParser (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
AS ( ) - This method is from 'SparkSQLParser' class.
CACHE ( ) - This method is from 'SparkSQLParser' class.
CLEAR ( ) - This method is from 'SparkSQLParser' class.
IN ( ) - This method is from 'SparkSQLParser' class.
LAZY ( ) - This method is from 'SparkSQLParser' class.
SparkSQLParser..others ( ) - This method is from 'SparkSQLParser' class.
SparkSQLParser..set ( ) - This method is from 'SparkSQLParser' class.
SparkSQLParser..SetCommandParser ( ) - This method is from 'SparkSQLParser' class.
SparkSQLParser..show ( ) - This method is from 'SparkSQLParser' class.
SparkSQLParser..uncache ( ) - This method is from 'SparkSQLParser' class.
SET ( ) - This method is from 'SparkSQLParser' class.
...
[+] SQLConf (1)
| # | Change | Effect |
|---|--------|--------|
| 1 | This class has been removed. | A client program may be interrupted by a NoClassDefFoundError exception. |
[+] affected methods (>10)
autoBroadcastJoinThreshold ( ) - This method is from 'SQLConf' class.
broadcastTimeout ( ) - This method is from 'SQLConf' class.
codegenEnabled ( ) - This method is from 'SQLConf' class.
columnNameOfCorruptRecord ( ) - This method is from 'SQLConf' class.
dataFrameEagerAnalysis ( ) - This method is from 'SQLConf' class.
defaultDataSourceName ( ) - This method is from 'SQLConf' class.
defaultSizeInBytes ( ) - This method is from 'SQLConf' class.
dialect ( ) - This method is from 'SQLConf' class.
externalSortEnabled ( ) - This method is from 'SQLConf' class.
getAllConfs ( ) - This method is from 'SQLConf' class.
getConf ( java.lang.String ) - This method is from 'SQLConf' class.
...
[+] SQLContext (1)
| Change | Effect |
---|
1 | Removed super-interface org.apache.spark.Logging. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (>10)
analyzer ( )This method is from 'SQLContext' class.
cacheTable ( java.lang.String )This method is from 'SQLContext' class.
catalog ( )This method is from 'SQLContext' class.
executePlan ( catalyst.plans.logical.LogicalPlan )This method is from 'SQLContext' class.
executeSql ( java.lang.String )This method is from 'SQLContext' class.
parseSql ( java.lang.String )This method is from 'SQLContext' class.
planner ( )This method is from 'SQLContext' class.
prepareForExecution ( )This method is from 'SQLContext' class.
sparkContext ( )This method is from 'SQLContext' class.
SQLContext ( org.apache.spark.SparkContext )This constructor is from 'SQLContext' class.
uncacheTable ( java.lang.String )This method is from 'SQLContext' class.
...
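A dropped super-interface is subtler than an outright removal: SQLContext still loads and instantiates, and the failure only appears when a client calls one of the methods it used to inherit from org.apache.spark.Logging. Because both class names resolve in either version (Logging lives in spark-core, not in this jar), the changed subtype relation can be tested up front; a small sketch:
```java
public final class LoggingCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> sqlContext = Class.forName("org.apache.spark.sql.SQLContext");
        Class<?> logging = Class.forName("org.apache.spark.Logging");
        // true where SQLContext still mixes in Logging; false after the removal
        // reported above, where the inherited log methods no longer link.
        System.out.println(logging.isAssignableFrom(sqlContext));
    }
}
```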
[+] SQLContext.QueryExecution (1)
| Change | Effect |
---|
1 | This class became abstract. | A client program may be interrupted by InstantiationError exception. |
[+] affected methods (2)
executePlan ( catalyst.plans.logical.LogicalPlan )Return value of this method has type 'SQLContext.QueryExecution'.
executeSql ( java.lang.String )Return value of this method has type 'SQLContext.QueryExecution'.
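A class that becomes abstract keeps loading and its methods keep linking; only bytecode that was compiled against the concrete class and still executes a direct `new` fails, with InstantiationError at the construction site. The modifier is visible reflectively; note the `$` in the binary name of the nested class:
```java
import java.lang.reflect.Modifier;

public final class QueryExecutionCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> qe = Class.forName("org.apache.spark.sql.SQLContext$QueryExecution");
        if (Modifier.isAbstract(qe.getModifiers())) {
            // Obtain instances from executePlan(...)/executeSql(...) instead of
            // constructing QueryExecution directly.
            System.out.println("QueryExecution is abstract in this version");
        }
    }
}
```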
[+] UDFRegistration (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
UDFRegistration ( SQLContext )This constructor is from 'UDFRegistration' class.
[+] UserDefinedFunction (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
apply ( scala.collection.Seq<Column> )This method is from 'UserDefinedFunction' class.
canEqual ( java.lang.Object )This method is from 'UserDefinedFunction' class.
copy ( java.lang.Object, types.DataType )This method is from 'UserDefinedFunction' class.
curried ( )This method is from 'UserDefinedFunction' class.
dataType ( )This method is from 'UserDefinedFunction' class.
equals ( java.lang.Object )This method is from 'UserDefinedFunction' class.
f ( )This method is from 'UserDefinedFunction' class.
hashCode ( )This method is from 'UserDefinedFunction' class.
productArity ( )This method is from 'UserDefinedFunction' class.
productElement ( int )This method is from 'UserDefinedFunction' class.
productIterator ( )This method is from 'UserDefinedFunction' class.
...
[+] UserDefinedPythonFunction (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
accumulator ( )This method is from 'UserDefinedPythonFunction' class.
apply ( scala.collection.Seq<Column> )This method is from 'UserDefinedPythonFunction' class.
broadcastVars ( )This method is from 'UserDefinedPythonFunction' class.
canEqual ( java.lang.Object )This method is from 'UserDefinedPythonFunction' class.
command ( )This method is from 'UserDefinedPythonFunction' class.
copy ( java.lang.String, byte[ ], java.util.Map<java.lang.String,java.lang.String>, java.util.List<java.lang.String>, java.lang.String, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>, org.apache.spark.Accumulator<java.util.List<byte[ ]>>, types.DataType )This method is from 'UserDefinedPythonFunction' class.
curried ( )This method is from 'UserDefinedPythonFunction' class.
dataType ( )This method is from 'UserDefinedPythonFunction' class.
envVars ( )This method is from 'UserDefinedPythonFunction' class.
equals ( java.lang.Object )This method is from 'UserDefinedPythonFunction' class.
hashCode ( )This method is from 'UserDefinedPythonFunction' class.
...
package org.apache.spark.sql.columnar
[+] BinaryColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
BinaryColumnStats ( )This constructor is from 'BinaryColumnStats' class.
[+] BooleanColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
BooleanColumnStats ( )This constructor is from 'BooleanColumnStats' class.
[+] ByteColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
ByteColumnStats ( )This constructor is from 'ByteColumnStats' class.
[+] CachedBatch (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buffers ( )This method is from 'CachedBatch' class.
CachedBatch ( byte[ ][ ], org.apache.spark.sql.Row )This constructor is from 'CachedBatch' class.
canEqual ( java.lang.Object )This method is from 'CachedBatch' class.
copy ( byte[ ][ ], org.apache.spark.sql.Row )This method is from 'CachedBatch' class.
curried ( )This method is from 'CachedBatch' class.
equals ( java.lang.Object )This method is from 'CachedBatch' class.
hashCode ( )This method is from 'CachedBatch' class.
productArity ( )This method is from 'CachedBatch' class.
productElement ( int )This method is from 'CachedBatch' class.
productIterator ( )This method is from 'CachedBatch' class.
productPrefix ( )This method is from 'CachedBatch' class.
...
[+] ColumnBuilder (1)
| Change | Effect |
---|
1 | Abstract method appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (3)
build ( )This abstract method is from 'ColumnBuilder' interface.
columnStats ( )This abstract method is from 'ColumnBuilder' interface.
initialize ( int, java.lang.String, boolean )This abstract method is from 'ColumnBuilder' interface.
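When a single method disappears from an interface, the type itself still loads, so the earliest catchable signal is a reflective lookup: getMethod converts the eventual NoSuchMethodError into a NoSuchMethodException. A sketch; the parameter type org.apache.spark.sql.Row is itself version-dependent, so its lookup is guarded as well:
```java
public final class AppendFromProbe {
    public static boolean hasAppendFrom() {
        try {
            Class<?> builder =
                Class.forName("org.apache.spark.sql.columnar.ColumnBuilder");
            Class<?> row = Class.forName("org.apache.spark.sql.Row");
            builder.getMethod("appendFrom", row, int.class);
            return true;
        } catch (ClassNotFoundException e) {
            return false;   // one of the types is gone entirely
        } catch (NoSuchMethodException e) {
            return false;   // the types exist but the method was removed
        }
    }
}
```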
[+] DateColumnAccessor (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'DateColumnAccessor' class.
[+] DateColumnBuilder (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnBuilder ( )This constructor is from 'DateColumnBuilder' class.
[+] DateColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
DateColumnStats ( )This constructor is from 'DateColumnStats' class.
[+] DoubleColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
DoubleColumnStats ( )This constructor is from 'DoubleColumnStats' class.
[+] FloatColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
FloatColumnStats ( )This constructor is from 'FloatColumnStats' class.
[+] GenericColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
GenericColumnStats ( )This constructor is from 'GenericColumnStats' class.
[+] InMemoryRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
batchSize ( )This method is from 'InMemoryRelation' class.
cachedColumnBuffers ( )This method is from 'InMemoryRelation' class.
canEqual ( java.lang.Object )This method is from 'InMemoryRelation' class.
child ( )This method is from 'InMemoryRelation' class.
children ( )This method is from 'InMemoryRelation' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean, int, org.apache.spark.storage.StorageLevel, org.apache.spark.sql.execution.SparkPlan, scala.Option<java.lang.String>, org.apache.spark.rdd.RDD<CachedBatch>, org.apache.spark.sql.catalyst.plans.logical.Statistics )This method is from 'InMemoryRelation' class.
equals ( java.lang.Object )This method is from 'InMemoryRelation' class.
hashCode ( )This method is from 'InMemoryRelation' class.
InMemoryRelation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean, int, org.apache.spark.storage.StorageLevel, org.apache.spark.sql.execution.SparkPlan, scala.Option<java.lang.String>, org.apache.spark.rdd.RDD<CachedBatch>, org.apache.spark.sql.catalyst.plans.logical.Statistics )This constructor is from 'InMemoryRelation' class.
newInstance ( )This method is from 'InMemoryRelation' class.
...
[+] IntColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
IntColumnStats ( )This constructor is from 'IntColumnStats' class.
[+] LongColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
LongColumnStats ( )This constructor is from 'LongColumnStats' class.
[+] NullableColumnBuilder (7)
| Change | Effect |
---|
1 | Abstract method appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
2 | Abstract method buildNonNulls ( ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
3 | Abstract method nullCount ( ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
4 | Abstract method nullCount_.eq ( int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
5 | Abstract method nulls ( ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
6 | Abstract method nulls_.eq ( java.nio.ByteBuffer ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
7 | Abstract method NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (6)
build ( )This abstract method is from 'NullableColumnBuilder' interface.
initialize ( int, java.lang.String, boolean )This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos ( )This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos_.eq ( int )This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.build ( )This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.initialize ( int, java.lang.String, boolean )This abstract method is from 'NullableColumnBuilder' interface.
[+] PartitionStatistics (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (3)
forAttribute ( )This method is from 'PartitionStatistics' class.
PartitionStatistics ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> )This constructor is from 'PartitionStatistics' class.
schema ( )This method is from 'PartitionStatistics' class.
[+] ShortColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
ShortColumnStats ( )This constructor is from 'ShortColumnStats' class.
[+] StringColumnStats (1)
| Change | Effect |
---|
1 | Removed super-interface ColumnStats. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
StringColumnStats ( )This constructor is from 'StringColumnStats' class.
[+] TimestampColumnAccessor (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
TimestampColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'TimestampColumnAccessor' class.
[+] TimestampColumnBuilder (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
TimestampColumnBuilder ( )This constructor is from 'TimestampColumnBuilder' class.
[+] TimestampColumnStats (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
TimestampColumnStats ( )This constructor is from 'TimestampColumnStats' class.
package org.apache.spark.sql.columnar.compression
[+] CompressionScheme (1)
| Change | Effect |
---|
1 | Abstract method encoder ( org.apache.spark.sql.columnar.NativeColumnType<T> ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (3)
decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> )This abstract method is from 'CompressionScheme' interface.
supports ( org.apache.spark.sql.columnar.ColumnType<?,?> )This abstract method is from 'CompressionScheme' interface.
typeId ( )This abstract method is from 'CompressionScheme' interface.
[+] Decoder<T> (2)
| Change | Effect |
---|
1 | Abstract method hasNext ( ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
2 | Abstract method next ( org.apache.spark.sql.catalyst.expressions.MutableRow, int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (1)
decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> )Return value of this abstract method has type 'Decoder<T>'.
[+] Encoder<T> (2)
| Change | Effect |
---|
1 | Abstract method compress ( java.nio.ByteBuffer, java.nio.ByteBuffer ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
2 | Abstract method gatherCompressibilityStats ( org.apache.spark.sql.Row, int ) has been removed from this interface. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (3)
compressedSize ( )This abstract method is from 'Encoder<T>' interface.
compressionRatio ( )This abstract method is from 'Encoder<T>' interface.
uncompressedSize ( )This abstract method is from 'Encoder<T>' interface.
package org.apache.spark.sql.execution
[+] AddExchange (3)
| Change | Effect |
---|
1 | This class became final. | A client program may be interrupted by VerifyError exception. |
2 | Removed super-interface scala.Product. | A client program may be interrupted by NoSuchMethodError exception. |
3 | Removed super-interface scala.Serializable. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (2)
apply ( SparkPlan )This method is from 'AddExchange' class.
numPartitions ( )This method is from 'AddExchange' class.
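Becoming final breaks a different party than the other changes here: a client subclass of AddExchange fails bytecode verification, so the VerifyError fires when the subclass is loaded, before any constructor runs. The flag, and the two dropped marker traits, are all visible reflectively (scala.Serializable extends java.io.Serializable, so plain Java can observe its loss):
```java
import java.io.Serializable;
import java.lang.reflect.Modifier;

public final class AddExchangeCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> cls = Class.forName("org.apache.spark.sql.execution.AddExchange");
        System.out.println("final:        " + Modifier.isFinal(cls.getModifiers()));
        System.out.println("serializable: " + Serializable.class.isAssignableFrom(cls));
    }
}
```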
[+] AggregateEvaluation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
AggregateEvaluation ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.expressions.Expression )This constructor is from 'AggregateEvaluation' class.
canEqual ( java.lang.Object )This method is from 'AggregateEvaluation' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.expressions.Expression )This method is from 'AggregateEvaluation' class.
curried ( )This method is from 'AggregateEvaluation' class.
equals ( java.lang.Object )This method is from 'AggregateEvaluation' class.
hashCode ( )This method is from 'AggregateEvaluation' class.
initialValues ( )This method is from 'AggregateEvaluation' class.
productArity ( )This method is from 'AggregateEvaluation' class.
productElement ( int )This method is from 'AggregateEvaluation' class.
productIterator ( )This method is from 'AggregateEvaluation' class.
productPrefix ( )This method is from 'AggregateEvaluation' class.
...
[+] BatchPythonEvaluation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
BatchPythonEvaluation ( PythonUDF, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This constructor is from 'BatchPythonEvaluation' class.
canEqual ( java.lang.Object )This method is from 'BatchPythonEvaluation' class.
child ( )This method is from 'BatchPythonEvaluation' class.
children ( )This method is from 'BatchPythonEvaluation' class.
copy ( PythonUDF, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This method is from 'BatchPythonEvaluation' class.
curried ( )This method is from 'BatchPythonEvaluation' class.
equals ( java.lang.Object )This method is from 'BatchPythonEvaluation' class.
execute ( )This method is from 'BatchPythonEvaluation' class.
hashCode ( )This method is from 'BatchPythonEvaluation' class.
output ( )This method is from 'BatchPythonEvaluation' class.
...
[+] CacheTableCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
CacheTableCommand ( java.lang.String, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>, boolean )This constructor is from 'CacheTableCommand' class.
canEqual ( java.lang.Object )This method is from 'CacheTableCommand' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>, boolean )This method is from 'CacheTableCommand' class.
curried ( )This method is from 'CacheTableCommand' class.
equals ( java.lang.Object )This method is from 'CacheTableCommand' class.
hashCode ( )This method is from 'CacheTableCommand' class.
isLazy ( )This method is from 'CacheTableCommand' class.
output ( )This method is from 'CacheTableCommand' class.
plan ( )This method is from 'CacheTableCommand' class.
productArity ( )This method is from 'CacheTableCommand' class.
productElement ( int )This method is from 'CacheTableCommand' class.
...
[+] DescribeCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'DescribeCommand' class.
child ( )This method is from 'DescribeCommand' class.
copy ( SparkPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean )This method is from 'DescribeCommand' class.
curried ( )This method is from 'DescribeCommand' class.
DescribeCommand ( SparkPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean )This constructor is from 'DescribeCommand' class.
equals ( java.lang.Object )This method is from 'DescribeCommand' class.
hashCode ( )This method is from 'DescribeCommand' class.
isExtended ( )This method is from 'DescribeCommand' class.
output ( )This method is from 'DescribeCommand' class.
productArity ( )This method is from 'DescribeCommand' class.
productElement ( int )This method is from 'DescribeCommand' class.
...
[+] Distinct (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Distinct' class.
child ( )This method is from 'Distinct' class.
children ( )This method is from 'Distinct' class.
copy ( boolean, SparkPlan )This method is from 'Distinct' class.
curried ( )This method is from 'Distinct' class.
Distinct ( boolean, SparkPlan )This constructor is from 'Distinct' class.
equals ( java.lang.Object )This method is from 'Distinct' class.
execute ( )This method is from 'Distinct' class.
hashCode ( )This method is from 'Distinct' class.
...
[+] EvaluatePython (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'EvaluatePython' class.
child ( )This method is from 'EvaluatePython' class.
copy ( PythonUDF, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, org.apache.spark.sql.catalyst.expressions.AttributeReference )This method is from 'EvaluatePython' class.
equals ( java.lang.Object )This method is from 'EvaluatePython' class.
EvaluatePython ( PythonUDF, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, org.apache.spark.sql.catalyst.expressions.AttributeReference )This constructor is from 'EvaluatePython' class.
fromJava ( java.lang.Object, org.apache.spark.sql.types.DataType )This method is from 'EvaluatePython' class.
hashCode ( )This method is from 'EvaluatePython' class.
output ( )This method is from 'EvaluatePython' class.
productArity ( )This method is from 'EvaluatePython' class.
productElement ( int )This method is from 'EvaluatePython' class.
...
[+] Except (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Except' class.
children ( )This method is from 'Except' class.
copy ( SparkPlan, SparkPlan )This method is from 'Except' class.
curried ( )This method is from 'Except' class.
equals ( java.lang.Object )This method is from 'Except' class.
Except ( SparkPlan, SparkPlan )This constructor is from 'Except' class.
execute ( )This method is from 'Except' class.
hashCode ( )This method is from 'Except' class.
left ( )This method is from 'Except' class.
output ( )This method is from 'Except' class.
...
[+] ExecutedCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<ExecutedCommand,A> )This method is from 'ExecutedCommand' class.
canEqual ( java.lang.Object )This method is from 'ExecutedCommand' class.
children ( )This method is from 'ExecutedCommand' class.
cmd ( )This method is from 'ExecutedCommand' class.
compose ( scala.Function1<A,RunnableCommand> )This method is from 'ExecutedCommand' class.
copy ( RunnableCommand )This method is from 'ExecutedCommand' class.
equals ( java.lang.Object )This method is from 'ExecutedCommand' class.
execute ( )This method is from 'ExecutedCommand' class.
executeCollect ( )This method is from 'ExecutedCommand' class.
ExecutedCommand ( RunnableCommand )This constructor is from 'ExecutedCommand' class.
...
[+] Expand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Expand' class.
child ( )This method is from 'Expand' class.
children ( )This method is from 'Expand' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This method is from 'Expand' class.
curried ( )This method is from 'Expand' class.
equals ( java.lang.Object )This method is from 'Expand' class.
execute ( )This method is from 'Expand' class.
Expand ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.GroupExpression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This constructor is from 'Expand' class.
hashCode ( )This method is from 'Expand' class.
...
[+] ExplainCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'ExplainCommand' class.
copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean )This method is from 'ExplainCommand' class.
curried ( )This method is from 'ExplainCommand' class.
equals ( java.lang.Object )This method is from 'ExplainCommand' class.
ExplainCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, boolean )This constructor is from 'ExplainCommand' class.
extended ( )This method is from 'ExplainCommand' class.
hashCode ( )This method is from 'ExplainCommand' class.
logicalPlan ( )This method is from 'ExplainCommand' class.
output ( )This method is from 'ExplainCommand' class.
productArity ( )This method is from 'ExplainCommand' class.
productElement ( int )This method is from 'ExplainCommand' class.
...
[+] ExternalSort (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'ExternalSort' class.
child ( )This method is from 'ExternalSort' class.
children ( )This method is from 'ExternalSort' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>, boolean, SparkPlan )This method is from 'ExternalSort' class.
curried ( )This method is from 'ExternalSort' class.
equals ( java.lang.Object )This method is from 'ExternalSort' class.
execute ( )This method is from 'ExternalSort' class.
ExternalSort ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>, boolean, SparkPlan )This constructor is from 'ExternalSort' class.
global ( )This method is from 'ExternalSort' class.
...
[+] GeneratedAggregate (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
aggregateExpressions ( )This method is from 'GeneratedAggregate' class.
canEqual ( java.lang.Object )This method is from 'GeneratedAggregate' class.
child ( )This method is from 'GeneratedAggregate' class.
children ( )This method is from 'GeneratedAggregate' class.
copy ( boolean, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>, SparkPlan )This method is from 'GeneratedAggregate' class.
curried ( )This method is from 'GeneratedAggregate' class.
equals ( java.lang.Object )This method is from 'GeneratedAggregate' class.
execute ( )This method is from 'GeneratedAggregate' class.
GeneratedAggregate ( boolean, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>, SparkPlan )This constructor is from 'GeneratedAggregate' class.
...
[+] IntegerHashSetSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
IntegerHashSetSerializer ( )This constructor is from 'IntegerHashSetSerializer' class.
[+] Intersect (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Intersect' class.
children ( )This method is from 'Intersect' class.
copy ( SparkPlan, SparkPlan )This method is from 'Intersect' class.
curried ( )This method is from 'Intersect' class.
equals ( java.lang.Object )This method is from 'Intersect' class.
execute ( )This method is from 'Intersect' class.
hashCode ( )This method is from 'Intersect' class.
Intersect ( SparkPlan, SparkPlan )This constructor is from 'Intersect' class.
left ( )This method is from 'Intersect' class.
output ( )This method is from 'Intersect' class.
...
[+] JavaBigDecimalSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
JavaBigDecimalSerializer ( )This constructor is from 'JavaBigDecimalSerializer' class.
[+] KryoResourcePool (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
KryoResourcePool ( int )This constructor is from 'KryoResourcePool' class.
[+] LocalTableScan (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'LocalTableScan' class.
children ( )This method is from 'LocalTableScan' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row> )This method is from 'LocalTableScan' class.
curried ( )This method is from 'LocalTableScan' class.
equals ( java.lang.Object )This method is from 'LocalTableScan' class.
execute ( )This method is from 'LocalTableScan' class.
executeCollect ( )This method is from 'LocalTableScan' class.
executeTake ( int )This method is from 'LocalTableScan' class.
hashCode ( )This method is from 'LocalTableScan' class.
LocalTableScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row> )This constructor is from 'LocalTableScan' class.
...
[+] LogicalLocalTable (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'LogicalLocalTable' class.
children ( )This method is from 'LogicalLocalTable' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext )This method is from 'LogicalLocalTable' class.
equals ( java.lang.Object )This method is from 'LogicalLocalTable' class.
hashCode ( )This method is from 'LogicalLocalTable' class.
LogicalLocalTable ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext )This constructor is from 'LogicalLocalTable' class.
newInstance ( )This method is from 'LogicalLocalTable' class.
output ( )This method is from 'LogicalLocalTable' class.
productArity ( )This method is from 'LogicalLocalTable' class.
...
[+] LogicalRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'LogicalRDD' class.
children ( )This method is from 'LogicalRDD' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext )This method is from 'LogicalRDD' class.
equals ( java.lang.Object )This method is from 'LogicalRDD' class.
hashCode ( )This method is from 'LogicalRDD' class.
LogicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row>, org.apache.spark.sql.SQLContext )This constructor is from 'LogicalRDD' class.
newInstance ( )This method is from 'LogicalRDD' class.
output ( )This method is from 'LogicalRDD' class.
productArity ( )This method is from 'LogicalRDD' class.
...
[+] LongHashSetSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
LongHashSetSerializer ( )This constructor is from 'LongHashSetSerializer' class.
[+] OpenHashSetSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
OpenHashSetSerializer ( )This constructor is from 'OpenHashSetSerializer' class.
[+] OutputFaker (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'OutputFaker' class.
child ( )This method is from 'OutputFaker' class.
children ( )This method is from 'OutputFaker' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This method is from 'OutputFaker' class.
curried ( )This method is from 'OutputFaker' class.
equals ( java.lang.Object )This method is from 'OutputFaker' class.
execute ( )This method is from 'OutputFaker' class.
hashCode ( )This method is from 'OutputFaker' class.
output ( )This method is from 'OutputFaker' class.
OutputFaker ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, SparkPlan )This constructor is from 'OutputFaker' class.
...
[+] PhysicalRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'PhysicalRDD' class.
children ( )This method is from 'PhysicalRDD' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> )This method is from 'PhysicalRDD' class.
curried ( )This method is from 'PhysicalRDD' class.
equals ( java.lang.Object )This method is from 'PhysicalRDD' class.
execute ( )This method is from 'PhysicalRDD' class.
hashCode ( )This method is from 'PhysicalRDD' class.
output ( )This method is from 'PhysicalRDD' class.
PhysicalRDD ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, org.apache.spark.rdd.RDD<org.apache.spark.sql.Row> )This constructor is from 'PhysicalRDD' class.
productArity ( )This method is from 'PhysicalRDD' class.
...
[+] PythonUDF (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
accumulator ( )This method is from 'PythonUDF' class.
broadcastVars ( )This method is from 'PythonUDF' class.
canEqual ( java.lang.Object )This method is from 'PythonUDF' class.
children ( )This method is from 'PythonUDF' class.
command ( )This method is from 'PythonUDF' class.
copy ( java.lang.String, byte[ ], java.util.Map<java.lang.String,java.lang.String>, java.util.List<java.lang.String>, java.lang.String, java.util.List<org.apache.spark.broadcast.Broadcast<org.apache.spark.api.python.PythonBroadcast>>, org.apache.spark.Accumulator<java.util.List<byte[ ]>>, org.apache.spark.sql.types.DataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> )This method is from 'PythonUDF' class.
curried ( )This method is from 'PythonUDF' class.
dataType ( )This method is from 'PythonUDF' class.
envVars ( )This method is from 'PythonUDF' class.
equals ( java.lang.Object )This method is from 'PythonUDF' class.
eval ( org.apache.spark.sql.Row )This method is from 'PythonUDF' class.
...
[+] RunnableCommand (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
run ( org.apache.spark.sql.SQLContext )This abstract method is from 'RunnableCommand' interface.
[+] ScalaBigDecimalSerializer (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
ScalaBigDecimalSerializer ( )This constructor is from 'ScalaBigDecimalSerializer' class.
[+] SetCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'SetCommand' class.
copy ( scala.Option<scala.Tuple2<java.lang.String,scala.Option<java.lang.String>>>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> )This method is from 'SetCommand' class.
curried ( )This method is from 'SetCommand' class.
equals ( java.lang.Object )This method is from 'SetCommand' class.
hashCode ( )This method is from 'SetCommand' class.
kv ( )This method is from 'SetCommand' class.
output ( )This method is from 'SetCommand' class.
productArity ( )This method is from 'SetCommand' class.
productElement ( int )This method is from 'SetCommand' class.
productIterator ( )This method is from 'SetCommand' class.
productPrefix ( )This method is from 'SetCommand' class.
...
[+] ShowTablesCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<ShowTablesCommand,A> )This method is from 'ShowTablesCommand' class.
canEqual ( java.lang.Object )This method is from 'ShowTablesCommand' class.
compose ( scala.Function1<A,scala.Option<java.lang.String>> )This method is from 'ShowTablesCommand' class.
copy ( scala.Option<java.lang.String> )This method is from 'ShowTablesCommand' class.
databaseName ( )This method is from 'ShowTablesCommand' class.
equals ( java.lang.Object )This method is from 'ShowTablesCommand' class.
hashCode ( )This method is from 'ShowTablesCommand' class.
output ( )This method is from 'ShowTablesCommand' class.
productArity ( )This method is from 'ShowTablesCommand' class.
productElement ( int )This method is from 'ShowTablesCommand' class.
productIterator ( )This method is from 'ShowTablesCommand' class.
...
[+] SparkPlan (2)
| Change | Effect |
---|
1 | Removed super-interface org.apache.spark.Logging. | A client program may be interrupted by NoSuchMethodError exception. |
2 | Removed super-interface scala.Serializable. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (>10)
apply ( SparkPlan )Return value of this method has type 'SparkPlan'.
child ( )Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.plans.physical.Partitioning, SparkPlan )2nd parameter 'child' of this method has type 'SparkPlan'.
Exchange ( org.apache.spark.sql.catalyst.plans.physical.Partitioning, SparkPlan )2nd parameter 'child' of this method has type 'SparkPlan'.
child ( )Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.expressions.Expression, SparkPlan )2nd parameter 'child' of this method has type 'SparkPlan'.
Filter ( org.apache.spark.sql.catalyst.expressions.Expression, SparkPlan )2nd parameter 'child' of this method has type 'SparkPlan'.
child ( )Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.expressions.Generator, boolean, boolean, SparkPlan )4th parameter 'child' of this method has type 'SparkPlan'.
Generate ( org.apache.spark.sql.catalyst.expressions.Generator, boolean, boolean, SparkPlan )4th parameter 'child' of this method has type 'SparkPlan'.
...
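For SparkPlan the loss of scala.Serializable matters beyond method linkage: scala.Serializable extends java.io.Serializable, so a plan captured in a serialized task closure now fails at serialization time. A generic round-trip check, sketched without any Spark-specific assumptions:
```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.NotSerializableException;
import java.io.ObjectOutputStream;

public final class SerializableCheck {
    // Returns true when the candidate survives plain Java serialization.
    public static boolean roundTrips(Object candidate) {
        try {
            new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(candidate);
            return true;
        } catch (NotSerializableException e) {
            return false;
        } catch (IOException e) {
            return false;
        }
    }
}
```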
[+] UncacheTableCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<UncacheTableCommand,A> )This method is from 'UncacheTableCommand' class.
canEqual ( java.lang.Object )This method is from 'UncacheTableCommand' class.
compose ( scala.Function1<A,java.lang.String> )This method is from 'UncacheTableCommand' class.
copy ( java.lang.String )This method is from 'UncacheTableCommand' class.
equals ( java.lang.Object )This method is from 'UncacheTableCommand' class.
hashCode ( )This method is from 'UncacheTableCommand' class.
output ( )This method is from 'UncacheTableCommand' class.
productArity ( )This method is from 'UncacheTableCommand' class.
productElement ( int )This method is from 'UncacheTableCommand' class.
productIterator ( )This method is from 'UncacheTableCommand' class.
productPrefix ( )This method is from 'UncacheTableCommand' class.
...
package org.apache.spark.sql.execution.joins
[+] BroadcastHashJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
BroadcastHashJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This constructor is from 'BroadcastHashJoin' class.
buildKeys ( )This method is from 'BroadcastHashJoin' class.
buildPlan ( )This method is from 'BroadcastHashJoin' class.
buildSide ( )This method is from 'BroadcastHashJoin' class.
buildSideKeyGenerator ( )This method is from 'BroadcastHashJoin' class.
canEqual ( java.lang.Object )This method is from 'BroadcastHashJoin' class.
children ( )This method is from 'BroadcastHashJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'BroadcastHashJoin' class.
curried ( )This method is from 'BroadcastHashJoin' class.
equals ( java.lang.Object )This method is from 'BroadcastHashJoin' class.
execute ( )This method is from 'BroadcastHashJoin' class.
...
[+] BroadcastLeftSemiJoinHash (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
BroadcastLeftSemiJoinHash ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This constructor is from 'BroadcastLeftSemiJoinHash' class.
buildKeys ( )This method is from 'BroadcastLeftSemiJoinHash' class.
buildPlan ( )This method is from 'BroadcastLeftSemiJoinHash' class.
buildSide ( )This method is from 'BroadcastLeftSemiJoinHash' class.
buildSideKeyGenerator ( )This method is from 'BroadcastLeftSemiJoinHash' class.
canEqual ( java.lang.Object )This method is from 'BroadcastLeftSemiJoinHash' class.
children ( )This method is from 'BroadcastLeftSemiJoinHash' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'BroadcastLeftSemiJoinHash' class.
curried ( )This method is from 'BroadcastLeftSemiJoinHash' class.
equals ( java.lang.Object )This method is from 'BroadcastLeftSemiJoinHash' class.
...
[+] BroadcastNestedLoopJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
BroadcastNestedLoopJoin ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, package.BuildSide, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> )This constructor is from 'BroadcastNestedLoopJoin' class.
buildSide ( )This method is from 'BroadcastNestedLoopJoin' class.
canEqual ( java.lang.Object )This method is from 'BroadcastNestedLoopJoin' class.
children ( )This method is from 'BroadcastNestedLoopJoin' class.
condition ( )This method is from 'BroadcastNestedLoopJoin' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, package.BuildSide, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> )This method is from 'BroadcastNestedLoopJoin' class.
curried ( )This method is from 'BroadcastNestedLoopJoin' class.
equals ( java.lang.Object )This method is from 'BroadcastNestedLoopJoin' class.
execute ( )This method is from 'BroadcastNestedLoopJoin' class.
hashCode ( )This method is from 'BroadcastNestedLoopJoin' class.
joinType ( )This method is from 'BroadcastNestedLoopJoin' class.
...
[+] CartesianProduct (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'CartesianProduct' class.
CartesianProduct ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This constructor is from 'CartesianProduct' class.
children ( )This method is from 'CartesianProduct' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'CartesianProduct' class.
curried ( )This method is from 'CartesianProduct' class.
equals ( java.lang.Object )This method is from 'CartesianProduct' class.
execute ( )This method is from 'CartesianProduct' class.
hashCode ( )This method is from 'CartesianProduct' class.
left ( )This method is from 'CartesianProduct' class.
output ( )This method is from 'CartesianProduct' class.
...
[+] GeneralHashedRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
GeneralHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.util.collection.CompactBuffer<org.apache.spark.sql.Row>> )This constructor is from 'GeneralHashedRelation' class.
[+] HashedRelation (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
get ( org.apache.spark.sql.Row )This abstract method is from 'HashedRelation' interface.
[+] HashJoin (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildKeys ( )This abstract method is from 'HashJoin' interface.
buildPlan ( )This abstract method is from 'HashJoin' interface.
buildSide ( )This abstract method is from 'HashJoin' interface.
buildSideKeyGenerator ( )This abstract method is from 'HashJoin' interface.
hashJoin ( scala.collection.Iterator<org.apache.spark.sql.Row>, HashedRelation )This abstract method is from 'HashJoin' interface.
left ( )This abstract method is from 'HashJoin' interface.
leftKeys ( )This abstract method is from 'HashJoin' interface.
output ( )This abstract method is from 'HashJoin' interface.
right ( )This abstract method is from 'HashJoin' interface.
rightKeys ( )This abstract method is from 'HashJoin' interface.
streamedKeys ( )This abstract method is from 'HashJoin' interface.
...
[+] HashOuterJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'HashOuterJoin' class.
children ( )This method is from 'HashOuterJoin' class.
condition ( )This method is from 'HashOuterJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'HashOuterJoin' class.
curried ( )This method is from 'HashOuterJoin' class.
equals ( java.lang.Object )This method is from 'HashOuterJoin' class.
execute ( )This method is from 'HashOuterJoin' class.
hashCode ( )This method is from 'HashOuterJoin' class.
HashOuterJoin ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.catalyst.plans.JoinType, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This constructor is from 'HashOuterJoin' class.
joinType ( )This method is from 'HashOuterJoin' class.
left ( )This method is from 'HashOuterJoin' class.
...
[+] LeftSemiJoinBNL (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
broadcast ( )This method is from 'LeftSemiJoinBNL' class.
canEqual ( java.lang.Object )This method is from 'LeftSemiJoinBNL' class.
children ( )This method is from 'LeftSemiJoinBNL' class.
condition ( )This method is from 'LeftSemiJoinBNL' class.
copy ( org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan, scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> )This method is from 'LeftSemiJoinBNL' class.
curried ( )This method is from 'LeftSemiJoinBNL' class.
equals ( java.lang.Object )This method is from 'LeftSemiJoinBNL' class.
execute ( )This method is from 'LeftSemiJoinBNL' class.
hashCode ( )This method is from 'LeftSemiJoinBNL' class.
left ( )This method is from 'LeftSemiJoinBNL' class.
...
[+] LeftSemiJoinHash (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildKeys ( )This method is from 'LeftSemiJoinHash' class.
buildPlan ( )This method is from 'LeftSemiJoinHash' class.
buildSide ( )This method is from 'LeftSemiJoinHash' class.
buildSideKeyGenerator ( )This method is from 'LeftSemiJoinHash' class.
canEqual ( java.lang.Object )This method is from 'LeftSemiJoinHash' class.
children ( )This method is from 'LeftSemiJoinHash' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'LeftSemiJoinHash' class.
curried ( )This method is from 'LeftSemiJoinHash' class.
equals ( java.lang.Object )This method is from 'LeftSemiJoinHash' class.
execute ( )This method is from 'LeftSemiJoinHash' class.
...
[+] ShuffledHashJoin (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildKeys ( )This method is from 'ShuffledHashJoin' class.
buildPlan ( )This method is from 'ShuffledHashJoin' class.
buildSide ( )This method is from 'ShuffledHashJoin' class.
buildSideKeyGenerator ( )This method is from 'ShuffledHashJoin' class.
canEqual ( java.lang.Object )This method is from 'ShuffledHashJoin' class.
children ( )This method is from 'ShuffledHashJoin' class.
copy ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression>, package.BuildSide, org.apache.spark.sql.execution.SparkPlan, org.apache.spark.sql.execution.SparkPlan )This method is from 'ShuffledHashJoin' class.
curried ( )This method is from 'ShuffledHashJoin' class.
equals ( java.lang.Object )This method is from 'ShuffledHashJoin' class.
execute ( )This method is from 'ShuffledHashJoin' class.
hashCode ( )This method is from 'ShuffledHashJoin' class.
...
[+] UniqueKeyHashedRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
UniqueKeyHashedRelation ( java.util.HashMap<org.apache.spark.sql.Row,org.apache.spark.sql.Row> )This constructor is from 'UniqueKeyHashedRelation' class.
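Everything in this package is an outright removal, so any client class that merely mentions one of these join types becomes unresolvable as a whole. The containment pattern referenced after the first probe sketch above is to confine the hard references to a nested holder that is only loaded on demand. The holder class here is hypothetical, and the sketch assumes BroadcastHashJoin was referenceable from client code compiled against 1.3.0:
```java
public final class JoinSupport {
    // Hypothetical holder: the only class with a hard reference to a removed type.
    static final class HashJoinUser {
        static void touch() {
            // The class-literal reference forces resolution; against a 1.0.0
            // classpath this line raises NoClassDefFoundError.
            Class<?> c = org.apache.spark.sql.execution.joins.BroadcastHashJoin.class;
        }
    }

    public static boolean available() {
        try {
            HashJoinUser.touch();
            return true;   // the 1.3.0-only join types link on this classpath
        } catch (NoClassDefFoundError e) {
            return false;  // caught at this one deliberate boundary
        }
    }
}
```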
package org.apache.spark.sql.jdbc
[+] DriverQuirks (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (4)
DriverQuirks ( )This constructor is from 'DriverQuirks' abstract class.
get ( java.lang.String )This method is from 'DriverQuirks' abstract class.
getCatalystType ( int, java.lang.String, int, org.apache.spark.sql.types.MetadataBuilder )This abstract method is from 'DriverQuirks' abstract class.
getJDBCType ( org.apache.spark.sql.types.DataType )This abstract method is from 'DriverQuirks' abstract class.
[+] JDBCPartition (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'JDBCPartition' class.
copy ( java.lang.String, int )This method is from 'JDBCPartition' class.
curried ( )This method is from 'JDBCPartition' class.
equals ( java.lang.Object )This method is from 'JDBCPartition' class.
hashCode ( )This method is from 'JDBCPartition' class.
idx ( )This method is from 'JDBCPartition' class.
index ( )This method is from 'JDBCPartition' class.
JDBCPartition ( java.lang.String, int )This constructor is from 'JDBCPartition' class.
productArity ( )This method is from 'JDBCPartition' class.
productElement ( int )This method is from 'JDBCPartition' class.
productIterator ( )This method is from 'JDBCPartition' class.
...
[+] JDBCPartitioningInfo (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'JDBCPartitioningInfo' class.
column ( )This method is from 'JDBCPartitioningInfo' class.
copy ( java.lang.String, long, long, int )This method is from 'JDBCPartitioningInfo' class.
curried ( )This method is from 'JDBCPartitioningInfo' class.
equals ( java.lang.Object )This method is from 'JDBCPartitioningInfo' class.
hashCode ( )This method is from 'JDBCPartitioningInfo' class.
JDBCPartitioningInfo ( java.lang.String, long, long, int )This constructor is from 'JDBCPartitioningInfo' class.
lowerBound ( )This method is from 'JDBCPartitioningInfo' class.
numPartitions ( )This method is from 'JDBCPartitioningInfo' class.
productArity ( )This method is from 'JDBCPartitioningInfo' class.
productElement ( int )This method is from 'JDBCPartitioningInfo' class.
...
[+] JDBCRDD (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
BinaryConversion ( )This method is from 'JDBCRDD' class.
BinaryLongConversion ( )This method is from 'JDBCRDD' class.
BooleanConversion ( )This method is from 'JDBCRDD' class.
compute ( org.apache.spark.Partition, org.apache.spark.TaskContext )This method is from 'JDBCRDD' class.
DateConversion ( )This method is from 'JDBCRDD' class.
DecimalConversion ( )This method is from 'JDBCRDD' class.
DoubleConversion ( )This method is from 'JDBCRDD' class.
FloatConversion ( )This method is from 'JDBCRDD' class.
getConnector ( java.lang.String, java.lang.String )This method is from 'JDBCRDD' class.
getConversions ( org.apache.spark.sql.types.StructType )This method is from 'JDBCRDD' class.
getPartitions ( )This method is from 'JDBCRDD' class.
...
[+] JDBCRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildScan ( java.lang.String[ ], org.apache.spark.sql.sources.Filter[ ] )This method is from 'JDBCRelation' class.
canEqual ( java.lang.Object )This method is from 'JDBCRelation' class.
columnPartition ( JDBCPartitioningInfo )This method is from 'JDBCRelation' class.
copy ( java.lang.String, java.lang.String, org.apache.spark.Partition[ ], org.apache.spark.sql.SQLContext )This method is from 'JDBCRelation' class.
equals ( java.lang.Object )This method is from 'JDBCRelation' class.
hashCode ( )This method is from 'JDBCRelation' class.
JDBCRelation ( java.lang.String, java.lang.String, org.apache.spark.Partition[ ], org.apache.spark.sql.SQLContext )This constructor is from 'JDBCRelation' class.
parts ( )This method is from 'JDBCRelation' class.
productArity ( )This method is from 'JDBCRelation' class.
productElement ( int )This method is from 'JDBCRelation' class.
productIterator ( )This method is from 'JDBCRelation' class.
...
[+] MySQLQuirks (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
MySQLQuirks ( )This constructor is from 'MySQLQuirks' class.
[+] NoQuirks (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
NoQuirks ( )This constructor is from 'NoQuirks' class.
[+] PostgresQuirks (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
PostgresQuirks ( )This constructor is from 'PostgresQuirks' class.
package org.apache.spark.sql.json
[+] JSONRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildScan ( )This method is from 'JSONRelation' class.
canEqual ( java.lang.Object )This method is from 'JSONRelation' class.
copy ( java.lang.String, double, scala.Option<org.apache.spark.sql.types.StructType>, org.apache.spark.sql.SQLContext )This method is from 'JSONRelation' class.
equals ( java.lang.Object )This method is from 'JSONRelation' class.
hashCode ( )This method is from 'JSONRelation' class.
insert ( org.apache.spark.sql.DataFrame, boolean )This method is from 'JSONRelation' class.
JSONRelation ( java.lang.String, double, scala.Option<org.apache.spark.sql.types.StructType>, org.apache.spark.sql.SQLContext )This constructor is from 'JSONRelation' class.
JSONRelation..baseRDD ( )This method is from 'JSONRelation' class.
path ( )This method is from 'JSONRelation' class.
productArity ( )This method is from 'JSONRelation' class.
productElement ( int )This method is from 'JSONRelation' class.
...
package org.apache.spark.sql.parquet
[+] CatalystArrayContainsNullConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystArrayContainsNullConverter ( org.apache.spark.sql.types.DataType, int, CatalystConverter )This constructor is from 'CatalystArrayContainsNullConverter' class.
[+] CatalystArrayConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystArrayConverter ( org.apache.spark.sql.types.DataType, int, CatalystConverter )This constructor is from 'CatalystArrayConverter' class.
[+] CatalystConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
ARRAY_CONTAINS_NULL_BAG_SCHEMA_NAME ( )This method is from 'CatalystConverter' abstract class.
ARRAY_ELEMENTS_SCHEMA_NAME ( )This method is from 'CatalystConverter' abstract class.
CatalystConverter ( )This constructor is from 'CatalystConverter' abstract class.
clearBuffer ( )This abstract method is from 'CatalystConverter' abstract class.
getCurrentRecord ( )This method is from 'CatalystConverter' abstract class.
index ( )This abstract method is from 'CatalystConverter' abstract class.
isRootConverter ( )This method is from 'CatalystConverter' abstract class.
MAP_KEY_SCHEMA_NAME ( )This method is from 'CatalystConverter' abstract class.
MAP_SCHEMA_NAME ( )This method is from 'CatalystConverter' abstract class.
MAP_VALUE_SCHEMA_NAME ( )This method is from 'CatalystConverter' abstract class.
parent ( )This abstract method is from 'CatalystConverter' abstract class.
...
[+] CatalystMapConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystMapConverter ( org.apache.spark.sql.types.StructField[ ], int, CatalystConverter )This constructor is from 'CatalystMapConverter' class.
[+] CatalystNativeArrayConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystNativeArrayConverter ( org.apache.spark.sql.types.NativeType, int, CatalystConverter, int )This constructor is from 'CatalystNativeArrayConverter' class.
[+] CatalystPrimitiveRowConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystPrimitiveRowConverter ( org.apache.spark.sql.catalyst.expressions.Attribute[ ] )This constructor is from 'CatalystPrimitiveRowConverter' class.
[+] CatalystStructConverter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CatalystStructConverter ( org.apache.spark.sql.types.StructField[ ], int, CatalystConverter )This constructor is from 'CatalystStructConverter' class.
[+] InsertIntoParquetTable (1)
| Change | Effect |
---|
1 | Removed super-interface org.apache.spark.mapreduce.SparkHadoopMapReduceUtil. | A client program may be interrupted by NoSuchMethodError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'InsertIntoParquetTable' class.
child ( )This method is from 'InsertIntoParquetTable' class.
child ( )This method is from 'InsertIntoParquetTable' class.
children ( )This method is from 'InsertIntoParquetTable' class.
children ( )This method is from 'InsertIntoParquetTable' class.
equals ( java.lang.Object )This method is from 'InsertIntoParquetTable' class.
execute ( )This method is from 'InsertIntoParquetTable' class.
hashCode ( )This method is from 'InsertIntoParquetTable' class.
newJobContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.JobID )This method is from 'InsertIntoParquetTable' class.
newTaskAttemptContext ( org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.TaskAttemptID )This method is from 'InsertIntoParquetTable' class.
newTaskAttemptID ( java.lang.String, int, boolean, int, int )This method is from 'InsertIntoParquetTable' class.
...
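A small classpath probe (a sketch, assuming spark-sql is on the classpath and the object name is ours): it checks whether the members inherited from the removed super-interface SparkHadoopMapReduceUtil still resolve, which is exactly what a client linked against 1.3.0 relies on at run time.

```scala
object ProbeInsertIntoParquetTable {
  def main(args: Array[String]): Unit = {
    val cls = Class.forName("org.apache.spark.sql.parquet.InsertIntoParquetTable")
    // Scan all public methods for one inherited from the super-interface.
    val present = cls.getMethods.exists(_.getName == "newJobContext")
    // false on 1.0.0 => calls linked against 1.3.0 raise NoSuchMethodError
    println(s"newJobContext resolvable: $present")
  }
}
```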
[+] ParquetRelation2 (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> )This method is from 'ParquetRelation2' class.
canEqual ( java.lang.Object )This method is from 'ParquetRelation2' class.
copy ( scala.collection.Seq<java.lang.String>, scala.collection.immutable.Map<java.lang.String,java.lang.String>, scala.Option<org.apache.spark.sql.types.StructType>, scala.Option<PartitionSpec>, org.apache.spark.sql.SQLContext )This method is from 'ParquetRelation2' class.
DEFAULT_PARTITION_NAME ( )This method is from 'ParquetRelation2' class.
equals ( java.lang.Object )This method is from 'ParquetRelation2' class.
hashCode ( )This method is from 'ParquetRelation2' class.
insert ( org.apache.spark.sql.DataFrame, boolean )This method is from 'ParquetRelation2' class.
isPartitioned ( )This method is from 'ParquetRelation2' class.
isTraceEnabled ( )This method is from 'ParquetRelation2' class.
log ( )This method is from 'ParquetRelation2' class.
logDebug ( scala.Function0<java.lang.String> )This method is from 'ParquetRelation2' class.
...
[+] ParquetTest (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
configuration ( )This abstract method is from 'ParquetTest' interface.
makeParquetFile ( org.apache.spark.sql.DataFrame, java.io.File, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> )This abstract method is from 'ParquetTest' interface.
makeParquetFile ( scala.collection.Seq<T>, java.io.File, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> )This abstract method is from 'ParquetTest' interface.
makePartitionDir ( java.io.File, java.lang.String, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.Object>> )This abstract method is from 'ParquetTest' interface.
sqlContext ( )This abstract method is from 'ParquetTest' interface.
withParquetDataFrame ( scala.collection.Seq<T>, scala.Function1<org.apache.spark.sql.DataFrame,scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> )This abstract method is from 'ParquetTest' interface.
withParquetFile ( scala.collection.Seq<T>, scala.Function1<java.lang.String,scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> )This abstract method is from 'ParquetTest' interface.
withParquetTable ( scala.collection.Seq<T>, java.lang.String, scala.Function0<scala.runtime.BoxedUnit>, scala.reflect.ClassTag<T>, scala.reflect.api.TypeTags.TypeTag<T> )This abstract method is from 'ParquetTest' interface.
withSQLConf ( scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>>, scala.Function0<scala.runtime.BoxedUnit> )This abstract method is from 'ParquetTest' interface.
withTempDir ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> )This abstract method is from 'ParquetTest' interface.
withTempPath ( scala.Function1<java.io.File,scala.runtime.BoxedUnit> )This abstract method is from 'ParquetTest' interface.
...
[+] ParquetTypeInfo (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'ParquetTypeInfo' class.
copy ( parquet.schema.PrimitiveType.PrimitiveTypeName, scala.Option<parquet.schema.OriginalType>, scala.Option<parquet.schema.DecimalMetadata>, scala.Option<java.lang.Object> )This method is from 'ParquetTypeInfo' class.
curried ( )This method is from 'ParquetTypeInfo' class.
decimalMetadata ( )This method is from 'ParquetTypeInfo' class.
equals ( java.lang.Object )This method is from 'ParquetTypeInfo' class.
hashCode ( )This method is from 'ParquetTypeInfo' class.
length ( )This method is from 'ParquetTypeInfo' class.
originalType ( )This method is from 'ParquetTypeInfo' class.
ParquetTypeInfo ( parquet.schema.PrimitiveType.PrimitiveTypeName, scala.Option<parquet.schema.OriginalType>, scala.Option<parquet.schema.DecimalMetadata>, scala.Option<java.lang.Object> )This constructor is from 'ParquetTypeInfo' class.
primitiveType ( )This method is from 'ParquetTypeInfo' class.
productArity ( )This method is from 'ParquetTypeInfo' class.
...
[+] Partition (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Partition' class.
copy ( org.apache.spark.sql.Row, java.lang.String )This method is from 'Partition' class.
curried ( )This method is from 'Partition' class.
equals ( java.lang.Object )This method is from 'Partition' class.
hashCode ( )This method is from 'Partition' class.
Partition ( org.apache.spark.sql.Row, java.lang.String )This constructor is from 'Partition' class.
path ( )This method is from 'Partition' class.
productArity ( )This method is from 'Partition' class.
productElement ( int )This method is from 'Partition' class.
productIterator ( )This method is from 'Partition' class.
productPrefix ( )This method is from 'Partition' class.
...
[+] PartitionSpec (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'PartitionSpec' class.
copy ( org.apache.spark.sql.types.StructType, scala.collection.Seq<Partition> )This method is from 'PartitionSpec' class.
curried ( )This method is from 'PartitionSpec' class.
equals ( java.lang.Object )This method is from 'PartitionSpec' class.
hashCode ( )This method is from 'PartitionSpec' class.
partitionColumns ( )This method is from 'PartitionSpec' class.
partitions ( )This method is from 'PartitionSpec' class.
PartitionSpec ( org.apache.spark.sql.types.StructType, scala.collection.Seq<Partition> )This constructor is from 'PartitionSpec' class.
productArity ( )This method is from 'PartitionSpec' class.
productElement ( int )This method is from 'PartitionSpec' class.
productIterator ( )This method is from 'PartitionSpec' class.
...
[+] TestGroupWriteSupport (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
TestGroupWriteSupport ( parquet.schema.MessageType )This constructor is from 'TestGroupWriteSupport' class.
package org.apache.spark.sql.parquet.timestamp
[+] NanoTime (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (5)
getJulianDay ( )This method is from 'NanoTime' class.
getTimeOfDayNanos ( )This method is from 'NanoTime' class.
NanoTime ( )This constructor is from 'NanoTime' class.
set ( int, long )This method is from 'NanoTime' class.
toBinary ( )This method is from 'NanoTime' class.
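A sketch using only the members the report lists for NanoTime (the Julian-day and nanosecond values are arbitrary; NanoTime is an internal class backing Parquet INT96 timestamps):

```scala
import org.apache.spark.sql.parquet.timestamp.NanoTime

val nt = new NanoTime()
nt.set(2451545, 0L)          // set(julianDay, timeOfDayNanos)
val binary = nt.toBinary     // INT96 binary representation
println((nt.getJulianDay, nt.getTimeOfDayNanos))
// Against 1.0.0 the import itself is fatal: NoClassDefFoundError.
```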
package org.apache.spark.sql.sources
[+] And (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
And ( Filter, Filter )This constructor is from 'And' class.
canEqual ( java.lang.Object )This method is from 'And' class.
copy ( Filter, Filter )This method is from 'And' class.
curried ( )This method is from 'And' class.
equals ( java.lang.Object )This method is from 'And' class.
hashCode ( )This method is from 'And' class.
left ( )This method is from 'And' class.
productArity ( )This method is from 'And' class.
productElement ( int )This method is from 'And' class.
productIterator ( )This method is from 'And' class.
productPrefix ( )This method is from 'And' class.
...
[+] BaseRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (4)
BaseRelation ( )This constructor is from 'BaseRelation' abstract class.
schema ( )This abstract method is from 'BaseRelation' abstract class.
sizeInBytes ( )This method is from 'BaseRelation' abstract class.
sqlContext ( )This abstract method is from 'BaseRelation' abstract class.
[+] CaseInsensitiveMap (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
CaseInsensitiveMap ( scala.collection.immutable.Map<java.lang.String,java.lang.String> )This constructor is from 'CaseInsensitiveMap' class.
[+] CatalystScan (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
buildScan ( scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> )This abstract method is from 'CatalystScan' interface.
[+] CreatableRelationProvider (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.DataFrame )This abstract method is from 'CreatableRelationProvider' interface.
[+] CreateTableUsing (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
allowExisting ( )This method is from 'CreateTableUsing' class.
canEqual ( java.lang.Object )This method is from 'CreateTableUsing' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, boolean, scala.collection.immutable.Map<java.lang.String,java.lang.String>, boolean, boolean )This method is from 'CreateTableUsing' class.
CreateTableUsing ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, boolean, scala.collection.immutable.Map<java.lang.String,java.lang.String>, boolean, boolean )This constructor is from 'CreateTableUsing' class.
curried ( )This method is from 'CreateTableUsing' class.
equals ( java.lang.Object )This method is from 'CreateTableUsing' class.
hashCode ( )This method is from 'CreateTableUsing' class.
managedIfNoPath ( )This method is from 'CreateTableUsing' class.
options ( )This method is from 'CreateTableUsing' class.
productArity ( )This method is from 'CreateTableUsing' class.
productElement ( int )This method is from 'CreateTableUsing' class.
...
[+] CreateTableUsingAsSelect (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'CreateTableUsingAsSelect' class.
child ( )This method is from 'CreateTableUsingAsSelect' class.
child ( )This method is from 'CreateTableUsingAsSelect' class.
copy ( java.lang.String, java.lang.String, boolean, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This method is from 'CreateTableUsingAsSelect' class.
CreateTableUsingAsSelect ( java.lang.String, java.lang.String, boolean, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This constructor is from 'CreateTableUsingAsSelect' class.
curried ( )This method is from 'CreateTableUsingAsSelect' class.
equals ( java.lang.Object )This method is from 'CreateTableUsingAsSelect' class.
hashCode ( )This method is from 'CreateTableUsingAsSelect' class.
mode ( )This method is from 'CreateTableUsingAsSelect' class.
options ( )This method is from 'CreateTableUsingAsSelect' class.
output ( )This method is from 'CreateTableUsingAsSelect' class.
...
[+] CreateTempTableUsing (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'CreateTempTableUsing' class.
copy ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> )This method is from 'CreateTempTableUsing' class.
CreateTempTableUsing ( java.lang.String, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> )This constructor is from 'CreateTempTableUsing' class.
curried ( )This method is from 'CreateTempTableUsing' class.
equals ( java.lang.Object )This method is from 'CreateTempTableUsing' class.
hashCode ( )This method is from 'CreateTempTableUsing' class.
options ( )This method is from 'CreateTempTableUsing' class.
productArity ( )This method is from 'CreateTempTableUsing' class.
productElement ( int )This method is from 'CreateTempTableUsing' class.
productIterator ( )This method is from 'CreateTempTableUsing' class.
productPrefix ( )This method is from 'CreateTempTableUsing' class.
...
[+] CreateTempTableUsingAsSelect (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'CreateTempTableUsingAsSelect' class.
copy ( java.lang.String, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This method is from 'CreateTempTableUsingAsSelect' class.
CreateTempTableUsingAsSelect ( java.lang.String, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan )This constructor is from 'CreateTempTableUsingAsSelect' class.
curried ( )This method is from 'CreateTempTableUsingAsSelect' class.
equals ( java.lang.Object )This method is from 'CreateTempTableUsingAsSelect' class.
hashCode ( )This method is from 'CreateTempTableUsingAsSelect' class.
mode ( )This method is from 'CreateTempTableUsingAsSelect' class.
options ( )This method is from 'CreateTempTableUsingAsSelect' class.
productArity ( )This method is from 'CreateTempTableUsingAsSelect' class.
productElement ( int )This method is from 'CreateTempTableUsingAsSelect' class.
productIterator ( )This method is from 'CreateTempTableUsingAsSelect' class.
...
[+] DDLParser (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (2)
apply ( java.lang.String, boolean )This method is from 'DDLParser' class.
DDLParser ( scala.Function1<java.lang.String,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> )This constructor is from 'DDLParser' class.
[+] DescribeCommand (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'DescribeCommand' class.
copy ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean )This method is from 'DescribeCommand' class.
curried ( )This method is from 'DescribeCommand' class.
DescribeCommand ( org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean )This constructor is from 'DescribeCommand' class.
equals ( java.lang.Object )This method is from 'DescribeCommand' class.
hashCode ( )This method is from 'DescribeCommand' class.
isExtended ( )This method is from 'DescribeCommand' class.
output ( )This method is from 'DescribeCommand' class.
productArity ( )This method is from 'DescribeCommand' class.
productElement ( int )This method is from 'DescribeCommand' class.
productIterator ( )This method is from 'DescribeCommand' class.
...
[+] EqualTo (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'EqualTo' class.
canEqual ( java.lang.Object )This method is from 'EqualTo' class.
copy ( java.lang.String, java.lang.Object )This method is from 'EqualTo' class.
curried ( )This method is from 'EqualTo' class.
equals ( java.lang.Object )This method is from 'EqualTo' class.
EqualTo ( java.lang.String, java.lang.Object )This constructor is from 'EqualTo' class.
hashCode ( )This method is from 'EqualTo' class.
productArity ( )This method is from 'EqualTo' class.
productElement ( int )This method is from 'EqualTo' class.
productIterator ( )This method is from 'EqualTo' class.
productPrefix ( )This method is from 'EqualTo' class.
...
[+] Filter (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
Filter ( )This constructor is from 'Filter' abstract class.
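The removed classes above and below form 1.3.0's public filter algebra for data sources; a one-line reminder of how client code combined them:

```scala
import org.apache.spark.sql.sources.{And, EqualTo, Filter, GreaterThan}

// A pushed-down predicate: status = 'open' AND age > 21
val pushed: Filter = And(EqualTo("status", "open"), GreaterThan("age", 21))
// A jar containing this line triggers NoClassDefFoundError on 1.0.0, where
// neither Filter nor any of its case classes exist.
```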
[+] GreaterThan (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'GreaterThan' class.
canEqual ( java.lang.Object )This method is from 'GreaterThan' class.
copy ( java.lang.String, java.lang.Object )This method is from 'GreaterThan' class.
curried ( )This method is from 'GreaterThan' class.
equals ( java.lang.Object )This method is from 'GreaterThan' class.
GreaterThan ( java.lang.String, java.lang.Object )This constructor is from 'GreaterThan' class.
hashCode ( )This method is from 'GreaterThan' class.
productArity ( )This method is from 'GreaterThan' class.
productElement ( int )This method is from 'GreaterThan' class.
productIterator ( )This method is from 'GreaterThan' class.
productPrefix ( )This method is from 'GreaterThan' class.
...
[+] GreaterThanOrEqual (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'GreaterThanOrEqual' class.
canEqual ( java.lang.Object )This method is from 'GreaterThanOrEqual' class.
copy ( java.lang.String, java.lang.Object )This method is from 'GreaterThanOrEqual' class.
curried ( )This method is from 'GreaterThanOrEqual' class.
equals ( java.lang.Object )This method is from 'GreaterThanOrEqual' class.
GreaterThanOrEqual ( java.lang.String, java.lang.Object )This constructor is from 'GreaterThanOrEqual' class.
hashCode ( )This method is from 'GreaterThanOrEqual' class.
productArity ( )This method is from 'GreaterThanOrEqual' class.
productElement ( int )This method is from 'GreaterThanOrEqual' class.
productIterator ( )This method is from 'GreaterThanOrEqual' class.
productPrefix ( )This method is from 'GreaterThanOrEqual' class.
...
[+] In (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'In' class.
canEqual ( java.lang.Object )This method is from 'In' class.
copy ( java.lang.String, java.lang.Object[ ] )This method is from 'In' class.
curried ( )This method is from 'In' class.
equals ( java.lang.Object )This method is from 'In' class.
hashCode ( )This method is from 'In' class.
In ( java.lang.String, java.lang.Object[ ] )This constructor is from 'In' class.
productArity ( )This method is from 'In' class.
productElement ( int )This method is from 'In' class.
productIterator ( )This method is from 'In' class.
productPrefix ( )This method is from 'In' class.
...
[+] InsertableRelation (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
insert ( org.apache.spark.sql.DataFrame, boolean )This abstract method is from 'InsertableRelation' interface.
[+] InsertIntoDataSource (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'InsertIntoDataSource' class.
copy ( LogicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean )This method is from 'InsertIntoDataSource' class.
curried ( )This method is from 'InsertIntoDataSource' class.
equals ( java.lang.Object )This method is from 'InsertIntoDataSource' class.
hashCode ( )This method is from 'InsertIntoDataSource' class.
InsertIntoDataSource ( LogicalRelation, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, boolean )This constructor is from 'InsertIntoDataSource' class.
logicalRelation ( )This method is from 'InsertIntoDataSource' class.
overwrite ( )This method is from 'InsertIntoDataSource' class.
productArity ( )This method is from 'InsertIntoDataSource' class.
productElement ( int )This method is from 'InsertIntoDataSource' class.
productIterator ( )This method is from 'InsertIntoDataSource' class.
...
[+] IsNotNull (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<IsNotNull,A> )This method is from 'IsNotNull' class.
attribute ( )This method is from 'IsNotNull' class.
canEqual ( java.lang.Object )This method is from 'IsNotNull' class.
compose ( scala.Function1<A,java.lang.String> )This method is from 'IsNotNull' class.
copy ( java.lang.String )This method is from 'IsNotNull' class.
equals ( java.lang.Object )This method is from 'IsNotNull' class.
hashCode ( )This method is from 'IsNotNull' class.
IsNotNull ( java.lang.String )This constructor is from 'IsNotNull' class.
productArity ( )This method is from 'IsNotNull' class.
productElement ( int )This method is from 'IsNotNull' class.
productIterator ( )This method is from 'IsNotNull' class.
...
[+] IsNull (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<IsNull,A> )This method is from 'IsNull' class.
attribute ( )This method is from 'IsNull' class.
canEqual ( java.lang.Object )This method is from 'IsNull' class.
compose ( scala.Function1<A,java.lang.String> )This method is from 'IsNull' class.
copy ( java.lang.String )This method is from 'IsNull' class.
equals ( java.lang.Object )This method is from 'IsNull' class.
hashCode ( )This method is from 'IsNull' class.
IsNull ( java.lang.String )This constructor is from 'IsNull' class.
productArity ( )This method is from 'IsNull' class.
productElement ( int )This method is from 'IsNull' class.
productIterator ( )This method is from 'IsNull' class.
...
[+] LessThan (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'LessThan' class.
canEqual ( java.lang.Object )This method is from 'LessThan' class.
copy ( java.lang.String, java.lang.Object )This method is from 'LessThan' class.
curried ( )This method is from 'LessThan' class.
equals ( java.lang.Object )This method is from 'LessThan' class.
hashCode ( )This method is from 'LessThan' class.
LessThan ( java.lang.String, java.lang.Object )This constructor is from 'LessThan' class.
productArity ( )This method is from 'LessThan' class.
productElement ( int )This method is from 'LessThan' class.
productIterator ( )This method is from 'LessThan' class.
productPrefix ( )This method is from 'LessThan' class.
...
[+] LessThanOrEqual (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
attribute ( )This method is from 'LessThanOrEqual' class.
canEqual ( java.lang.Object )This method is from 'LessThanOrEqual' class.
copy ( java.lang.String, java.lang.Object )This method is from 'LessThanOrEqual' class.
curried ( )This method is from 'LessThanOrEqual' class.
equals ( java.lang.Object )This method is from 'LessThanOrEqual' class.
hashCode ( )This method is from 'LessThanOrEqual' class.
LessThanOrEqual ( java.lang.String, java.lang.Object )This constructor is from 'LessThanOrEqual' class.
productArity ( )This method is from 'LessThanOrEqual' class.
productElement ( int )This method is from 'LessThanOrEqual' class.
productIterator ( )This method is from 'LessThanOrEqual' class.
productPrefix ( )This method is from 'LessThanOrEqual' class.
...
[+] LogicalRelation (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<LogicalRelation,A> )This method is from 'LogicalRelation' class.
attributeMap ( )This method is from 'LogicalRelation' class.
canEqual ( java.lang.Object )This method is from 'LogicalRelation' class.
compose ( scala.Function1<A,BaseRelation> )This method is from 'LogicalRelation' class.
copy ( BaseRelation )This method is from 'LogicalRelation' class.
equals ( java.lang.Object )This method is from 'LogicalRelation' class.
hashCode ( )This method is from 'LogicalRelation' class.
LogicalRelation ( BaseRelation )This constructor is from 'LogicalRelation' class.
newInstance ( )This method is from 'LogicalRelation' class.
newInstance ( )This method is from 'LogicalRelation' class.
output ( )This method is from 'LogicalRelation' class.
...
[+] Not (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<Not,A> )This method is from 'Not' class.
canEqual ( java.lang.Object )This method is from 'Not' class.
child ( )This method is from 'Not' class.
compose ( scala.Function1<A,Filter> )This method is from 'Not' class.
copy ( Filter )This method is from 'Not' class.
equals ( java.lang.Object )This method is from 'Not' class.
hashCode ( )This method is from 'Not' class.
Not ( Filter )This constructor is from 'Not' class.
productArity ( )This method is from 'Not' class.
productElement ( int )This method is from 'Not' class.
productIterator ( )This method is from 'Not' class.
...
[+] Or (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'Or' class.
copy ( Filter, Filter )This method is from 'Or' class.
curried ( )This method is from 'Or' class.
equals ( java.lang.Object )This method is from 'Or' class.
hashCode ( )This method is from 'Or' class.
left ( )This method is from 'Or' class.
Or ( Filter, Filter )This constructor is from 'Or' class.
productArity ( )This method is from 'Or' class.
productElement ( int )This method is from 'Or' class.
productIterator ( )This method is from 'Or' class.
productPrefix ( )This method is from 'Or' class.
...
[+] PreWriteCheck (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
andThen ( scala.Function1<scala.runtime.BoxedUnit,A> )This method is from 'PreWriteCheck' class.
andThen.mcDD.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcDF.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcDI.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcDJ.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcFD.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcFF.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcFI.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcFJ.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcID.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
andThen.mcIF.sp ( scala.Function1<java.lang.Object,A> )This method is from 'PreWriteCheck' class.
...
[+] PrunedFilteredScan (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
buildScan ( java.lang.String[ ], Filter[ ] )This abstract method is from 'PrunedFilteredScan' interface.
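A hedged sketch of the PrunedFilteredScan contract shown above (the class name `PrunedDummy` and the unimplemented body are placeholders, not library code; the `RDD[Row]` return type is the 1.3.0 signature, and real sources also extend BaseRelation):

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.sources.{Filter, PrunedFilteredScan}

class PrunedDummy extends PrunedFilteredScan {
  // The source receives the projected columns plus the filters Spark could
  // push down, and returns only the matching rows for those columns.
  override def buildScan(requiredColumns: Array[String],
                         filters: Array[Filter]): RDD[Row] = ???
}
```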
[+] RefreshTable (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
canEqual ( java.lang.Object )This method is from 'RefreshTable' class.
copy ( java.lang.String, java.lang.String )This method is from 'RefreshTable' class.
curried ( )This method is from 'RefreshTable' class.
databaseName ( )This method is from 'RefreshTable' class.
equals ( java.lang.Object )This method is from 'RefreshTable' class.
hashCode ( )This method is from 'RefreshTable' class.
productArity ( )This method is from 'RefreshTable' class.
productElement ( int )This method is from 'RefreshTable' class.
productIterator ( )This method is from 'RefreshTable' class.
productPrefix ( )This method is from 'RefreshTable' class.
RefreshTable ( java.lang.String, java.lang.String )This constructor is from 'RefreshTable' class.
...
[+] RelationProvider (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String> )This abstract method is from 'RelationProvider' interface.
[+] ResolvedDataSource (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (>10)
apply ( org.apache.spark.sql.SQLContext, java.lang.String, org.apache.spark.sql.SaveMode, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.DataFrame )This method is from 'ResolvedDataSource' class.
apply ( org.apache.spark.sql.SQLContext, scala.Option<org.apache.spark.sql.types.StructType>, java.lang.String, scala.collection.immutable.Map<java.lang.String,java.lang.String> )This method is from 'ResolvedDataSource' class.
canEqual ( java.lang.Object )This method is from 'ResolvedDataSource' class.
copy ( java.lang.Class<?>, BaseRelation )This method is from 'ResolvedDataSource' class.
equals ( java.lang.Object )This method is from 'ResolvedDataSource' class.
hashCode ( )This method is from 'ResolvedDataSource' class.
lookupDataSource ( java.lang.String )This method is from 'ResolvedDataSource' class.
productArity ( )This method is from 'ResolvedDataSource' class.
productElement ( int )This method is from 'ResolvedDataSource' class.
productIterator ( )This method is from 'ResolvedDataSource' class.
productPrefix ( )This method is from 'ResolvedDataSource' class.
...
[+] SchemaRelationProvider (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
createRelation ( org.apache.spark.sql.SQLContext, scala.collection.immutable.Map<java.lang.String,java.lang.String>, org.apache.spark.sql.types.StructType )This abstract method is from 'SchemaRelationProvider' interface.
[+] TableScan (1)
| Change | Effect |
---|
1 | This interface has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (1)
buildScan ( )This abstract method is from 'TableScan' interface.
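To make the impact of this package's removal concrete, here is a minimal external data source written against the public 1.3.0 developer API. The names `DummyRelation` and `DummyProvider` are ours; the traits, the `schema`/`sqlContext` members, and the `buildScan(): RDD[Row]` signature are the 1.3.0 API this report shows missing from 1.0.0.

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider, TableScan}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

// A relation that scans a tiny hard-coded dataset.
class DummyRelation(override val sqlContext: SQLContext)
    extends BaseRelation with TableScan {
  override def schema: StructType =
    StructType(StructField("id", IntegerType) :: Nil)
  override def buildScan(): RDD[Row] =
    sqlContext.sparkContext.parallelize(1 to 3).map(Row(_))
}

// The entry point Spark instantiates by class name.
class DummyProvider extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation =
    new DummyRelation(sqlContext)
}
// Loading these classes on a 1.0.0 classpath fails as soon as the JVM
// resolves BaseRelation/TableScan/RelationProvider: NoClassDefFoundError.
```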
package org.apache.spark.sql.test
[+] ExamplePoint (1)
| Change | Effect |
---|
1 | This class has been removed. | A client program may be interrupted by NoClassDefFoundError exception. |
[+] affected methods (3)
ExamplePoint ( double, double )This constructor is from 'ExamplePoint' class.
x ( )This method is from 'ExamplePoint' class.
y ( )This method is from 'ExamplePoint' class.
Problems with Methods, High Severity (4)
spark-sql_2.10-1.3.0.jar, AddExchange
package org.apache.spark.sql.execution
[+] AddExchange.apply ( SparkPlan plan ) : SparkPlan (1)
[mangled: org/apache/spark/sql/execution/AddExchange.apply:(Lorg/apache/spark/sql/execution/SparkPlan;)Lorg/apache/spark/sql/execution/SparkPlan;]
| Change | Effect |
---|
1 | Method became static. | A client program may be interrupted by NoSuchMethodError exception. |
[+] AddExchange.numPartitions ( ) : int (1)
[mangled: org/apache/spark/sql/execution/AddExchange.numPartitions:()I]
| Change | Effect |
---|
1 | Method became static. | A client program may be interrupted by NoSuchMethodError exception. |
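A runtime probe (a sketch; the object name is ours) for the static/virtual mismatch flagged above: clients precompiled against the 1.3.0 instance methods carry `invokevirtual` call sites, which fail with NoSuchMethodError once the target method is static.

```scala
import java.lang.reflect.Modifier

object ProbeAddExchange {
  def main(args: Array[String]): Unit = {
    val cls = Class.forName("org.apache.spark.sql.execution.AddExchange")
    // Report the staticness of every apply overload on the current classpath.
    cls.getMethods.filter(_.getName == "apply").foreach { m =>
      println(s"$m static=${Modifier.isStatic(m.getModifiers)}")
    }
  }
}
```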
spark-sql_2.10-1.3.0.jar, RowWriteSupport
package org.apache.spark.sql.parquet
[+] RowWriteSupport.writer ( ) : parquet.io.api.RecordConsumer (1)
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writer:()Lparquet/io/api/RecordConsumer;]
| Change | Effect |
---|
1 | Access level has been changed from public to private. | A client program may be interrupted by IllegalAccessError exception. |
[+] RowWriteSupport.writer_.eq ( parquet.io.api.RecordConsumer p1 ) : void (1)
[mangled: org/apache/spark/sql/parquet/RowWriteSupport.writer_.eq:(Lparquet/io/api/RecordConsumer;)V]
| Change | Effect |
---|
1 | Access level has been changed from public to private. | A client program may be interrupted by IllegalAccessError exception. |
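A companion probe (again a sketch) for the access-level change: `getDeclaredMethod` finds the accessor regardless of its visibility, so the change can be observed directly.

```scala
import java.lang.reflect.Modifier

object ProbeRowWriteSupport {
  def main(args: Array[String]): Unit = {
    val cls = Class.forName("org.apache.spark.sql.parquet.RowWriteSupport")
    val m = cls.getDeclaredMethod("writer")
    // false on 1.0.0: a client compiled against the public 1.3.0 accessor
    // hits IllegalAccessError when it calls writer() there.
    println(s"writer() public: ${Modifier.isPublic(m.getModifiers)}")
  }
}
```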
Problems with Data Types, Medium Severity (32)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] BinaryColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from BasicColumnAccessor<org.apache.spark.sql.types.BinaryType.,byte[]> to BasicColumnAccessor<org.apache.spark.sql.catalyst.types.BinaryType.,byte[]>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
BinaryColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'BinaryColumnAccessor' class.
[+] BinaryColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from ComplexColumnBuilder<org.apache.spark.sql.types.BinaryType.,byte[]> to ComplexColumnBuilder<org.apache.spark.sql.catalyst.types.BinaryType.,byte[]>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
BinaryColumnBuilder ( )This constructor is from 'BinaryColumnBuilder' class.
[+] BooleanColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.BooleanType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.BooleanType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
BooleanColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'BooleanColumnAccessor' class.
[+] BooleanColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.BooleanType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.BooleanType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
BooleanColumnBuilder ( )This constructor is from 'BooleanColumnBuilder' class.
[+] ByteColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.ByteType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.ByteType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
ByteColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'ByteColumnAccessor' class.
[+] ByteColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.ByteType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.ByteType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
ByteColumnBuilder ( )This constructor is from 'ByteColumnBuilder' class.
[+] DoubleColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.DoubleType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.DoubleType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
DoubleColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'DoubleColumnAccessor' class.
[+] DoubleColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.DoubleType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.DoubleType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
DoubleColumnBuilder ( )This constructor is from 'DoubleColumnBuilder' class.
[+] FloatColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.FloatType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.FloatType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
FloatColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'FloatColumnAccessor' class.
[+] FloatColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.FloatType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.FloatType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
FloatColumnBuilder ( )This constructor is from 'FloatColumnBuilder' class.
[+] GenericColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from BasicColumnAccessor<org.apache.spark.sql.types.DataType,byte[]> to BasicColumnAccessor<org.apache.spark.sql.catalyst.types.DataType,byte[]>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
GenericColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'GenericColumnAccessor' class.
[+] GenericColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from ComplexColumnBuilder<org.apache.spark.sql.types.DataType,byte[]> to ComplexColumnBuilder<org.apache.spark.sql.catalyst.types.DataType,byte[]>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
GenericColumnBuilder ( )This constructor is from 'GenericColumnBuilder' class.
[+] IntColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.IntegerType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.IntegerType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
IntColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'IntColumnAccessor' class.
[+] IntColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.IntegerType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.IntegerType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
IntColumnBuilder ( )This constructor is from 'IntColumnBuilder' class.
[+] LongColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.LongType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.LongType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
LongColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'LongColumnAccessor' class.
[+] LongColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.LongType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.LongType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
LongColumnBuilder ( )This constructor is from 'LongColumnBuilder' class.
[+] ShortColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.ShortType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.ShortType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
ShortColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'ShortColumnAccessor' class.
[+] ShortColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.ShortType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.ShortType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
ShortColumnBuilder ( )This constructor is from 'ShortColumnBuilder' class.
[+] StringColumnAccessor (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnAccessor<org.apache.spark.sql.types.StringType.> to NativeColumnAccessor<org.apache.spark.sql.catalyst.types.StringType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
StringColumnAccessor ( java.nio.ByteBuffer )This constructor is from 'StringColumnAccessor' class.
[+] StringColumnBuilder (1)
| Change | Effect |
---|
1 | Superclass has been changed from NativeColumnBuilder<org.apache.spark.sql.types.StringType.> to NativeColumnBuilder<org.apache.spark.sql.catalyst.types.StringType.>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |
[+] affected methods (1)
StringColumnBuilder ( )This constructor is from 'StringColumnBuilder' class.
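A self-contained toy model (not Spark code) of the superclass-swap hazard repeated above, showing version 1 of an imaginary library together with a client of it:

```scala
// Version 1 of the library: Leaf inherits width from ParentV1.
class ParentV1 { def width: Int = 8 }
class Leaf extends ParentV1

object Client {
  def main(args: Array[String]): Unit = {
    val l = new Leaf
    // This call links against the member Leaf inherits from ParentV1. If the
    // library is rebuilt so that Leaf extends a parent without `width` (as the
    // column accessors/builders above switch their generic superclasses) and
    // only the jar is swapped, the same call site throws NoSuchMethodError.
    println(l.width)
  }
}
```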
package org.apache.spark.sql.execution
[+] AddExchange (1)
| Change | Effect |
---|
1 | Removed super-class org.apache.spark.sql.catalyst.rules.Rule<SparkPlan>. | Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. |
[+] affected methods (2)
apply ( SparkPlan )This method is from 'AddExchange' class.
numPartitions ( )This method is from 'AddExchange' class.
[+] SparkPlan (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-interface com.typesafe.scalalogging.slf4j.Logging. | If abstract methods from an added super-interface must be implemented by client then it may be interrupted by AbstractMethodError exception. Abstract method toString ( ) from the added super-interface is called by the method cachedColumnBuffers.lzycompute ( ) in 2nd library version and may not be implemented by old clients. |

[+] affected methods (>10)
apply ( SparkPlan ) - 1st parameter 'plan' of this method has type 'SparkPlan'.
child ( ) - Return value of this method has type 'SparkPlan'.
child ( ) - Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.plans.physical.Partitioning, SparkPlan ) - 2nd parameter 'child' of this method has type 'SparkPlan'.
Exchange ( org.apache.spark.sql.catalyst.plans.physical.Partitioning, SparkPlan ) - 2nd parameter 'child' of this method has type 'SparkPlan'.
child ( ) - Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.expressions.Expression, SparkPlan ) - 2nd parameter 'child' of this method has type 'SparkPlan'.
Filter ( org.apache.spark.sql.catalyst.expressions.Expression, SparkPlan ) - 2nd parameter 'child' of this method has type 'SparkPlan'.
child ( ) - Return value of this method has type 'SparkPlan'.
copy ( org.apache.spark.sql.catalyst.expressions.Generator, boolean, boolean, SparkPlan ) - 4th parameter 'child' of this method has type 'SparkPlan'.
Generate ( org.apache.spark.sql.catalyst.expressions.Generator, boolean, boolean, SparkPlan ) - 4th parameter 'child' of this method has type 'SparkPlan'.
...
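The AbstractMethodError effect noted for SparkPlan deserves an illustration. A rough sketch with hypothetical stand-ins (Logging, Plan, MyPlan), not the real SparkPlan hierarchy:

```java
// Hypothetical stand-ins for a class that gains a super-interface.

interface Logging {                         // added in version 2
    String logName();
}

abstract class Plan implements Logging { }  // version 2 re-parents Plan

// A client subclass compiled against version 1, when Plan had no
// super-interface, carries no logName() body:
//
//     class MyPlan extends Plan { }
//
// Loading that stale MyPlan.class against version 2 still succeeds;
// only the first call to its logName() throws AbstractMethodError.

public class Demo {
    // Version-2 library code routinely calls the interface method,
    // which is what trips up stale client subclasses:
    static void report(Plan plan) {
        System.out.println(plan.logName());
    }

    public static void main(String[] args) {
        // A subclass compiled against version 2 is forced to implement
        // the method, so this call succeeds:
        report(new Plan() {
            public String logName() { return "fresh subclass"; }
        });
    }
}
```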
[+] SparkStrategies.BasicOperators. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
BasicOperators ( ) - Return value of this method has type 'SparkStrategies.BasicOperators.'.
[+] SparkStrategies.BroadcastNestedLoopJoin. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
BroadcastNestedLoopJoin ( ) - Return value of this method has type 'SparkStrategies.BroadcastNestedLoopJoin.'.
[+] SparkStrategies.CartesianProduct. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
CartesianProduct ( ) - Return value of this method has type 'SparkStrategies.CartesianProduct.'.
[+] SparkStrategies.HashJoin. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
HashJoin ( ) - Return value of this method has type 'SparkStrategies.HashJoin.'.
[+] SparkStrategies.ParquetOperations. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
ParquetOperations ( ) - Return value of this method has type 'SparkStrategies.ParquetOperations.'.
[+] SparkStrategies.TakeOrdered. (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from org.apache.spark.sql.catalyst.planning.GenericStrategy<SparkPlan> to org.apache.spark.sql.catalyst.planning.QueryPlanner<SparkPlan>.Strategy. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
TakeOrdered ( ) - Return value of this method has type 'SparkStrategies.TakeOrdered.'.
package org.apache.spark.sql.parquet
[+] AppendingParquetOutputFormat (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from parquet.hadoop.ParquetOutputFormat<org.apache.spark.sql.Row> to parquet.hadoop.ParquetOutputFormat<org.apache.spark.sql.catalyst.expressions.Row>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
AppendingParquetOutputFormat ( int ) - This constructor is from 'AppendingParquetOutputFormat' class.
[+] CatalystGroupConverter (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from CatalystConverter to parquet.io.api.GroupConverter. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (4)
converters ( ) - This method is from 'CatalystGroupConverter' class.
end ( ) - This method is from 'CatalystGroupConverter' class.
getConverter ( int ) - This method is from 'CatalystGroupConverter' class.
start ( ) - This method is from 'CatalystGroupConverter' class.
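Superclass swaps such as this one also break client-side casts, not only member access. A sketch under hypothetical names (OldBase, NewBase, Impl), assuming the old superclass remains on the classpath:

```java
// Hypothetical illustration: Impl extends OldBase in version 1 and
// NewBase in version 2; OldBase itself still exists in both versions.

class OldBase { }                 // the superclass in version 1
class NewBase { }                 // the superclass in version 2
class Impl extends OldBase { }    // change to NewBase for version 2

public class Demo {
    public static void main(String[] args) {
        Object o = new Impl();
        // A client compiled against version 1 may legitimately cast.
        // Against version 2 the checkcast fails at run time with
        // java.lang.ClassCastException (or NoClassDefFoundError if the
        // old superclass was removed from the library entirely).
        OldBase b = (OldBase) o;
        System.out.println("cast ok: " + b);
    }
}
```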
[+] RowReadSupport (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from parquet.hadoop.api.ReadSupport<org.apache.spark.sql.Row> to parquet.hadoop.api.ReadSupport<org.apache.spark.sql.catalyst.expressions.Row>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
RowReadSupport ( ) - This constructor is from 'RowReadSupport' class.
[+] RowWriteSupport (1)

| | Change | Effect |
|---|---|---|
| 1 | Superclass has been changed from parquet.hadoop.api.WriteSupport<org.apache.spark.sql.Row> to parquet.hadoop.api.WriteSupport<org.apache.spark.sql.catalyst.expressions.Row>. | 1) Access of a client program to the fields or methods of the old super-class may be interrupted by NoSuchFieldError or NoSuchMethodError exceptions. 2) A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (>10)
init ( org.apache.hadoop.conf.Configuration ) - This method is from 'RowWriteSupport' class.
isTraceEnabled ( ) - This method is from 'RowWriteSupport' class.
log ( ) - This method is from 'RowWriteSupport' class.
logDebug ( scala.Function0<java.lang.String> ) - This method is from 'RowWriteSupport' class.
logDebug ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'RowWriteSupport' class.
logError ( scala.Function0<java.lang.String> ) - This method is from 'RowWriteSupport' class.
logError ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'RowWriteSupport' class.
logInfo ( scala.Function0<java.lang.String> ) - This method is from 'RowWriteSupport' class.
logInfo ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'RowWriteSupport' class.
logTrace ( scala.Function0<java.lang.String> ) - This method is from 'RowWriteSupport' class.
logTrace ( scala.Function0<java.lang.String>, java.lang.Throwable ) - This method is from 'RowWriteSupport' class.
...
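A note on what these generic superclass changes amount to in bytecode: type arguments are erased, so the raw superclass name (e.g. parquet.hadoop.api.WriteSupport) is identical in both versions; the breaking part is that the Row type itself lives in a different package in each version. A sketch, assuming the client was compiled against the 1.3.0 jar:

```java
// Hypothetical client code compiled against spark-sql 1.3.0, where
// org.apache.spark.sql.Row exists as a class.
import org.apache.spark.sql.Row;

public class RowProbe {
    public static void main(String[] args) {
        // The class literal forces the JVM to resolve Row here. On a
        // 1.0.0 classpath, where the class is
        // org.apache.spark.sql.catalyst.expressions.Row instead, this
        // line throws java.lang.NoClassDefFoundError.
        System.out.println(Row.class.getName());
    }
}
```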
Problems with Data Types, Low Severity (12)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] BooleanColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.BooleanType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
BooleanColumnStats ( ) - This constructor is from 'BooleanColumnStats' class.
[+] ByteColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.ByteType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
ByteColumnStats ( ) - This constructor is from 'ByteColumnStats' class.
[+] DoubleColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.DoubleType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
DoubleColumnStats ( ) - This constructor is from 'DoubleColumnStats' class.
[+] FloatColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.FloatType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
FloatColumnStats ( ) - This constructor is from 'FloatColumnStats' class.
[+] IntColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.IntegerType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
IntColumnStats ( ) - This constructor is from 'IntColumnStats' class.
[+] LongColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.LongType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
LongColumnStats ( ) - This constructor is from 'LongColumnStats' class.
[+] ShortColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.ShortType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
ShortColumnStats ( ) - This constructor is from 'ShortColumnStats' class.
[+] StringColumnStats (1)

| | Change | Effect |
|---|---|---|
| 1 | Added super-class BasicColumnStats<org.apache.spark.sql.catalyst.types.StringType.>. | A static field from a super-interface of a client class may hide a field (with the same name) inherited from new super-class and cause IncompatibleClassChangeError exception. |

[+] affected methods (1)
StringColumnStats ( ) - This constructor is from 'StringColumnStats' class.
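The IncompatibleClassChangeError cited for these added super-classes arises when field resolution finds a field whose static-versus-instance kind no longer matches the compiled access. A minimal sketch of that mechanism with a hypothetical Config class:

```java
// Hypothetical two-version demo. Compile Demo against this version,
// then switch LIMIT to an instance field, recompile Config alone, and
// rerun Demo without recompiling it.

class Config {
    public static int LIMIT = 8;   // version 1: static
    // version 2 instead declares:  public int LIMIT = 8;
}

public class Demo {
    public static void main(String[] args) {
        // Compiled against version 1, javac emits a getstatic for
        // Config.LIMIT. If resolution later finds a non-static field
        // of the same name, the JVM throws
        // java.lang.IncompatibleClassChangeError.
        System.out.println(Config.LIMIT);
    }
}
```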
package org.apache.spark.sql.execution
[+] Limit (2)

| | Change | Effect |
|---|---|---|
| 1 | Method executeCollect ( ) has been moved up the type hierarchy. | The inherited executeCollect ( ) will be called instead of this class's own executeCollect ( ) in a client program. |
| 2 | Method outputPartitioning ( ) has been moved up the type hierarchy. | The inherited outputPartitioning ( ) will be called instead of this class's own outputPartitioning ( ) in a client program. |

[+] affected methods (2)
executeCollect ( ) - The version inherited from the super-class will be called instead of this method in a client program.
outputPartitioning ( ) - The version inherited from the super-class will be called instead of this method in a client program.
[+] TakeOrdered (2)

| | Change | Effect |
|---|---|---|
| 1 | Method executeCollect ( ) has been moved up the type hierarchy. | The inherited executeCollect ( ) will be called instead of this class's own executeCollect ( ) in a client program. |
| 2 | Method outputPartitioning ( ) has been moved up the type hierarchy. | The inherited outputPartitioning ( ) will be called instead of this class's own outputPartitioning ( ) in a client program. |

[+] affected methods (2)
executeCollect ( ) - The version inherited from the super-class will be called instead of this method in a client program.
outputPartitioning ( ) - The version inherited from the super-class will be called instead of this method in a client program.
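Unlike the high-severity problems earlier in the report, nothing fails to link when a method moves up the type hierarchy; the hazard is a silent behavior change. A sketch with hypothetical Plan and Limit classes (not the real Spark ones):

```java
// Hypothetical classes illustrating "method moved up type hierarchy".

class Plan {
    public String collect() { return "generic collect"; }
}

class Limit extends Plan {
    // Version 1 declared an override here:
    //     @Override public String collect() { return "limit-aware"; }
    // Version 2 removes it, so the method "moves up" to Plan.
}

public class Demo {
    public static void main(String[] args) {
        // The client's compiled method reference still resolves, via
        // the superclass, so there is no linkage error; but the
        // inherited Plan.collect() now runs in place of the removed
        // override, which can silently change results.
        System.out.println(new Limit().collect());
    }
}
```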
Other Changes in Data Types (10)
spark-sql_2.10-1.3.0.jar
package org.apache.spark.sql.columnar
[+] ColumnBuilder (1)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |

[+] affected methods (3)
build ( ) - This abstract method is from 'ColumnBuilder' interface.
columnStats ( ) - This abstract method is from 'ColumnBuilder' interface.
initialize ( int, java.lang.String, boolean ) - This abstract method is from 'ColumnBuilder' interface.
[+] NullableColumnBuilder (6)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |
| 2 | Abstract method NullableColumnBuilder..nullCount ( ) has been added to this interface. | No effect. |
| 3 | Abstract method NullableColumnBuilder..nullCount_.eq ( int ) has been added to this interface. | No effect. |
| 4 | Abstract method NullableColumnBuilder..nulls ( ) has been added to this interface. | No effect. |
| 5 | Abstract method NullableColumnBuilder..nulls_.eq ( java.nio.ByteBuffer ) has been added to this interface. | No effect. |
| 6 | Abstract method NullableColumnBuilder..super.appendFrom ( org.apache.spark.sql.catalyst.expressions.Row, int ) has been added to this interface. | No effect. |

[+] affected methods (6)
build ( ) - This abstract method is from 'NullableColumnBuilder' interface.
initialize ( int, java.lang.String, boolean ) - This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos ( ) - This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..pos_.eq ( int ) - This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.build ( ) - This abstract method is from 'NullableColumnBuilder' interface.
NullableColumnBuilder..super.initialize ( int, java.lang.String, boolean ) - This abstract method is from 'NullableColumnBuilder' interface.
package org.apache.spark.sql.columnar.compression
[+] CompressionScheme (1)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method encoder ( ) has been added to this interface. | No effect. |

[+] affected methods (3)
decoder ( java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> ) - This abstract method is from 'CompressionScheme' interface.
supports ( org.apache.spark.sql.columnar.ColumnType<?,?> ) - This abstract method is from 'CompressionScheme' interface.
typeId ( ) - This abstract method is from 'CompressionScheme' interface.
[+] Encoder<T> (2)

| | Change | Effect |
|---|---|---|
| 1 | Abstract method compress ( java.nio.ByteBuffer, java.nio.ByteBuffer, org.apache.spark.sql.columnar.NativeColumnType<T> ) has been added to this interface. | No effect. |
| 2 | Abstract method gatherCompressibilityStats ( java.lang.Object, org.apache.spark.sql.columnar.NativeColumnType<T> ) has been added to this interface. | No effect. |

[+] affected methods (3)
compressedSize ( ) - This abstract method is from 'Encoder<T>' interface.
compressionRatio ( ) - This abstract method is from 'Encoder<T>' interface.
uncompressedSize ( ) - This abstract method is from 'Encoder<T>' interface.
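These interface additions are marked 'No effect' because a client that merely calls the pre-existing methods never references the new ones. A sketch under that assumption, with a hypothetical trimmed-down Builder interface standing in for ColumnBuilder:

```java
import java.nio.ByteBuffer;

// Hypothetical, trimmed-down stand-in for the ColumnBuilder interface.
interface Builder {
    ByteBuffer build();
    // Version 2 additionally declares (commented out so this file
    // models the version the client was compiled against):
    //     void appendFrom(Object row, int ordinal);
}

public class Demo {
    static ByteBuffer flush(Builder b) {
        // This call site references only build(), so adding appendFrom
        // to the interface leaves it linkable and unchanged. Only a
        // client that itself implements the interface, and is later
        // handed to code that calls the new method, could fail with
        // AbstractMethodError.
        return b.build();
    }

    public static void main(String[] args) {
        ByteBuffer buf = flush(new Builder() {
            public ByteBuffer build() { return ByteBuffer.allocate(0); }
        });
        System.out.println("flushed " + buf.capacity() + " bytes");
    }
}
```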
Java ARchives (1)
spark-sql_2.10-1.3.0.jar
Generated on Wed Oct 28 11:07:31 2015 for succinct-0.1.2 by Java API Compliance Checker 1.4.1
A tool for checking backward compatibility of a Java library API