public class LogicalRDD
extends org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
implements org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation, scala.Product, scala.Serializable
| Constructor and Description |
|---|
| LogicalRDD(scala.collection.Seq&lt;org.apache.spark.sql.catalyst.expressions.Attribute&gt; output, RDD&lt;org.apache.spark.sql.catalyst.expressions.Row&gt; rdd, SQLContext sqlContext) |
| Modifier and Type | Method and Description |
|---|---|
| scala.collection.immutable.Nil$ | children() |
| LogicalRDD | newInstance() |
| scala.collection.Seq&lt;org.apache.spark.sql.catalyst.expressions.Attribute&gt; | output() |
| RDD&lt;org.apache.spark.sql.catalyst.expressions.Row&gt; | rdd() |
| boolean | sameResult(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) |
| org.apache.spark.sql.catalyst.plans.logical.Statistics | statistics() |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
childrenResolved, cleanArgs, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, org$apache$spark$Logging$$log__$eq, org$apache$spark$Logging$$log_, resolve, resolve, resolveChildren, resolved, statePrefix

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan
expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode
apply, argString, asCode, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, makeCopy, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren

Methods inherited from interface scala.Product
productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.Logging
initializeIfNecessary, initializeLogging, log_
public LogicalRDD(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd, SQLContext sqlContext)
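LogicalRDD is the leaf logical plan node that exposes an already-computed RDD of Catalyst rows, together with the attributes describing its schema, to the Catalyst planner. The sketch below shows one way to construct it from Scala; the import path for LogicalRDD and the curried (output, rdd)(sqlContext) parameter lists are assumptions based on the Spark source, since the generated Java signature above flattens everything into a single three-argument constructor.

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
// Assumption: LogicalRDD lives in org.apache.spark.sql.execution in this release.
import org.apache.spark.sql.execution.LogicalRDD

object ExistingRddExample {
  // Sketch: wrap an already-computed RDD[Row] and the attributes describing
  // its schema as a leaf logical plan node, so Catalyst can plan over it.
  // Assumes the Scala constructor is curried as (output, rdd)(sqlContext).
  def wrapExistingRdd(
      output: Seq[Attribute],
      rowRdd: RDD[Row],
      sqlContext: SQLContext): LogicalRDD =
    LogicalRDD(output, rowRdd)(sqlContext)
}
```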
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by:
output in class org.apache.spark.sql.catalyst.plans.QueryPlan&lt;org.apache.spark.sql.catalyst.plans.logical.LogicalPlan&gt;
public RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd()
public scala.collection.immutable.Nil$ children()
public LogicalRDD newInstance()
Specified by:
newInstance in interface org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
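newInstance() is the MultiInstanceRelation contract: it returns a copy of this relation whose output attributes carry fresh expression IDs, so the same underlying RDD can appear more than once in a single plan (a self-join, for example) without ambiguous attribute references. A hedged Scala sketch of what that buys a caller; the import path and the claim that the copy shares the original RDD are assumptions based on the Spark source.

```scala
import org.apache.spark.sql.execution.LogicalRDD // package is an assumption

object SelfJoinExample {
  // Sketch: give each occurrence of the same relation its own attribute IDs.
  def duplicateForSelfJoin(relation: LogicalRDD): (LogicalRDD, LogicalRDD) = {
    val left  = relation
    val right = relation.newInstance()
    // Both sides are still backed by the same RDD (assumed, as in the Spark
    // source), but their output attributes now have distinct expression IDs,
    // so column references to either side remain unambiguous.
    (left, right)
  }
}
```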
public boolean sameResult(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
Overrides:
sameResult in class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
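sameResult() lets the caching and optimization layers recognize that two logical plans would return identical rows even if their output attribute IDs differ. For LogicalRDD this check appears to be based on the identity of the wrapped RDD (an assumption based on the Spark source), so a newInstance() copy still matches the original. A small hedged sketch:

```scala
import org.apache.spark.sql.execution.LogicalRDD // package is an assumption

object CacheLookupExample {
  // Sketch: recognize that two plan nodes would return the same rows even if
  // their output attribute IDs differ. This holds for a newInstance() copy,
  // since both still wrap the same underlying RDD (assumed, per the source).
  def hitsCache(cached: LogicalRDD, candidate: LogicalRDD): Boolean =
    cached.sameResult(candidate)
}
```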
public org.apache.spark.sql.catalyst.plans.logical.Statistics statistics()
Overrides:
statistics in class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
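statistics() supplies the size estimate the planner consults, for example when deciding whether one side of a join is small enough to broadcast. In the Spark source this override returns a default, configuration-driven estimate rather than inspecting the RDD itself (an assumption about this release). A minimal sketch of reading it; only Statistics.sizeInBytes is used.

```scala
import org.apache.spark.sql.execution.LogicalRDD // package is an assumption

object SizeEstimateExample {
  // Sketch: read the planner-facing size estimate used, e.g., when deciding
  // whether a join side is small enough to broadcast.
  def estimatedSizeInBytes(plan: LogicalRDD): BigInt =
    plan.statistics.sizeInBytes
}
```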