public class MetastoreRelation
extends org.apache.spark.sql.catalyst.plans.logical.LeafNode
implements scala.Product, scala.Serializable
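MetastoreRelation is the leaf logical-plan node that Spark SQL's Hive support produces for a metastore-backed table. A minimal usage sketch, assuming `plan` is an already-analyzed LogicalPlan and that the class lives in org.apache.spark.sql.hive (the `collect` method is inherited from TreeNode, listed below):

```scala
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.hive.MetastoreRelation

// Hedged sketch: list every Hive table an analyzed plan reads,
// by collecting the MetastoreRelation leaves of the plan tree.
def hiveTablesRead(plan: LogicalPlan): Seq[(String, String)] =
  plan.collect {
    case r: MetastoreRelation => (r.databaseName, r.tableName)
  }
```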
Modifier and Type | Class and Description |
---|---|
class | MetastoreRelation.SchemaAttribute |
Constructor and Description |
---|
MetastoreRelation(String databaseName, String tableName, scala.Option<String> alias, org.apache.hadoop.hive.metastore.api.Table table, scala.collection.Seq<org.apache.hadoop.hive.metastore.api.Partition> partitions, SQLContext sqlContext) |
Modifier and Type | Method and Description |
---|---|
scala.Option<String> | alias() |
org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.AttributeReference> | attributeMap() An attribute map that can be used to look up original attributes by expression id. |
scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> | attributes() Non-partition-key attributes. |
org.apache.spark.sql.catalyst.expressions.AttributeMap<Object> | columnOrdinals() An attribute map for determining the ordinal of non-partition columns. |
String | databaseName() |
scala.collection.Seq<org.apache.hadoop.hive.ql.metadata.Partition> | hiveQlPartitions() |
org.apache.hadoop.hive.ql.metadata.Table | hiveQlTable() |
scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> | output() |
scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> | partitionKeys() |
scala.collection.Seq<org.apache.hadoop.hive.metastore.api.Partition> | partitions() |
org.apache.spark.sql.catalyst.plans.logical.Statistics | statistics() |
org.apache.hadoop.hive.metastore.api.Table | table() |
org.apache.hadoop.hive.ql.plan.TableDesc | tableDesc() |
String | tableName() |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan: childrenResolved, cleanArgs, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, org$apache$spark$Logging$$log__$eq, org$apache$spark$Logging$$log_, resolve, resolve, resolveChildren, resolved, sameResult, statePrefix
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan: expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode: apply, argString, asCode, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, makeCopy, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
Methods inherited from interface scala.Product: productArity, productElement, productIterator, productPrefix
Methods inherited from interface org.apache.spark.Logging: initializeIfNecessary, initializeLogging, log_
public MetastoreRelation(String databaseName, String tableName, scala.Option<String> alias, org.apache.hadoop.hive.metastore.api.Table table, scala.collection.Seq<org.apache.hadoop.hive.metastore.api.Partition> partitions, SQLContext sqlContext)
public String databaseName()
public String tableName()
public scala.Option<String> alias()
public org.apache.hadoop.hive.metastore.api.Table table()
public scala.collection.Seq<org.apache.hadoop.hive.metastore.api.Partition> partitions()
public org.apache.hadoop.hive.ql.metadata.Table hiveQlTable()
public scala.collection.Seq<org.apache.hadoop.hive.ql.metadata.Partition> hiveQlPartitions()
public org.apache.spark.sql.catalyst.plans.logical.Statistics statistics()
Overrides: statistics in class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
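The returned Statistics carries the relation's estimated size, which the planner uses (for example) when deciding whether a table is small enough to broadcast in a join. A hedged sketch of inspecting it, assuming `rel` is a MetastoreRelation obtained from an analyzed plan:

```scala
// Sketch: read the size estimate; the 10 MB threshold here is a
// hypothetical stand-in for spark.sql.autoBroadcastJoinThreshold.
val sizeInBytes: BigInt = rel.statistics.sizeInBytes
val smallEnoughToBroadcast = sizeInBytes <= 10L * 1024 * 1024
```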
public org.apache.hadoop.hive.ql.plan.TableDesc tableDesc()
public scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> partitionKeys()
public scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> attributes()
public scala.collection.mutable.Buffer<org.apache.spark.sql.catalyst.expressions.AttributeReference> output()
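Per the summary table, attributes() excludes partition keys while output() is the relation's full attribute list. A sketch of the implied relationship; the exact ordering is an assumption not confirmed by this page:

```scala
// Assumption: output is the non-partition attributes followed by the
// partition keys; verify against the class source before relying on it.
val fullOutput = rel.attributes ++ rel.partitionKeys
assert(rel.output == fullOutput)
```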
public org.apache.spark.sql.catalyst.expressions.AttributeMap<org.apache.spark.sql.catalyst.expressions.AttributeReference> attributeMap()
public org.apache.spark.sql.catalyst.expressions.AttributeMap<Object> columnOrdinals()
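Both maps are keyed by attribute and match on expression id, per the summary descriptions. A hedged lookup sketch, assuming `attr` originated from this relation's output:

```scala
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}

// Sketch: map a possibly re-derived attribute back to the relation's
// canonical AttributeReference, then find its non-partition column ordinal.
def resolveColumn(rel: MetastoreRelation, attr: Attribute): (AttributeReference, Any) = {
  val original = rel.attributeMap(attr)   // lookup by expression id
  val ordinal  = rel.columnOrdinals(attr) // ordinal among non-partition columns
  (original, ordinal)
}
```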