public class CreateHiveTableAsSelectCommand extends org.apache.spark.sql.catalyst.plans.logical.LogicalPlan implements CreateHiveTableAsSelectBase, scala.Product, scala.Serializable
Create a Hive table and insert the query result into it (CTAS).

Parameters:
tableDesc - the table description, which may contain serde, storage handler, etc.
query - the query whose result will be inserted into the new relation
outputColumnNames - the column names of the query output
mode - the SaveMode to apply when the table already exists
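For orientation, here is a minimal sketch of the kind of statement that is typically planned as this command: a `CREATE TABLE ... AS SELECT` over a Hive serde table in a Hive-enabled session. The database and table names are hypothetical, and whether this exact command (rather than a native data source CTAS) is chosen depends on the session's configuration.

```scala
import org.apache.spark.sql.SparkSession

// A Hive-enabled session; Hive support is required for Hive CTAS planning.
val spark = SparkSession.builder()
  .appName("hive-ctas-sketch")
  .enableHiveSupport()
  .getOrCreate()

// Hypothetical databases/tables. A Hive-serde CTAS like this is the kind of
// statement that gets planned as a CreateHiveTableAsSelectCommand.
spark.sql(
  """CREATE TABLE target_db.event_counts STORED AS TEXTFILE AS
    |SELECT key, count(*) AS cnt
    |FROM source_db.events
    |GROUP BY key""".stripMargin)
```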
| Constructor and Description |
|---|
| `CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, scala.collection.Seq<String> outputColumnNames, SaveMode mode)` |
| Modifier and Type | Method and Description |
|---|---|
| `abstract static R` | `apply(T1 v1, T2 v2, T3 v3, T4 v4)` |
| `org.apache.spark.sql.execution.command.DataWritingCommand` | `getWritingCommand(org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, boolean tableExists)` |
| `scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric>` | `metrics()` |
| `SaveMode` | `mode()` |
| `scala.collection.Seq<String>` | `outputColumnNames()` |
| `org.apache.spark.sql.catalyst.plans.logical.LogicalPlan` | `query()` |
| `org.apache.spark.sql.catalyst.catalog.CatalogTable` | `tableDesc()` |
| `static String` | `toString()` |
| `String` | `writingCommandClassName()` |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan:
analyzed, assertNotAnalysisRule, childrenResolved, clone, constraints, constructIsNotNullConstraints, inferAdditionalConstraints, initializeForcefully, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, invalidateStatsCache, isStreaming, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, maxRows, maxRowsPerPartition, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed_$eq, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed, outputOrdering, refresh, resolve, resolve, resolveChildren, resolved, resolveExpressions, resolveOperators, resolveOperatorsDown, resolveOperatorsUp, resolveOperatorsUpWithNewOutput, resolveQuoted, sameOutput, setAnalyzed, statePrefix, stats, statsCache_$eq, statsCache, transformAllExpressions, transformDown, transformUp, validConstraints, verboseStringWithSuffix

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
allAttributes, append, append$default$5, append$default$6, canonicalized, CODEGEN_ID_TAG, collectWithSubqueries, conf, doCanonicalize, expressions, findExpressionAndTrackLineageDown, formattedNodeName, innerChildren, inputSet, isCanonicalizedPlan, mapExpressions, missingInput, normalizeExpressions, normalizePredicates, OP_ID_TAG, org$apache$spark$sql$catalyst$plans$QueryPlan$$updateAttr, org$apache$spark$sql$catalyst$plans$QueryPlan$$updateOuterReferencesInSubquery, output, outputSet, printSchema, producedAttributes, references, sameResult, schema, schemaString, semanticHash, simpleString, simpleStringWithNodeId, subqueries, subqueriesAll, transformExpressions, transformExpressionsDown, transformExpressionsUp, transformUpWithNewOutput, transformUpWithNewOutput$default$2, verboseString, verboseStringWithOperatorId

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, children, collect, collectFirst, collectLeaves, containsChild, copyTagsFrom, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, getTagValue, hashCode, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, setTagValue, stringArgs, toJSON, toString, transform, treeString, treeString, treeString, treeString$default$2, treeString$default$3, treeString$default$4, unsetTagValue, withNewChildren
Methods inherited from interface CreateHiveTableAsSelectBase:
argString, run

Methods inherited from interface org.apache.spark.sql.execution.command.DataWritingCommand:
$init$, basicWriteJobStatsTracker, children, logicalPlanOutputWithNames, outputColumns

Methods inherited from interface org.apache.spark.sql.catalyst.plans.logical.Command:
$init$, output, stats

Methods inherited from interface scala.Product:
$init$, productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper:
$init$, allowInvokingTransformsInAnalyzer, markInAnalyzer
public CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, scala.collection.Seq<String> outputColumnNames, SaveMode mode)
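The constructor is rarely called by hand; the planner builds instances from an analyzed CTAS statement. Still, a hedged sketch of assembling the arguments may help make the parameter types concrete. All table and database names are hypothetical, and `spark` is assumed to be an active Hive-enabled SparkSession.

```scala
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
import org.apache.spark.sql.types.StructType

// Illustrative only: build the pieces the constructor expects. A real plan
// would come out of Spark's parser/analyzer, not be assembled by hand.
val tableDesc = CatalogTable(
  identifier = TableIdentifier("event_counts", Some("target_db")), // hypothetical
  tableType = CatalogTableType.MANAGED,
  storage = CatalogStorageFormat.empty,
  schema = new StructType() // empty: CTAS derives the schema from the query
)

// A LogicalPlan for the SELECT side, taken from an analyzed DataFrame.
val query = spark.table("source_db.events")
  .groupBy("key").count()
  .queryExecution.analyzed

val ctas = CreateHiveTableAsSelectCommand(
  tableDesc,
  query,
  query.output.map(_.name), // outputColumnNames
  SaveMode.ErrorIfExists
)
```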
public abstract static R apply(T1 v1, T2 v2, T3 v3, T4 v4)
public static String toString()
public scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> metrics()
Specified by: metrics in interface org.apache.spark.sql.execution.command.DataWritingCommand
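As a small usage sketch: the map can be iterated to inspect the write metrics. Metric names such as "numFiles" and "numOutputRows" come from DataWritingCommand in Spark 3.x and are an assumption here; the exact set varies by version. `ctas` is an existing command instance assumed in scope.

```scala
// Sketch: enumerate the SQL metrics this write command exposes.
ctas.metrics.foreach { case (name, metric) =>
  // Typical keys include "numFiles" and "numOutputRows" (assumed).
  println(s"$name -> ${metric.value}")
}
```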
public org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc()
Specified by: tableDesc in interface CreateHiveTableAsSelectBase
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
Specified by: query in interface org.apache.spark.sql.execution.command.DataWritingCommand
Specified by: query in interface CreateHiveTableAsSelectBase
public scala.collection.Seq<String> outputColumnNames()
Specified by: outputColumnNames in interface org.apache.spark.sql.execution.command.DataWritingCommand
Specified by: outputColumnNames in interface CreateHiveTableAsSelectBase
public SaveMode mode()
Specified by: mode in interface CreateHiveTableAsSelectBase
public org.apache.spark.sql.execution.command.DataWritingCommand getWritingCommand(org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, boolean tableExists)
Specified by: getWritingCommand in interface CreateHiveTableAsSelectBase
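A hedged sketch of calling this directly; in practice the command's run() does this internally. `ctas` and `spark` are assumed to be in scope, and the concrete DataWritingCommand returned (an insert-into-Hive-table command) is version-dependent.

```scala
// Sketch only: ask the command which DataWritingCommand it would delegate
// the actual write to.
val writing = ctas.getWritingCommand(
  spark.sessionState.catalog, // the session's SessionCatalog
  ctas.tableDesc,             // description of the table being created
  tableExists = false         // before the CTAS runs, the table does not exist
)
// The simple class name of that command, as used in EXPLAIN/log output:
println(ctas.writingCommandClassName)
```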
public String writingCommandClassName()
Specified by: writingCommandClassName in interface CreateHiveTableAsSelectBase