public interface CreateHiveTableAsSelectBase
extends org.apache.spark.sql.execution.command.DataWritingCommand
| Modifier and Type | Method and Description |
| --- | --- |
| String | argString(int maxFields) |
| org.apache.spark.sql.execution.command.DataWritingCommand | getWritingCommand(org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, boolean tableExists) |
| SaveMode | mode() |
| scala.collection.Seq&lt;String&gt; | outputColumnNames() |
| org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | query() |
| scala.collection.Seq&lt;Row&gt; | run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child) |
| org.apache.spark.sql.catalyst.catalog.CatalogTable | tableDesc() |
| org.apache.spark.sql.catalyst.TableIdentifier | tableIdentifier() |
| String | writingCommandClassName() |
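
This interface sits behind Hive `CREATE TABLE ... AS SELECT` (CTAS) execution. As a rough, user-facing illustration (not part of the API listed above), the sketch below assumes a local, Hive-enabled SparkSession build; the table names and the query are illustrative only.

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch of the user-facing path that exercises a Hive CTAS command.
// Assumes Hive support is on the classpath; names are illustrative.
object CtasExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("ctas-sketch")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    spark.range(10).createOrReplaceTempView("src")

    // A Hive CTAS statement: Spark plans this as a data-writing command that
    // creates `target` in the metastore and populates it from the SELECT query.
    spark.sql("CREATE TABLE target STORED AS PARQUET AS SELECT id FROM src")

    spark.table("target").show()
    spark.stop()
  }
}
```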
String argString(int maxFields)

org.apache.spark.sql.execution.command.DataWritingCommand getWritingCommand(org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, boolean tableExists)

SaveMode mode()

scala.collection.Seq&lt;String&gt; outputColumnNames()
Specified by: outputColumnNames in interface org.apache.spark.sql.execution.command.DataWritingCommand

org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
Specified by: query in interface org.apache.spark.sql.execution.command.DataWritingCommand

scala.collection.Seq&lt;Row&gt; run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child)
Specified by: run in interface org.apache.spark.sql.execution.command.DataWritingCommand

org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc()

org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier()

String writingCommandClassName()
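
Taken together, these methods suggest the following contract: run(sparkSession, child) checks whether tableDesc() already exists in the session catalog, applies mode() to decide whether an existing table is an error, creates the table if needed, and then delegates the actual write to the command returned by getWritingCommand(catalog, tableDesc, tableExists); writingCommandClassName() and argString(maxFields) only affect how the command is rendered in plans and logs. The sketch below mirrors that control flow with simplified, hypothetical stand-in types (TableDesc, Catalog, WritingCommand); it is not the Spark-internal implementation.

```scala
// Structural sketch only: TableDesc, Catalog and WritingCommand are simplified,
// hypothetical stand-ins, not the Spark internals named in the signatures above.
object CtasRunFlowSketch {

  final case class TableDesc(name: String)

  final case class Catalog(existing: Set[String]) {
    def tableExists(t: TableDesc): Boolean = existing.contains(t.name)
    def createTable(t: TableDesc): Catalog = copy(existing = existing + t.name)
  }

  trait WritingCommand {
    def run(): Unit
  }

  /** Mirrors the flow suggested by the interface: check existence, honor the
   *  save mode, create the table if needed, then delegate the write to the
   *  command produced by `getWritingCommand`. */
  def run(
      catalog: Catalog,
      tableDesc: TableDesc,
      errorIfExists: Boolean,
      getWritingCommand: (Catalog, TableDesc, Boolean) => WritingCommand): Catalog = {
    val tableExists = catalog.tableExists(tableDesc)
    if (tableExists && errorIfExists) {
      throw new IllegalStateException(s"Table ${tableDesc.name} already exists")
    }
    val updated = if (tableExists) catalog else catalog.createTable(tableDesc)
    getWritingCommand(updated, tableDesc, tableExists).run()
    updated
  }

  def main(args: Array[String]): Unit = {
    val cmd: (Catalog, TableDesc, Boolean) => WritingCommand =
      (_, t, exists) => new WritingCommand {
        def run(): Unit = println(s"writing into ${t.name} (pre-existing: $exists)")
      }
    run(Catalog(Set.empty), TableDesc("target"), errorIfExists = true, cmd)
  }
}
```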