public class RowWriteSupport
extends parquet.hadoop.api.WriteSupport&lt;org.apache.spark.sql.catalyst.expressions.Row&gt;
implements Logging

A parquet.hadoop.api.WriteSupport for Row objects.

| Constructor and Description |
| --- |
| RowWriteSupport() |
| Modifier and Type | Method and Description |
| --- | --- |
| scala.collection.Seq&lt;org.apache.spark.sql.catalyst.expressions.Attribute&gt; | attributes() |
| static scala.collection.Seq&lt;org.apache.spark.sql.catalyst.expressions.Attribute&gt; | getSchema(org.apache.hadoop.conf.Configuration configuration) |
| parquet.hadoop.api.WriteSupport.WriteContext | init(org.apache.hadoop.conf.Configuration configuration) |
| void | prepareForWrite(parquet.io.api.RecordConsumer recordConsumer) |
| static void | setSchema(scala.collection.Seq&lt;org.apache.spark.sql.catalyst.expressions.Attribute&gt; schema, org.apache.hadoop.conf.Configuration configuration) |
| static String | SPARK_ROW_SCHEMA() |
| void | write(org.apache.spark.sql.catalyst.expressions.Row record) |
| void | writeArray(org.apache.spark.sql.catalyst.types.ArrayType schema, scala.collection.Seq&lt;Object&gt; array) |
| void | writeDecimal(org.apache.spark.sql.catalyst.types.decimal.Decimal decimal, int precision) |
| void | writeMap(org.apache.spark.sql.catalyst.types.MapType schema, scala.collection.immutable.Map&lt;?,Object&gt; map) |
| void | writePrimitive(org.apache.spark.sql.catalyst.types.PrimitiveType schema, Object value) |
| parquet.io.api.RecordConsumer | writer() |
| void | writeStruct(org.apache.spark.sql.catalyst.types.StructType schema, scala.collection.Seq&lt;Object&gt; struct) |
| void | writeValue(org.apache.spark.sql.catalyst.types.DataType schema, Object value) |
Methods inherited from class java.lang.Object:
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface org.apache.spark.Logging:
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
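Before the method detail, a minimal usage sketch may help orient: the static setSchema helper stores the Catalyst schema in the Hadoop Configuration, and Parquet's WRITE_SUPPORT_CLASS key tells the output format to instantiate this class on the write path. The column names below are hypothetical, and the sketch assumes the Spark 1.x package org.apache.spark.sql.parquet for this class.

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.types.{IntegerType, StringType}
import org.apache.spark.sql.parquet.RowWriteSupport
import parquet.hadoop.ParquetOutputFormat

// Hypothetical two-column schema: (id: int, name: string).
val schema: Seq[Attribute] = Seq(
  AttributeReference("id", IntegerType, nullable = false)(),
  AttributeReference("name", StringType, nullable = true)())

val conf = new Configuration()
// Serialize the Catalyst attributes into the job configuration so that
// init(configuration) can recover them on the task side.
RowWriteSupport.setSchema(schema, conf)
// Register this WriteSupport with Parquet's output format.
conf.set(ParquetOutputFormat.WRITE_SUPPORT_CLASS, classOf[RowWriteSupport].getName)
```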
public static String SPARK_ROW_SCHEMA()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> getSchema(org.apache.hadoop.conf.Configuration configuration)
public static void setSchema(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, org.apache.hadoop.conf.Configuration configuration)
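The two static schema helpers are inverses: setSchema writes the attribute list into the Configuration (presumably under the key returned by SPARK_ROW_SCHEMA()), and getSchema parses it back. A small round-trip sketch with a hypothetical column, again assuming the Spark 1.x package:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.types.DoubleType
import org.apache.spark.sql.parquet.RowWriteSupport

val conf = new Configuration()
val in: Seq[Attribute] =
  Seq(AttributeReference("price", DoubleType, nullable = true)())

RowWriteSupport.setSchema(in, conf)       // serialize into the configuration
val out = RowWriteSupport.getSchema(conf) // parse it back out
assert(out.map(_.name) == Seq("price"))   // the columns survive the round trip
```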
public parquet.io.api.RecordConsumer writer()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> attributes()
public parquet.hadoop.api.WriteSupport.WriteContext init(org.apache.hadoop.conf.Configuration configuration)
Specified by: init in class parquet.hadoop.api.WriteSupport&lt;org.apache.spark.sql.catalyst.expressions.Row&gt;
public void prepareForWrite(parquet.io.api.RecordConsumer recordConsumer)
Specified by: prepareForWrite in class parquet.hadoop.api.WriteSupport&lt;org.apache.spark.sql.catalyst.expressions.Row&gt;
public void write(org.apache.spark.sql.catalyst.expressions.Row record)
Specified by: write in class parquet.hadoop.api.WriteSupport&lt;org.apache.spark.sql.catalyst.expressions.Row&gt;
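Parquet invokes these three methods in a fixed order: init(configuration) once to obtain the WriteContext, prepareForWrite(recordConsumer) once per output file, then write(record) once per row. The sketch below drives that contract by hand against a RecordConsumer stub that merely prints each event it receives; it is for illustration only, and assumes the Catalyst Row companion's apply plus the Spark 1.x package for RowWriteSupport.

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Row}
import org.apache.spark.sql.catalyst.types.{IntegerType, StringType}
import org.apache.spark.sql.parquet.RowWriteSupport
import parquet.io.api.{Binary, RecordConsumer}

// A stub consumer that prints every event, making the call sequence
// emitted by write() visible. Illustration only; not for production use.
class PrintingRecordConsumer extends RecordConsumer {
  private def log(e: String): Unit = println(e)
  def startMessage(): Unit = log("startMessage")
  def endMessage(): Unit = log("endMessage")
  def startField(field: String, index: Int): Unit = log(s"startField($field, $index)")
  def endField(field: String, index: Int): Unit = log(s"endField($field, $index)")
  def startGroup(): Unit = log("startGroup")
  def endGroup(): Unit = log("endGroup")
  def addInteger(value: Int): Unit = log(s"addInteger($value)")
  def addLong(value: Long): Unit = log(s"addLong($value)")
  def addBoolean(value: Boolean): Unit = log(s"addBoolean($value)")
  def addBinary(value: Binary): Unit = log(s"addBinary($value)")
  def addFloat(value: Float): Unit = log(s"addFloat($value)")
  def addDouble(value: Double): Unit = log(s"addDouble($value)")
}

val conf = new Configuration()
RowWriteSupport.setSchema(Seq(
  AttributeReference("id", IntegerType, nullable = false)(),
  AttributeReference("name", StringType, nullable = true)()), conf)

val support = new RowWriteSupport()
support.init(conf)                                  // 1. recover schema, build WriteContext
support.prepareForWrite(new PrintingRecordConsumer) // 2. receive the RecordConsumer
support.write(Row(1, "spark"))                      // 3. called once per record
```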
public void writeValue(org.apache.spark.sql.catalyst.types.DataType schema, Object value)
public void writePrimitive(org.apache.spark.sql.catalyst.types.PrimitiveType schema, Object value)
public void writeStruct(org.apache.spark.sql.catalyst.types.StructType schema, scala.collection.Seq<Object> struct)
public void writeArray(org.apache.spark.sql.catalyst.types.ArrayType schema, scala.collection.Seq<Object> array)
public void writeMap(org.apache.spark.sql.catalyst.types.MapType schema, scala.collection.immutable.Map<?,Object> map)
public void writeDecimal(org.apache.spark.sql.catalyst.types.decimal.Decimal decimal, int precision)
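Taken together, the write* family forms a type-directed recursion: writeValue looks at the DataType and routes to writePrimitive, writeStruct, writeArray, writeMap, or writeDecimal, and the complex-type writers recurse back into writeValue for their elements. A simplified sketch of that dispatch shape (not the actual Spark implementation):

```scala
import org.apache.spark.sql.catalyst.types._

// Illustrative re-sketch of the dispatch writeValue performs (NOT the
// actual Spark code): complex types recurse into their element types,
// decimals carry a precision, everything else is written as a primitive.
def describeDispatch(schema: DataType): String = schema match {
  case ArrayType(elementType, _) =>
    s"writeArray[ ${describeDispatch(elementType)} ]"
  case MapType(keyType, valueType, _) =>
    s"writeMap[ ${describeDispatch(keyType)} -> ${describeDispatch(valueType)} ]"
  case StructType(fields) =>
    fields.map(f => describeDispatch(f.dataType)).mkString("writeStruct[ ", ", ", " ]")
  case _: DecimalType =>
    "writeDecimal(decimal, precision)"
  case _ =>
    "writePrimitive"
}

// Example: array<struct<id:int, price:decimal(10,2)>>
println(describeDispatch(ArrayType(StructType(Seq(
  StructField("id", IntegerType, nullable = false),
  StructField("price", DecimalType(10, 2), nullable = true))))))
```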