| Constructor and Description |
| --- |
| `RowReadSupport()` |
| Modifier and Type | Method and Description |
| --- | --- |
| `parquet.hadoop.api.ReadSupport.ReadContext` | `init(org.apache.hadoop.conf.Configuration configuration, java.util.Map<String,String> keyValueMetaData, parquet.schema.MessageType fileSchema)` |
| `parquet.io.api.RecordMaterializer<org.apache.spark.sql.catalyst.expressions.Row>` | `prepareForRead(org.apache.hadoop.conf.Configuration conf, java.util.Map<String,String> stringMap, parquet.schema.MessageType fileSchema, parquet.hadoop.api.ReadSupport.ReadContext readContext)` |
| `static String` | `SPARK_METADATA_KEY()` |
| `static String` | `SPARK_ROW_REQUESTED_SCHEMA()` |
getSchemaForRead, getSchemaForRead, init
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public static String SPARK_ROW_REQUESTED_SCHEMA()
public static String SPARK_METADATA_KEY()
public parquet.io.api.RecordMaterializer<org.apache.spark.sql.catalyst.expressions.Row> prepareForRead(org.apache.hadoop.conf.Configuration conf, java.util.Map<String,String> stringMap, parquet.schema.MessageType fileSchema, parquet.hadoop.api.ReadSupport.ReadContext readContext)
prepareForRead
in class parquet.hadoop.api.ReadSupport<org.apache.spark.sql.catalyst.expressions.Row>
public parquet.hadoop.api.ReadSupport.ReadContext init(org.apache.hadoop.conf.Configuration configuration, java.util.Map<String,String> keyValueMetaData, parquet.schema.MessageType fileSchema)
init
in class parquet.hadoop.api.ReadSupport<org.apache.spark.sql.catalyst.expressions.Row>