Packages

class

Package: org.apache.spark.sql.execution.datasources.parquet

Class: GeoParquetToSparkSchemaConverter

class GeoParquetToSparkSchemaConverter extends AnyRef

This converter class converts a Parquet MessageType to a Spark SQL StructType.

Parquet format backwards-compatibility rules are respected when converting Parquet MessageType schemas.

See also

https://github.com/apache/parquet-format/blob/master/LogicalTypes.md

Linear Supertypes
AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. GeoParquetToSparkSchemaConverter
  2. AnyRef
  3. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. Protected

Instance Constructors

  1. new GeoParquetToSparkSchemaConverter(keyValueMetaData: Map[String, String], conf: Configuration)
  2. new GeoParquetToSparkSchemaConverter(keyValueMetaData: Map[String, String], conf: SQLConf)
  3. new GeoParquetToSparkSchemaConverter(keyValueMetaData: Map[String, String], assumeBinaryIsString: Boolean = SQLConf.PARQUET_BINARY_AS_STRING.defaultValue.get, assumeInt96IsTimestamp: Boolean = SQLConf.PARQUET_INT96_AS_TIMESTAMP.defaultValue.get)

    assumeBinaryIsString

    Whether unannotated BINARY fields should be assumed to be Spark SQL StringType fields.

    assumeInt96IsTimestamp

    Whether unannotated INT96 fields should be assumed to be Spark SQL TimestampType fields.

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native()
  6. def convert(parquetSchema: MessageType): StructType

    Converts Parquet MessageType parquetSchema to a Spark SQL StructType.

  7. def convertFieldWithGeo(parquetType: Type): DataType

    Converts a Parquet Type to a Spark SQL DataType.

  8. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  9. def equals(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef → Any
  10. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable])
  11. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  12. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  13. def isElementTypeWithGeo(repeatedType: Type, parentName: String): Boolean
  14. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  15. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  16. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  17. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  18. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  19. def toString(): String
    Definition Classes
    AnyRef → Any
  20. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  21. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  22. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()

Inherited from AnyRef

Inherited from Any

Ungrouped