@@ -99,6 +99,11 @@ public BigDecimal getDecimal() {
     return VariantUtil.getDecimal(value, pos);
   }
 
+  // Get the decimal value, including trailing zeros.
+  public BigDecimal getDecimalWithOriginalScale() {
+    return VariantUtil.getDecimalWithOriginalScale(value, pos);
+  }
+
   // Get a float value from the variant.
   public float getFloat() {
     return VariantUtil.getFloat(value, pos);
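Reviewer note: the distinction is easiest to see with plain java.math.BigDecimal. A minimal Scala sketch, assuming getDecimal() normalizes trailing zeros away (stripTrailingZeros-style) while the new getDecimalWithOriginalScale() preserves the scale as stored; the Variant accessors themselves are not exercised here:

// Sketch of the trailing-zeros distinction using plain java.math.BigDecimal.
// Assumption: getDecimal() returns a normalized value (stripTrailingZeros-style),
// while getDecimalWithOriginalScale() returns the value with the scale as stored.
import java.math.BigDecimal

object DecimalScaleDemo extends App {
  val stored = new BigDecimal("1.2300")        // stored with scale 4
  val normalized = stored.stripTrailingZeros() // 1.23, scale 2

  println(normalized)                          // prints 1.23
  println(stored)                              // prints 1.2300
  assert(normalized.compareTo(stored) == 0)    // numerically equal...
  assert(normalized.scale() != stored.scale()) // ...but the scales differ
}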
@@ -5343,6 +5343,31 @@ object SQLConf {
     .stringConf
     .createWithDefault("")
 
+  val VARIANT_SHREDDING_MAX_SCHEMA_WIDTH =
+    buildConf("spark.sql.variant.shredding.maxSchemaWidth")
+      .internal()
+      .doc("Maximum number of shredded fields to create when inferring a schema for Variant.")
+      .version("4.2.0")
+      .intConf
+      .createWithDefault(300)
+
+  val VARIANT_SHREDDING_MAX_SCHEMA_DEPTH =
+    buildConf("spark.sql.variant.shredding.maxSchemaDepth")
+      .internal()
+      .doc("Maximum depth in Variant value to traverse when inferring a schema. " +
+        "Any array/object below this depth will be shredded as a single binary.")
+      .version("4.2.0")
+      .intConf
+      .createWithDefault(50)
+
+  val VARIANT_INFER_SHREDDING_SCHEMA =
+    buildConf("spark.sql.variant.inferShreddingSchema")
+      .internal()
+      .doc("Infer shredding schema when writing Variant columns in Parquet tables.")
+      .version("4.2.0")
+      .booleanConf
+      .createWithDefault(false)
+
   val LEGACY_CSV_ENABLE_DATE_TIME_PARSING_FALLBACK =
     buildConf("spark.sql.legacy.csv.enableDateTimeParsingFallback")
       .internal()
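Reviewer note: a hedged usage sketch for the three new configs. The config keys come straight from the definitions above; the effect on the Parquet write path is assumed from the doc strings, not verified here, and the output path is illustrative:

// Sketch: enabling shredding-schema inference and tuning its limits.
import org.apache.spark.sql.SparkSession

object VariantShreddingDemo extends App {
  val spark = SparkSession.builder()
    .appName("variant-shredding-demo")
    .master("local[*]")
    .config("spark.sql.variant.inferShreddingSchema", "true")
    .config("spark.sql.variant.shredding.maxSchemaWidth", "100") // cap shredded fields
    .config("spark.sql.variant.shredding.maxSchemaDepth", "10")  // deeper values stay binary
    .getOrCreate()

  // A Variant column produced with parse_json; writing it to Parquet should
  // now infer a shredding schema subject to the limits above (assumption
  // based on the .doc() strings of the configs).
  spark.sql("""SELECT parse_json('{"a": 1, "b": {"c": [1, 2, 3]}}') AS v""")
    .write.mode("overwrite").parquet("/tmp/variant_shredding_demo")

  spark.stop()
}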