From 00bb416281a3fcccdd7422660a3dfbc74ca864b6 Mon Sep 17 00:00:00 2001
From: spenes
Date: Fri, 13 Dec 2024 13:43:18 +0300
Subject: [PATCH 1/2] Redshift: use zstd compression encoding instead of text255 for enum field columns (#215)

Currently, the compression encoding of columns created for enum fields is
text255. However, text255 columns cannot be altered, so we get an exception
when trying to resize such a column. To solve this problem, we now use zstd
compression encoding for enum fields as well.

After this change, newly created columns for enum fields will be encoded
with zstd. The compression encoding of existing columns will not be updated,
so resizing them will still raise an exception; those exceptions will be
caught and ignored in RDB Loader.
---
 .../iglu.schemaddl/redshift/ShredModelEntry.scala | 11 +++--------
 .../redshift/internal/ShredModelEntrySpec.scala   |  4 ++--
 2 files changed, 5 insertions(+), 10 deletions(-)

diff --git a/modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/ShredModelEntry.scala b/modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/ShredModelEntry.scala
index 5a72dfba..4912c3d5 100644
--- a/modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/ShredModelEntry.scala
+++ b/modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/ShredModelEntry.scala
@@ -35,11 +35,9 @@ case class ShredModelEntry(
       .flatMap(_.apply(subSchema))
       .getOrElse(ShredModelEntry.ColumnType.RedshiftVarchar(ShredModelEntry.VARCHAR_SIZE))
 
-  lazy val compressionEncoding: ShredModelEntry.CompressionEncoding = (subSchema.`enum`, columnType) match {
-    case (Some(_), ShredModelEntry.ColumnType.RedshiftVarchar(size)) if size <= 255 =>
-      ShredModelEntry.CompressionEncoding.Text255Encoding
-    case (_, ShredModelEntry.ColumnType.RedshiftBoolean) => ShredModelEntry.CompressionEncoding.RunLengthEncoding
-    case (_, ShredModelEntry.ColumnType.RedshiftDouble) => ShredModelEntry.CompressionEncoding.RawEncoding
+  lazy val compressionEncoding: ShredModelEntry.CompressionEncoding = columnType match {
+    case ShredModelEntry.ColumnType.RedshiftBoolean => ShredModelEntry.CompressionEncoding.RunLengthEncoding
+    case ShredModelEntry.ColumnType.RedshiftDouble => ShredModelEntry.CompressionEncoding.RawEncoding
     case _ => ShredModelEntry.CompressionEncoding.ZstdEncoding
   }
 
@@ -169,7 +167,6 @@ object ShredModelEntry {
 
     implicit val compressionEncodingShow: Show[CompressionEncoding] = Show.show {
       case RawEncoding => s"ENCODE RAW"
-      case Text255Encoding => s"ENCODE TEXT255"
      case ZstdEncoding => s"ENCODE ZSTD"
       case RunLengthEncoding => "ENCODE RUNLENGTH"
     }
@@ -178,8 +175,6 @@ object ShredModelEntry {
 
     case object RunLengthEncoding extends CompressionEncoding
 
-    case object Text255Encoding extends CompressionEncoding
-
     case object ZstdEncoding extends CompressionEncoding
   }
 
diff --git a/modules/core/src/test/scala/com/snowplowanalytics/iglu/schemaddl/redshift/internal/ShredModelEntrySpec.scala b/modules/core/src/test/scala/com/snowplowanalytics/iglu/schemaddl/redshift/internal/ShredModelEntrySpec.scala
index 3d4e6408..cd23d0d9 100644
--- a/modules/core/src/test/scala/com/snowplowanalytics/iglu/schemaddl/redshift/internal/ShredModelEntrySpec.scala
+++ b/modules/core/src/test/scala/com/snowplowanalytics/iglu/schemaddl/redshift/internal/ShredModelEntrySpec.scala
@@ -85,9 +85,9 @@ class ShredModelEntrySpec extends Specification {
   }
 
   "suggest compression" should {
"suggest Text255Encoding for enums less then 255 in length" in { + "suggest zstd for enums less then 255 in length" in { val props = json"""{"type": "string", "enum": ["one", "two"], "maxLength": 42}""".schema - ShredModelEntry(dummyPtr, props).compressionEncoding must beEqualTo(Text255Encoding) + ShredModelEntry(dummyPtr, props).compressionEncoding must beEqualTo(ZstdEncoding) } "suggest RunLengthEncoding for booleans" in { From a3ad1dee962f2b2ea29fe4e8029560002de1276b Mon Sep 17 00:00:00 2001 From: spenes Date: Thu, 19 Dec 2024 16:39:59 +0300 Subject: [PATCH 2/2] Prepare for 0.27.0 release --- CHANGELOG | 4 ++++ README.md | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 4387f947..87b3053f 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,7 @@ +Version 0.27.0 (2024-12-19) +--------------------------- +Redshift: use zstd compression encoding instead text255 with the columns for enum fields (#215) + Version 0.26.0 (2024-11-25) --------------------------- Preserve original field order when merging parquet Fields (#213) diff --git a/README.md b/README.md index a348f734..e12b7768 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ Schema DDL itself does not provide any CLI and expose only Scala API. Schema DDL is compiled against Scala 2.12 and 2.13 and available on Maven Central. In order to use it with SBT, include following module: ```scala -libraryDependencies += "com.snowplowanalytics" %% "schema-ddl" % "0.25.0" +libraryDependencies += "com.snowplowanalytics" %% "schema-ddl" % "0.27.0" ``` @@ -64,5 +64,5 @@ limitations under the License. [contributing]: https://docs.snowplow.io/docs/contributing/ [contributing-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/contributing.png -[api-reference]: https://snowplow.github.io/schema-ddl/0.25.0/com/snowplowanalytics/iglu/schemaddl/index.html +[api-reference]: https://snowplow.github.io/schema-ddl/0.27.0/com/snowplowanalytics/iglu/schemaddl/index.html [api-reference-image]: https://d3i6fms1cm1j0i.cloudfront.net/github/images/techdocs.png