From bce341f23ae1b191e4686332a8fdc51fdd7edf6f Mon Sep 17 00:00:00 2001
From: Edward Cho
Date: Wed, 21 Feb 2024 10:04:46 -0500
Subject: [PATCH] Skip SparkTableMetricsRepositoryTest iceberg test when
 SupportsRowLevelOperations is not available

---
 .../SparkTableMetricsRepositoryTest.scala | 35 ++++++++++---------
 1 file changed, 19 insertions(+), 16 deletions(-)

diff --git a/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala b/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
index 667b5b50..8e0d0aac 100644
--- a/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
+++ b/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
@@ -101,21 +101,24 @@ class SparkTableMetricsRepositoryTest extends AnyWordSpec
     }
 
     "save and load to iceberg a single metric" in withSparkSessionIcebergCatalog { spark => {
-      val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
-      val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
-      val context = AnalyzerContext(Map(analyzer -> metric))
-
-      val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
-      // Save the metric
-      repository.save(resultKey, context)
-
-      // Load the metric
-      val loadedContext = repository.loadByKey(resultKey)
-
-      assert(loadedContext.isDefined)
-      assert(loadedContext.get.metric(analyzer).contains(metric))
-    }
-
-    }
+      // The SupportsRowLevelOperations class is available from spark 3.3
+      // We should skip this test for lower spark versions
+      val className = "org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations"
+      if (Try(Class.forName(className)).isSuccess) {
+        val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
+        val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
+        val context = AnalyzerContext(Map(analyzer -> metric))
+
+        val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
+        // Save the metric
+        repository.save(resultKey, context)
+
+        // Load the metric
+        val loadedContext = repository.loadByKey(resultKey)
+        assert(loadedContext.isDefined)
+        assert(loadedContext.get.metric(analyzer).contains(metric))
+      }
+    }
+    }
   }
 }
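
Note: the guard added above uses JVM reflection rather than a Spark version string; if the class that only exists in Spark 3.3+ cannot be loaded, the test body is skipped. Below is a minimal, self-contained sketch of the same pattern outside the test suite. The object and method names (SparkFeatureCheck, supportsRowLevelOperations) are illustrative, not part of this patch.

import scala.util.Try

object SparkFeatureCheck {
  // SupportsRowLevelOperations exists only in Spark 3.3+, so loading it
  // successfully is used as a proxy for "running on Spark 3.3 or newer".
  private val RowLevelOpsClass =
    "org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations"

  def supportsRowLevelOperations: Boolean =
    Try(Class.forName(RowLevelOpsClass)).isSuccess

  def main(args: Array[String]): Unit = {
    if (supportsRowLevelOperations) {
      println("Spark 3.3+ detected: the Iceberg round-trip test would run")
    } else {
      println("SupportsRowLevelOperations not found: the test would be skipped")
    }
  }
}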