diff --git a/src/main/scala/com/metabolic/data/core/domain/Environment.scala b/src/main/scala/com/metabolic/data/core/domain/Environment.scala
index 933e5350..5ce3da5b 100644
--- a/src/main/scala/com/metabolic/data/core/domain/Environment.scala
+++ b/src/main/scala/com/metabolic/data/core/domain/Environment.scala
@@ -13,5 +13,5 @@ case class Environment(name: String,
                        autoSchema: Boolean = false,
                        namespaces: Seq[String] = Seq.empty,
                        infix_namespaces: Seq[String] = Seq.empty,
-                       enableJDBC: Boolean,
-                       queryOutputLocation: Option[String])
+                       enableJDBC: Boolean = false,
+                       queryOutputLocation: String = "")
diff --git a/src/main/scala/com/metabolic/data/core/services/spark/reader/table/TableReader.scala b/src/main/scala/com/metabolic/data/core/services/spark/reader/table/TableReader.scala
index 1f8cbf17..e713a16a 100644
--- a/src/main/scala/com/metabolic/data/core/services/spark/reader/table/TableReader.scala
+++ b/src/main/scala/com/metabolic/data/core/services/spark/reader/table/TableReader.scala
@@ -32,5 +32,6 @@ class TableReader(fqn : String, enableJDBC: Boolean, queryOutputLocation: String
 }
 
 object TableReader {
-  def apply(fqn: String) = new TableReader(fqn)
+  def apply(fqn: String, enableJDBC: Boolean, queryOutputLocation: String) = new TableReader(fqn, enableJDBC, queryOutputLocation)
+
 }
\ No newline at end of file
diff --git a/src/main/scala/com/metabolic/data/mapper/app/MetabolicReader.scala b/src/main/scala/com/metabolic/data/mapper/app/MetabolicReader.scala
index f365300a..0ec19213 100644
--- a/src/main/scala/com/metabolic/data/mapper/app/MetabolicReader.scala
+++ b/src/main/scala/com/metabolic/data/mapper/app/MetabolicReader.scala
@@ -16,7 +16,7 @@ object MetabolicReader extends Logging {
 
   def read(source: Source, historical: Boolean, mode: EngineMode, enableJDBC: Boolean, queryOutputLocation: String)(implicit spark: SparkSession) = {
 
-    val input = readSource(source, mode, spark, enableJDBC)
+    val input = readSource(source, mode, spark, enableJDBC, queryOutputLocation)
 
     val prepared = prepareSource(source, historical, input)
 
diff --git a/src/main/scala/com/metabolic/data/mapper/services/ConfigParserService.scala b/src/main/scala/com/metabolic/data/mapper/services/ConfigParserService.scala
index b2cac881..2b31d3df 100644
--- a/src/main/scala/com/metabolic/data/mapper/services/ConfigParserService.scala
+++ b/src/main/scala/com/metabolic/data/mapper/services/ConfigParserService.scala
@@ -93,9 +93,9 @@ class ConfigParserService(implicit region: Regions) extends Logging {
     }
 
     val queryOutputLocation = if (config.hasPathOrNull("queryOutputLocation")) {
-      Option.apply(config.getString("queryOutputLocation"))
+      config.getString("queryOutputLocation")
     } else {
-      Option.empty
+      ""
     }
 
     Environment(envPrefix, engineMode, baseCheckpointLocation, crawl, dbname, iamrole, atlanToken, atlanBaseUrl,historical, autoSchema, namespaces, infix_namespaces, enableJDBC, queryOutputLocation)
diff --git a/src/test/scala/com/metabolic/data/mapper/app/MetabolicReaderIT.scala b/src/test/scala/com/metabolic/data/mapper/app/MetabolicReaderIT.scala
index b3031eac..c1ac80ed 100644
--- a/src/test/scala/com/metabolic/data/mapper/app/MetabolicReaderIT.scala
+++ b/src/test/scala/com/metabolic/data/mapper/app/MetabolicReaderIT.scala
@@ -78,7 +78,7 @@ class MetabolicReaderIT extends AnyFunSuite
 
     val source = getFileSource(inputPath, tableName, IOFormat.PARQUET.toString).head
 
-    MetabolicReader.read(source, true, EngineMode.Batch)(spark)
+    MetabolicReader.read(source, true, EngineMode.Batch, false, "")(spark)
 
     val result = spark.table(tableName)
 
@@ -103,7 +103,7 @@ class MetabolicReaderIT extends AnyFunSuite
 
     val source = getFileSource(inputPath, tableName, IOFormat.JSON.toString).head
 
-    MetabolicReader.read(source, true, EngineMode.Batch)(spark)
+    MetabolicReader.read(source, true, EngineMode.Batch, false, "")(spark)
 
     val result = spark.table(tableName)
 
@@ -129,7 +129,7 @@ class MetabolicReaderIT extends AnyFunSuite
 
     val source = getFileSource(inputPath, tableName, IOFormat.CSV.toString).head
 
-    MetabolicReader.read(source, true, EngineMode.Batch)(spark)
+    MetabolicReader.read(source, true, EngineMode.Batch, false, "")(spark)
 
     val result = spark.table(tableName)
 
@@ -153,7 +153,7 @@ class MetabolicReaderIT extends AnyFunSuite
 
     val source = getFileSource(inputPath, tableName, IOFormat.DELTA.toString).head
 
-    MetabolicReader.read(source, true, EngineMode.Batch)(spark)
+    MetabolicReader.read(source, true, EngineMode.Batch, false, "")(spark)
 
     val result = spark.table(tableName)
 
diff --git a/src/test/scala/com/metabolic/data/mapper/app/ProdConfigsMetabolicAppIT.scala b/src/test/scala/com/metabolic/data/mapper/app/ProdConfigsMetabolicAppIT.scala
index 43ca7cd0..93fbea7f 100644
--- a/src/test/scala/com/metabolic/data/mapper/app/ProdConfigsMetabolicAppIT.scala
+++ b/src/test/scala/com/metabolic/data/mapper/app/ProdConfigsMetabolicAppIT.scala
@@ -36,7 +36,7 @@ class ProdConfigsMetabolicAppIT extends AnyFunSuite
       .parseConfig(config)
 
     parsedConfig.head.sources.foreach { source =>
-      MetabolicReader.read(source, true, EngineMode.Batch)(spark)
+      MetabolicReader.read(source, true, EngineMode.Batch, false, "")(spark)
     }