diff --git a/src/main/scala/io/smartdatalake/completion/SDLBCompletionEngineImpl.scala b/src/main/scala/io/smartdatalake/completion/SDLBCompletionEngineImpl.scala
index a50a714..cfe002c 100644
--- a/src/main/scala/io/smartdatalake/completion/SDLBCompletionEngineImpl.scala
+++ b/src/main/scala/io/smartdatalake/completion/SDLBCompletionEngineImpl.scala
@@ -18,7 +18,7 @@ class SDLBCompletionEngineImpl(private val schemaReader: SchemaReader) extends S
       case TemplateCollection(templates, templateType) => generateTemplateSuggestions(templates, templateType, context.isInList)
     val itemSuggestionsFromConfig = generateItemSuggestionsFromConfig(context)
-    val allItems = itemSuggestionsFromConfig ++ itemSuggestionsFromSchema
+    val allItems = itemSuggestionsFromConfig ++ itemSuggestionsFromSchema //TODO better split schema and config suggestions
     if allItems.isEmpty then typeList else allItems
 
   private def generateItemSuggestionsFromConfig(context: SDLBContext): List[CompletionItem] = context.parentPath.lastOption match
@@ -46,13 +46,14 @@ class SDLBCompletionEngineImpl(private val schemaReader: SchemaReader) extends S
     val keyName = if templateType == TemplateType.OBJECT then s"${actionType.toLowerCase}_PLACEHOLDER" else ""
     val startObject = if templateType != TemplateType.ATTRIBUTES then "{" else ""
     val endObject = if templateType != TemplateType.ATTRIBUTES then "}" else ""
+    val newLine = sys.props("line.separator") // Allows packaging the LSP independently of the platform
     completionItem.setInsertText( //TODO handle indentation
       s"""$keyName $startObject
          |${
         def generatePlaceHolderValue(att: SchemaItem) = {
           if att.name == "type" then actionType else att.itemType.defaultValue
         }
-        attributes.map(att => "\t\t" + att.name + " = " + generatePlaceHolderValue(att)).mkString("\n")}\n\t$endObject
+        attributes.map(att => "\t\t" + att.name + " = " + generatePlaceHolderValue(att)).mkString(newLine)}$newLine\t$endObject
          |""".stripMargin) //TODO remove blank lines?
     completionItem.setKind(CompletionItemKind.Snippet)
     completionItem
diff --git a/src/main/scala/io/smartdatalake/schema/SchemaReaderImpl.scala b/src/main/scala/io/smartdatalake/schema/SchemaReaderImpl.scala
index 0be90d0..5b6af1c 100644
--- a/src/main/scala/io/smartdatalake/schema/SchemaReaderImpl.scala
+++ b/src/main/scala/io/smartdatalake/schema/SchemaReaderImpl.scala
@@ -15,7 +15,7 @@ class SchemaReaderImpl(val schemaPath: String) extends SchemaReader {
 
   private val logger = LoggerFactory.getLogger(getClass)
 
   private val schema = ujson.read(Using.resource(getClass.getClassLoader.getResourceAsStream(schemaPath)) { inputStream =>
-    Source.fromInputStream(inputStream).getLines().mkString("\n").trim
+    Source.fromInputStream(inputStream).getLines().mkString(sys.props("line.separator")).trim
   })
 
@@ -56,20 +56,23 @@ class SchemaReaderImpl(val schemaPath: String) extends SchemaReader {
     }._1
   end retrieveSchemaContext
 
-  private[schema] def moveInConfigAndRetrieveType(config: ConfigValue, path: String): (ConfigValue, Option[String]) = //TODO what about a path finishing with "type"
+  private[schema] def moveInConfigAndRetrieveType(config: ConfigValue, path: String): (ConfigValue, Option[String]) =
     val newConfig = config match
       case asConfigObject: ConfigObject => asConfigObject.get(path)
-      case asConfigList: ConfigList => path.toIntOption.map(asConfigList.get).getOrElse(config) // keep config idle if path doesn't work TODO log?
+      case asConfigList: ConfigList => path.toIntOption.map(asConfigList.get).getOrElse {
+        logger.debug("Trying to access an index in config {} but given element path is not of type int: {}", config, path)
+        config
+      }
       case _ =>
-        logger.debug("trying to move with config {} while receiving path element {}", config, path)
-        config //TODO return config itself?
+        logger.debug("Trying to move with config {} while receiving path element {}", config, path)
+        config
     val objectType = retrieveType(newConfig)
     if (newConfig == null) {logger.error("Error, newConfig is null with path={}, config={}", path, config)}
     (newConfig, objectType)
 
   private def retrieveType(config: ConfigValue): Option[String] = config match
-    case asConfigObjectAgain: ConfigObject => Option(asConfigObjectAgain.get("type")).flatMap(_.unwrapped() match
+    case asConfigObject: ConfigObject => Option(asConfigObject.get("type")).flatMap(_.unwrapped() match
       case s: String => Some(s)
       case _ => None)
     case _ => None
diff --git a/src/main/scala/io/smartdatalake/utils/MultiLineTransformer.scala b/src/main/scala/io/smartdatalake/utils/MultiLineTransformer.scala
index 2f60bd5..3d6319c 100644
--- a/src/main/scala/io/smartdatalake/utils/MultiLineTransformer.scala
+++ b/src/main/scala/io/smartdatalake/utils/MultiLineTransformer.scala
@@ -6,7 +6,7 @@ object MultiLineTransformer {
 
   def flattenMultiLines(text: String): String =
     val pattern = raw"""(?s)\"\"\".*?\"\"\"""".r
-    pattern.replaceAllIn(text, m => m.matched.replace("\n", ""))
+    pattern.replaceAllIn(text, m => m.matched.replace(sys.props("line.separator"), ""))
 
   def computeCorrectedPosition(text: String, lineNumber: Int, columnNumber: Int): (Int, Int) =
diff --git a/src/test/scala/io/smartdatalake/UnitSpec.scala b/src/test/scala/io/smartdatalake/UnitSpec.scala
index 2736a9f..b446d0e 100644
--- a/src/test/scala/io/smartdatalake/UnitSpec.scala
+++ b/src/test/scala/io/smartdatalake/UnitSpec.scala
@@ -11,5 +11,5 @@ import scala.util.Using
 
 abstract class UnitSpec extends AnyFlatSpec with should.Matchers with OptionValues with Inside with Inspectors with TestModule:
   def loadFile(filePath: String): String = Using.resource(getClass.getClassLoader.getResourceAsStream(filePath)) { inputStream =>
-    Source.fromInputStream(inputStream).getLines().mkString("\n").trim
+    Source.fromInputStream(inputStream).getLines().mkString(sys.props("line.separator")).trim
   }
diff --git a/src/test/scala/io/smartdatalake/completion/SDLBCompletionEngineSpec.scala b/src/test/scala/io/smartdatalake/completion/SDLBCompletionEngineSpec.scala
index 8530426..4512304 100644
--- a/src/test/scala/io/smartdatalake/completion/SDLBCompletionEngineSpec.scala
+++ b/src/test/scala/io/smartdatalake/completion/SDLBCompletionEngineSpec.scala
@@ -9,17 +9,25 @@ import scala.util.Using
 
 class SDLBCompletionEngineSpec extends UnitSpec {
 
+  private val context = SDLBContext.fromText(loadFile("fixture/hocon/with-lists-example.conf"))
+
   "SDLB Completion engine" should "retrieve all the properties of copyAction" in {
     val context = SDLBContext.fromText(loadFile("fixture/hocon/with-multi-lines-flattened-example.conf"))
       .withCaretPosition(16, 0)
     completionEngine.generateCompletionItems(context) should have size 12
   }
 
-  it should "do something" in { //TODO either rename or change. Or remove it.
-    val context = SDLBContext.fromText(loadFile("fixture/hocon/with-lists-example.conf"))
+  it should "generate completion items inside a specific action" in {
     completionEngine.generateCompletionItems(context.withCaretPosition(3, 0)) should have size 9
+  }
+
+  it should "generate completion items within a specific transformer" in {
     completionEngine.generateCompletionItems(context.withCaretPosition(7, 0)) should have size 4
   }
+
+  it should "generate completion items after a type" in {
+    //TODO implement feature first :)
+  }
 }
diff --git a/src/test/scala/io/smartdatalake/context/hocon/HoconParserSpec.scala b/src/test/scala/io/smartdatalake/context/hocon/HoconParserSpec.scala
index 62ced43..a4436cf 100644
--- a/src/test/scala/io/smartdatalake/context/hocon/HoconParserSpec.scala
+++ b/src/test/scala/io/smartdatalake/context/hocon/HoconParserSpec.scala
@@ -145,7 +145,7 @@ class HoconParserSpec extends UnitSpec {
 
   }
 
-  it should "find path in file with lists" in { //TODO test nested lists
+  it should "find path in file with lists" in {
     val fixture = loadFixture("fixture/hocon/with-lists-example.conf")
 
     val leftCaretData = List(
@@ -265,9 +265,8 @@ class HoconParserSpec extends UnitSpec {
     HoconParser.retrieveWordAtPosition(text, 4, 0) shouldBe "global"
     HoconParser.retrieveWordAtPosition(text, 4, 6) shouldBe "global"
     HoconParser.retrieveWordAtPosition(text, 4, 7) shouldBe "{"
-    HoconParser.retrieveWordAtPosition(text, 6, 7) shouldBe "#\"spark.sql.shuffle.partitions\"" //TODO its a comment line, disable?
-    HoconParser.retrieveWordAtPosition(text, 7, 7) shouldBe "\"spark.sql.shuffle.partitions\"" //TODO its a string value, disable?
-
+    HoconParser.retrieveWordAtPosition(text, 6, 7) shouldBe "#\"spark.sql.shuffle.partitions\"" // This means hovering also works on commented code
+    HoconParser.retrieveWordAtPosition(text, 7, 7) shouldBe "\"spark.sql.shuffle.partitions\""
   }
 
   it should "transform line column position to absolute position" in {
diff --git a/src/test/scala/io/smartdatalake/schema/SchemaReaderSpec.scala b/src/test/scala/io/smartdatalake/schema/SchemaReaderSpec.scala
index 42f9c9c..57a7bf0 100644
--- a/src/test/scala/io/smartdatalake/schema/SchemaReaderSpec.scala
+++ b/src/test/scala/io/smartdatalake/schema/SchemaReaderSpec.scala
@@ -81,13 +81,6 @@ class SchemaReaderSpec extends UnitSpec {
     println(context.parentPath.appended(context.word))
     println(schemaReader.retrieveDescription(context))
   }
-
-  it should "debug" in {
-    val contextText =
-      "\nactions {\n\n join-departures-airports {\n type \u003d CustomDataFrameAction\n \n inputIds \u003d [stg-departures, int-airports, dataobjectsexporterdataobject_PLACEHOLDER]\n transformer \u003d {\n\n code \u003d {\n btl-connected-airports \u003d \"select stg_departures.estdepartureairport, stg_departures.estarrivalairport, airports.* from stg_departures join int_airports airports on stg_departures.estArrivalAirport \u003d airports.ident\"\n }\n }\n }\n\n compute-distances {\n type \u003d CopyAction\n transformers \u003d [\n {\n\t\ttype \u003d PythonCodeDfTransformer\n\t},\n {\n\t\ttype \u003d BlacklistTransformer\n description \u003d \"???\"\n\t\tcolumnBlacklist \u003d []\n\t}\n\n ]\n executionMode {\n \n\t\ttype \u003d DataObjectStateIncrementalMode\n \n }\n inputId \u003d \"???\"\n code \u003d {\n btl-departures-arrivals-airports \u003d \"select btl_connected_airports.estdepartureairport, btl_connected_airports.estarrivalairport, btl_connected_airports.name as arr_name, btl_connected_airports.latitude_deg as arr_latitude_deg, btl_connected_airports.longitude_deg as arr_longitude_deg, airports.name as dep_name, airports.latitude_deg as dep_latitude_deg, airports.longitude_deg as dep_longitude_deg from btl_connected_airports join int_airports airports on btl_connected_airports.estdepartureairport \u003d airports.ident\"\n }\n metadata {\n feed \u003d compute\n }\n }\n\n historizeaction_PLACEHOLDER {\n\t\ttype \u003d HistorizeAction\n\t\tinputId \u003d \"???\"\n\t\toutputId \u003d \"???\"\n\t}\n\n download-airports {\n \n inputId \u003d ext-airports\n }\n \n}\n\ndataObjects {\n dataobjectsexporterdataobject_PLACEHOLDER {\n type \u003d DataObjectsExporterDataObject\n }\n\n csvfiledataobject_PLACEHOLDER {\n type \u003d CsvFileDataObject\n path \u003d \"???\"\n }\n\n}\n\n\n"
-    val context = SDLBContext.fromText(contextText).withCaretPosition(7, 22) //or 52
-    println(context)
-  }
   //TODO add tests for description
 
 }
diff --git a/src/test/scala/io/smartdatalake/utils/MultiLineTransformerSpec.scala b/src/test/scala/io/smartdatalake/utils/MultiLineTransformerSpec.scala
index 4ef5e8b..3bf482e 100644
--- a/src/test/scala/io/smartdatalake/utils/MultiLineTransformerSpec.scala
+++ b/src/test/scala/io/smartdatalake/utils/MultiLineTransformerSpec.scala
@@ -9,7 +9,7 @@ import scala.util.Using
 
 class MultiLineTransformerSpec extends UnitSpec {
 
-  private val text: String = loadFile("fixture/hocon/with-multi-lines-example.conf") //TODO add a fixture with mix-in flattened triple quotes and not
+  private val text: String = loadFile("fixture/hocon/with-multi-lines-example.conf")
 
   "Multi line transformer" should "correctly flatten multi lines" in {
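
Review note (outside the patch): the recurring change above replaces hardcoded "\n" with sys.props("line.separator"), which resolves to the platform's line separator (the same value as System.lineSeparator(): "\r\n" on Windows, "\n" on Unix). A minimal, self-contained sketch of that pattern follows; the Attribute model and buildSnippet helper are hypothetical stand-ins, not the project's API.

// Minimal sketch only; names below are illustrative, not part of the SDLB LSP codebase.
object LineSeparatorSketch:

  // Hypothetical stand-in for a schema attribute used when rendering a completion snippet.
  final case class Attribute(name: String, defaultValue: String)

  // Join the generated attribute lines with the platform separator instead of a hardcoded
  // "\n", so inserted snippets render correctly on Windows (CRLF) as well as Unix (LF).
  def buildSnippet(attributes: Seq[Attribute]): String =
    val newLine = sys.props("line.separator") // same value as System.lineSeparator()
    attributes
      .map(att => s"\t\t${att.name} = ${att.defaultValue}")
      .mkString(newLine)

  def main(args: Array[String]): Unit =
    println(buildSnippet(Seq(Attribute("type", "CopyAction"), Attribute("inputId", "\"???\""))))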