@@ -52,9 +52,11 @@ lazy val cobrix = (project in file("."))
   .disablePlugins(sbtassembly.AssemblyPlugin)
   .settings(
     name := "cobrix",
+    crossScalaVersions := List(scala211, scala212, scala213),
 
     // No need to publish the aggregation [empty] artifact
     publishArtifact := false,
+    publish / skip := true,
     publish := {},
     publishLocal := {}
   )
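This hunk adds cross-building over three Scala lines to the aggregate project and marks it as not publishable. `publish / skip := true` is the sbt 1.x idiom for skipping a project during publishing; the older `publish := {}` / `publishLocal := {}` overrides stay alongside it, presumably for tooling that does not consult the skip key. A minimal sketch of the version values the new setting assumes are defined near the top of build.sbt (the exact patch versions below are an assumption, not taken from this diff):

    // Assumed to exist elsewhere in build.sbt; patch versions are illustrative.
    val scala211 = "2.11.12"
    val scala212 = "2.12.17"
    val scala213 = "2.13.10"

With these in place, prefixing a task with `+` (e.g. `sbt +test`) runs it once per entry in crossScalaVersions.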
@@ -65,13 +67,13 @@ lazy val cobolParser = (project in file("cobol-parser"))
   .enablePlugins(AutomateHeaderPlugin)
   .settings(
     name := "cobol-parser",
+    crossScalaVersions := List(scala211, scala212, scala213),
     libraryDependencies ++= CobolParserDependencies :+ getScalaDependency(scalaVersion.value),
     shadedDependencies ++= CobolParserShadedDependencies,
     shadingRules ++= Seq(
       ShadingRule.moveUnder("org.antlr.v4.runtime", "za.co.absa.cobrix.cobol.parser.shaded")
     ),
     validNamespaces ++= Set("za"),
-    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     assemblySettings,
     jacocoReportSettings := commonJacocoReportSettings.withTitle("cobrix:cobol-parser Jacoco Report"),
     jacocoExcludes := commonJacocoExcludes
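The parser module gets the same crossScalaVersions list, and the `releasePublishArtifactsAction := PgpKeys.publishSigned.value` override (an sbt-release hook that publishes signed artifacts via sbt-pgp) is dropped, consistent with the release settings removed at the end of this diff. The `ShadingRule.moveUnder` line relocates the bundled ANTLR runtime under the project's own namespace so it cannot clash with another ANTLR version on a Spark classpath. For comparison, roughly the same relocation expressed with sbt-assembly's built-in shade rules would look like the sketch below; this is an illustration only, the build itself uses the shading keys shown above:

    // Rewrites org.antlr.v4.runtime.Foo ->
    //   za.co.absa.cobrix.cobol.parser.shaded.org.antlr.v4.runtime.Foo
    assembly / assemblyShadeRules := Seq(
      ShadeRule.rename(
        "org.antlr.v4.runtime.**" -> "za.co.absa.cobrix.cobol.parser.shaded.org.antlr.v4.runtime.@1"
      ).inAll
    )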
@@ -83,16 +85,19 @@ lazy val cobolConverters = (project in file("cobol-converters"))
   .enablePlugins(AutomateHeaderPlugin)
   .settings(
     name := "cobol-converters",
+    crossScalaVersions := List(scala211, scala212, scala213),
     libraryDependencies ++= CobolConvertersDependencies :+ getScalaDependency(scalaVersion.value),
     // No need to publish this artifact since it has test only at the moment
     publishArtifact := false,
+    publish / skip := true,
     publish := {},
     publishLocal := {}
   )
 
 lazy val sparkCobol = (project in file("spark-cobol"))
   .settings(
     name := "spark-cobol",
+    crossScalaVersions := List(scala211, scala212, scala213),
     printSparkVersion := {
       val log = streams.value.log
       log.info(s"Building with Spark ${sparkVersion(scalaVersion.value)}, Scala ${scalaVersion.value}")
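`printSparkVersion` logs which Spark version the build pairs with the active Scala version, so `sparkVersion` is evidently a Scala-version-to-Spark-version mapping defined elsewhere in the build. A minimal sketch of such a helper, with concrete version numbers that are assumptions for illustration:

    // Hypothetical mapping; the real one lives elsewhere in the Cobrix build.
    def sparkVersion(scalaVer: String): String = scalaVer match {
      case v if v.startsWith("2.11") => "2.4.8"  // last Spark line built for Scala 2.11
      case v if v.startsWith("2.12") => "3.3.2"
      case v if v.startsWith("2.13") => "3.3.2"
      case v => sys.error(s"No Spark version configured for Scala $v")
    }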
@@ -111,7 +116,6 @@ lazy val sparkCobol = (project in file("spark-cobol"))
     libraryDependencies ++= SparkCobolDependencies(scalaVersion.value) :+ getScalaDependency(scalaVersion.value),
     Test / fork := true, // Spark tests fail randomly otherwise
     populateBuildInfoTemplate,
-    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     assemblySettings
   ).dependsOn(cobolParser)
   .settings(
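The same `releasePublishArtifactsAction` removal as in cobol-parser appears here. `Test / fork := true` works around flaky Spark tests: Spark keeps JVM-global state (a single active SparkContext, shutdown hooks, statics), so running suites inside the sbt JVM lets them interfere with each other, while forking gives the tests a JVM of their own. Settings often paired with it in Spark builds, as a sketch of common practice rather than something taken from this diff:

    Test / fork := true
    Test / parallelExecution := false      // run one suite at a time in the forked JVM
    Test / javaOptions ++= Seq("-Xmx2g")   // headroom for local Spark jobs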
@@ -124,10 +128,6 @@ lazy val sparkCobol = (project in file("spark-cobol"))
 ThisBuild / coverageExcludedPackages := ".*examples.*;.*replication.*"
 ThisBuild / coverageExcludedFiles := ".*Example.*;Test.*"
 
-// release settings
-releaseCrossBuild := true
-addCommandAlias("releaseNow", ";set releaseVersionBump := sbtrelease.Version.Bump.Bugfix; release with-defaults")
-
 lazy val assemblySettings = Seq(
   // This merge strategy retains service entries for all services in manifest.
   // It allows custom Spark data sources to be used together, e.g. 'spark-xml' and 'spark-cobol'.
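Dropping `releaseCrossBuild` and the `releaseNow` alias removes the sbt-release workflow entirely; cross-publishing is now driven by the per-project crossScalaVersions lists added above. The `assemblySettings` sequence whose opening lines close this diff is shared by cobol-parser and spark-cobol. Given its comment about retaining service entries, its merge strategy plausibly looks something like the following sketch (an assumed shape, not the project's actual code), which merges META-INF/services files line by line so that several DataSource registrations survive in one fat jar:

    assembly / assemblyMergeStrategy := {
      case PathList("META-INF", "services", _*) => MergeStrategy.filterDistinctLines
      case PathList("META-INF", _*)             => MergeStrategy.discard
      case _                                    => MergeStrategy.first
    }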