Skip to content

Commit b0f9b89

Browse files
authored
Merge pull request #6 from CodelyTV/update/scalafmt-core-3.8.3
Update scalafmt-core to 3.8.3
2 parents de26042 + 90f8bdb commit b0f9b89

File tree

3 files changed

+10
-14
lines changed

3 files changed

+10
-14
lines changed

.scalafmt.conf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
version = 3.8.2
1+
version = 3.8.3
22
runner.dialect = scala213
33
style = default
44
maxColumn = 120

src/main/g8/project/Dependencies.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,11 @@ import sbt._
22

33
object Dependencies {
44
private val prod = Seq(
5-
"com.github.nscala-time" %% "nscala-time" % "$nscala-time_version$",
6-
"com.lihaoyi" %% "pprint" % "$pprint_version$",
7-
"org.apache.spark" %% "spark-core" % "$spark_version$" % Provided,
8-
"org.apache.spark" %% "spark-sql" % "$spark_version$" % Provided,
9-
"org.apache.spark" %% "spark-streaming" % "$spark_version$" % Provided
5+
"com.github.nscala-time" %% "nscala-time" % "$nscala-time_version$",
6+
"com.lihaoyi" %% "pprint" % "$pprint_version$",
7+
"org.apache.spark" %% "spark-core" % "$spark_version$" % Provided,
8+
"org.apache.spark" %% "spark-sql" % "$spark_version$" % Provided,
9+
"org.apache.spark" %% "spark-streaming" % "$spark_version$" % Provided
1010
)
1111
private val test = Seq(
1212
"org.scalatest" %% "scalatest" % "$scalatest_version$",

src/main/g8/src/test/$package$/SparkTestHelper.scala

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,18 +11,14 @@ import java.io.File
1111
import java.nio.file.Files
1212
import scala.reflect.io.Directory
1313

14-
trait SparkTestHelper
15-
extends AnyWordSpec
16-
with BeforeAndAfterEach
17-
with BeforeAndAfterAll
18-
with Matchers {
14+
trait SparkTestHelper extends AnyWordSpec with BeforeAndAfterEach with BeforeAndAfterAll with Matchers {
1915

2016
private val sparkSession = SparkSession
2117
.builder()
2218
.master("local[*]")
2319
.appName("test-spark-session")
2420
.config(sparkConfiguration)
25-
//.enableHiveSupport() uncomment this if you want to use Hive
21+
// .enableHiveSupport() uncomment this if you want to use Hive
2622
.getOrCreate()
2723

2824
protected var tempDir: String = _
@@ -33,14 +29,14 @@ trait SparkTestHelper
3329

3430
protected def sparkConfiguration: SparkConf =
3531
new SparkConf()
36-
/* Uncomment this if you want to use Delta Lake
32+
/* Uncomment this if you want to use Delta Lake
3733
3834
.set("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
3935
.set(
4036
"spark.sql.catalog.spark_catalog",
4137
"org.apache.spark.sql.delta.catalog.DeltaCatalog"
4238
)
43-
*/
39+
*/
4440

4541
override protected def beforeAll(): Unit = {
4642
super.beforeAll()

0 commit comments

Comments (0)