Commit f58ccd8

splain plugin
+jvmopts scalafmt

1 parent 75fc432

159 files changed (+10375 additions, -6249 deletions)


.jvmopts

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+-Xmx4G

build.sbt

Lines changed: 16 additions & 1 deletion
@@ -18,7 +18,7 @@ val Scala213 = "2.13.12"
 ThisBuild / tlBaseVersion := "0.15"
 
 ThisBuild / crossScalaVersions := Seq(Scala213, Scala212)
-ThisBuild / scalaVersion := Scala212
+ThisBuild / scalaVersion := Scala213
 
 lazy val root = project
   .in(file("."))
@@ -315,6 +315,21 @@ lazy val framelessSettings = Seq(
   mimaPreviousArtifacts ~= {
     _.filterNot(_.revision == "0.11.0") // didn't release properly
   },
+  //
+  // addCompilerPlugin(
+  //   "io.tryp" % "splain" % "1.1.0-RC0" cross CrossVersion.patch
+  // ),
+  // scalacOptions ++= Seq(
+  //   "-Vimplicits",
+  //   "-Vimplicits-verbose-tree",
+  //   "-Vtype-diffs",
+  //   "-P:splain:Vimplicits-diverging",
+  //   "-P:splain:Vtype-detail:4",
+  //   "-P:splain:Vtype-diffs-detail:3"
+  //   // "-P:splain:Vdebug"
+  // ),
+  //
   /**
    * The old Scala XML is pulled from Scala 2.12.x.
    *
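Note: splain is a compiler plugin that makes failed implicit searches and type mismatches readable; the block above is left commented out so it can be toggled on for debugging without affecting normal builds. A minimal sketch of enabling it locally, e.g. in a git-ignored local.sbt (a hypothetical file name; coordinates and flags copied from the commented block above):

    addCompilerPlugin(
      "io.tryp" % "splain" % "1.1.0-RC0" cross CrossVersion.patch
    )
    scalacOptions ++= Seq(
      "-Vimplicits",                    // built-in implicit-error reporting (Scala 2.13.6+)
      "-P:splain:Vimplicits-diverging", // splain: surface diverging implicit expansions
      "-P:splain:Vtype-detail:4"        // splain: verbosity of type diffs
    )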

cats/src/main/scala/frameless/cats/FramelessSyntax.scala

Lines changed: 12 additions & 5 deletions
@@ -7,18 +7,25 @@ import _root_.cats.mtl.Ask
 import org.apache.spark.sql.SparkSession
 
 trait FramelessSyntax extends frameless.FramelessSyntax {
-  implicit class SparkJobOps[F[_], A](fa: F[A])(implicit S: Sync[F], A: Ask[F, SparkSession]) {
+
+  implicit class SparkJobOps[F[_], A](
+      fa: F[A]
+    )(implicit
+      S: Sync[F],
+      A: Ask[F, SparkSession]) {
     import S._, A._
 
     def withLocalProperty(key: String, value: String): F[A] =
       for {
         session <- ask
-        _       <- delay(session.sparkContext.setLocalProperty(key, value))
-        a       <- fa
+        _ <- delay(session.sparkContext.setLocalProperty(key, value))
+        a <- fa
       } yield a
 
-    def withGroupId(groupId: String): F[A] = withLocalProperty("spark.jobGroup.id", groupId)
+    def withGroupId(groupId: String): F[A] =
+      withLocalProperty("spark.jobGroup.id", groupId)
 
-    def withDescription(description: String): F[A] = withLocalProperty("spark.job.description", description)
+    def withDescription(description: String): F[A] =
+      withLocalProperty("spark.job.description", description)
   }
 }
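For reference, a minimal usage sketch of the SparkJobOps syntax reformatted above, mirroring the pattern in FramelessSyntaxTests below (the ReaderT[IO, SparkSession, *] stack and kind-projector's * syntax are choices of this sketch, not requirements of the trait):

    import _root_.cats.data.ReaderT
    import _root_.cats.effect.IO
    import _root_.cats.syntax.applicative._
    import org.apache.spark.sql.SparkSession
    import frameless.cats.implicits._

    // Sync comes from IO; cats-mtl supplies Ask for Kleisli/ReaderT.
    def tagged: ReaderT[IO, SparkSession, Int] =
      1.pure[ReaderT[IO, SparkSession, *]]
        .withGroupId("example-group")           // sets spark.jobGroup.id
        .withDescription("example description") // sets spark.job.description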

cats/src/main/scala/frameless/cats/SparkDelayInstances.scala

Lines changed: 11 additions & 2 deletions
@@ -5,7 +5,16 @@ import _root_.cats.effect.Sync
 import org.apache.spark.sql.SparkSession
 
 trait SparkDelayInstances {
-  implicit def framelessCatsSparkDelayForSync[F[_]](implicit S: Sync[F]): SparkDelay[F] = new SparkDelay[F] {
-    def delay[A](a: => A)(implicit spark: SparkSession): F[A] = S.delay(a)
+
+  implicit def framelessCatsSparkDelayForSync[F[_]](
+      implicit
+      S: Sync[F]
+    ): SparkDelay[F] = new SparkDelay[F] {
+
+    def delay[A](
+        a: => A
+      )(implicit
+        spark: SparkSession
+      ): F[A] = S.delay(a)
   }
 }
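The instance above turns any effect with a Sync into frameless' SparkDelay, so Spark actions can be suspended instead of executed eagerly. A minimal sketch, assuming an implicit SparkSession is already in scope (session setup elided):

    import _root_.cats.effect.IO
    import _root_.cats.effect.unsafe.implicits.global
    import frameless.TypedDataset
    import frameless.cats.implicits._
    import org.apache.spark.sql.SparkSession

    // collect[F]() resolves SparkDelay[F]; here F = IO via Sync[IO].
    def firstThree(implicit spark: SparkSession): Seq[Long] = {
      val ds: TypedDataset[Long] = TypedDataset.create(Seq(1L, 2L, 3L))
      ds.collect[IO]().unsafeRunSync()
    }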

cats/src/main/scala/frameless/cats/SparkTask.scala

Lines changed: 1 addition & 0 deletions
@@ -6,6 +6,7 @@ import _root_.cats.data.Kleisli
 import org.apache.spark.SparkContext
 
 object SparkTask {
+
   def apply[A](f: SparkContext => A): SparkTask[A] =
     Kleisli[Id, SparkContext, A](f)
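SparkTask[A] is an alias for Kleisli[Id, SparkContext, A], so apply just lifts a plain function into a composable task. A short sketch of its use (names are illustrative; the concrete SparkContext is supplied at the end):

    import frameless.cats.SparkTask
    import org.apache.spark.SparkContext

    object SparkTaskExample {

      // Describe the work; nothing touches Spark until run(sc).
      val countTo100: SparkTask[Long] =
        SparkTask((sc: SparkContext) => sc.parallelize(1 to 100).count())

      def result(sc: SparkContext): Long = countTo100.run(sc)
    }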

cats/src/main/scala/frameless/cats/implicits.scala

Lines changed: 61 additions & 15 deletions
@@ -2,73 +2,119 @@ package frameless
 package cats
 
 import _root_.cats._
-import _root_.cats.kernel.{CommutativeMonoid, CommutativeSemigroup}
+import _root_.cats.kernel.{ CommutativeMonoid, CommutativeSemigroup }
 import _root_.cats.syntax.all._
 import alleycats.Empty
 
 import scala.reflect.ClassTag
 import org.apache.spark.rdd.RDD
 
 object implicits extends FramelessSyntax with SparkDelayInstances {
+
   implicit class rddOps[A: ClassTag](lhs: RDD[A]) {
-    def csum(implicit m: CommutativeMonoid[A]): A =
+
+    def csum(
+        implicit
+        m: CommutativeMonoid[A]
+      ): A =
       lhs.fold(m.empty)(_ |+| _)
-    def csumOption(implicit m: CommutativeSemigroup[A]): Option[A] =
+
+    def csumOption(
+        implicit
+        m: CommutativeSemigroup[A]
+      ): Option[A] =
       lhs.aggregate[Option[A]](None)(
         (acc, a) => Some(acc.fold(a)(_ |+| a)),
         (l, r) => l.fold(r)(x => r.map(_ |+| x) orElse Some(x))
       )
 
-    def cmin(implicit o: Order[A], e: Empty[A]): A = {
+    def cmin(implicit
+        o: Order[A],
+        e: Empty[A]
+      ): A = {
       if (lhs.isEmpty()) e.empty
       else lhs.reduce(_ min _)
     }
-    def cminOption(implicit o: Order[A]): Option[A] =
+
+    def cminOption(
+        implicit
+        o: Order[A]
+      ): Option[A] =
       csumOption(new CommutativeSemigroup[A] {
         def combine(l: A, r: A) = l min r
       })
 
-    def cmax(implicit o: Order[A], e: Empty[A]): A = {
+    def cmax(implicit
+        o: Order[A],
+        e: Empty[A]
+      ): A = {
       if (lhs.isEmpty()) e.empty
       else lhs.reduce(_ max _)
     }
-    def cmaxOption(implicit o: Order[A]): Option[A] =
+
+    def cmaxOption(
+        implicit
+        o: Order[A]
+      ): Option[A] =
      csumOption(new CommutativeSemigroup[A] {
        def combine(l: A, r: A) = l max r
      })
   }
 
   implicit class pairRddOps[K: ClassTag, V: ClassTag](lhs: RDD[(K, V)]) {
-    def csumByKey(implicit m: CommutativeSemigroup[V]): RDD[(K, V)] = lhs.reduceByKey(_ |+| _)
-    def cminByKey(implicit o: Order[V]): RDD[(K, V)] = lhs.reduceByKey(_ min _)
-    def cmaxByKey(implicit o: Order[V]): RDD[(K, V)] = lhs.reduceByKey(_ max _)
+
+    def csumByKey(
+        implicit
+        m: CommutativeSemigroup[V]
+      ): RDD[(K, V)] = lhs.reduceByKey(_ |+| _)
+
+    def cminByKey(
+        implicit
+        o: Order[V]
+      ): RDD[(K, V)] = lhs.reduceByKey(_ min _)
+
+    def cmaxByKey(
+        implicit
+        o: Order[V]
+      ): RDD[(K, V)] = lhs.reduceByKey(_ max _)
   }
 }
 
 object union {
+
   implicit def unionSemigroup[A]: Semigroup[RDD[A]] =
     new Semigroup[RDD[A]] {
       def combine(lhs: RDD[A], rhs: RDD[A]): RDD[A] = lhs union rhs
     }
 }
 
 object inner {
-  implicit def pairwiseInnerSemigroup[K: ClassTag, V: ClassTag: Semigroup]: Semigroup[RDD[(K, V)]] =
+
+  implicit def pairwiseInnerSemigroup[
+      K: ClassTag,
+      V: ClassTag: Semigroup
+    ]: Semigroup[RDD[(K, V)]] =
     new Semigroup[RDD[(K, V)]] {
+
       def combine(lhs: RDD[(K, V)], rhs: RDD[(K, V)]): RDD[(K, V)] =
         lhs.join(rhs).mapValues { case (x, y) => x |+| y }
     }
 }
 
 object outer {
-  implicit def pairwiseOuterSemigroup[K: ClassTag, V: ClassTag](implicit m: Monoid[V]): Semigroup[RDD[(K, V)]] =
+
+  implicit def pairwiseOuterSemigroup[K: ClassTag, V: ClassTag](
+      implicit
+      m: Monoid[V]
+    ): Semigroup[RDD[(K, V)]] =
     new Semigroup[RDD[(K, V)]] {
+
       def combine(lhs: RDD[(K, V)], rhs: RDD[(K, V)]): RDD[(K, V)] =
         lhs.fullOuterJoin(rhs).mapValues {
           case (Some(x), Some(y)) => x |+| y
-          case (None, Some(y)) => y
-          case (Some(x), None) => x
-          case (None, None) => m.empty
+          case (None, Some(y))    => y
+          case (Some(x), None)    => x
+          case (None, None)       => m.empty
         }
     }
 }
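A short usage sketch of the rddOps / pairRddOps syntax above (values are hypothetical; assumes a live SparkContext and cats-kernel's standard instances for Int):

    import frameless.cats.implicits._
    import org.apache.spark.SparkContext

    object RddSyntaxExample {

      def demo(sc: SparkContext): Unit = {
        // CommutativeMonoid[Int] drives csum; Order[Int] drives cminOption.
        val xs = sc.parallelize(Seq(1, 2, 3, 4))
        println(xs.csum)       // 10
        println(xs.cminOption) // Some(1)

        // csumByKey reduces per key via CommutativeSemigroup[Int].
        val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
        println(pairs.csumByKey.collect().toList) // List((a,4), (b,2)), order not guaranteed
      }
    }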

cats/src/test/scala/frameless/cats/FramelessSyntaxTests.scala

Lines changed: 16 additions & 8 deletions
@@ -6,16 +6,18 @@ import _root_.cats.effect.IO
 import _root_.cats.effect.unsafe.implicits.global
 import org.apache.spark.sql.SparkSession
 import org.scalatest.matchers.should.Matchers
-import org.scalacheck.{Test => PTest}
+import org.scalacheck.{ Test => PTest }
 import org.scalacheck.Prop, Prop._
 import org.scalacheck.effect.PropF, PropF._
 
 class FramelessSyntaxTests extends TypedDatasetSuite with Matchers {
   override val sparkDelay = null
 
-  def prop[A, B](data: Vector[X2[A, B]])(
-    implicit ev: TypedEncoder[X2[A, B]]
-  ): Prop = {
+  def prop[A, B](
+      data: Vector[X2[A, B]]
+    )(implicit
+      ev: TypedEncoder[X2[A, B]]
+    ): Prop = {
     import implicits._
 
     val dataset = TypedDataset.create(data).dataset
@@ -24,7 +26,13 @@ class FramelessSyntaxTests extends TypedDatasetSuite with Matchers {
     val typedDataset = dataset.typed
     val typedDatasetFromDataFrame = dataframe.unsafeTyped[X2[A, B]]
 
-    typedDataset.collect[IO]().unsafeRunSync().toVector ?= typedDatasetFromDataFrame.collect[IO]().unsafeRunSync().toVector
+    typedDataset
+      .collect[IO]()
+      .unsafeRunSync()
+      .toVector ?= typedDatasetFromDataFrame
+      .collect[IO]()
+      .unsafeRunSync()
+      .toVector
   }
 
   test("dataset typed - toTyped") {
@@ -37,8 +45,7 @@ class FramelessSyntaxTests extends TypedDatasetSuite with Matchers {
 
     forAllF { (k: String, v: String) =>
       val scopedKey = "frameless.tests." + k
-      1
-        .pure[ReaderT[IO, SparkSession, *]]
+      1.pure[ReaderT[IO, SparkSession, *]]
         .withLocalProperty(scopedKey, v)
         .withGroupId(v)
         .withDescription(v)
@@ -47,7 +54,8 @@ class FramelessSyntaxTests extends TypedDatasetSuite with Matchers {
         sc.getLocalProperty(scopedKey) shouldBe v
         sc.getLocalProperty("spark.jobGroup.id") shouldBe v
         sc.getLocalProperty("spark.job.description") shouldBe v
-      }.void
+      }
+        .void
     }.check().unsafeRunSync().status shouldBe PTest.Passed
   }
 }
