This repository has been archived by the owner on Apr 15, 2018. It is now read-only.

Upgrade dependencies and plugins (#157)
hseeberger authored Apr 7, 2017
1 parent 1742a0f commit 6bf4b89
Showing 12 changed files with 106 additions and 113 deletions.
2 changes: 2 additions & 0 deletions .scalafmt.conf
@@ -2,6 +2,8 @@ style = defaultWithAlign

danglingParentheses = true
indentOperator = spray
maxColumn = 100
project.excludeFilters = [".*\\.sbt"]
rewrite.rules = [RedundantBraces, RedundantParens, SortImports]
spaces.inImportCurlyBraces = true
unindentTopLevelOperators = true
48 changes: 10 additions & 38 deletions build.sbt
@@ -68,11 +68,11 @@ lazy val library =
new {
object Version {
final val akka = "2.4.17"
final val akkaHttp = "10.0.3"
final val akkaHttp = "10.0.5"
final val akkaLog4j = "1.3.0"
final val circe = "0.7.0"
final val log4j = "2.8"
final val mockito = "2.6.8"
final val circe = "0.7.1"
final val log4j = "2.8.2"
final val mockito = "2.7.22"
final val scalaTest = "3.0.1"
}
val akkaActor = "com.typesafe.akka" %% "akka-actor" % Version.akka
@@ -94,7 +94,6 @@ lazy val library =

lazy val settings =
commonSettings ++
scalafmtSettings ++
gitSettings ++
headerSettings ++
sonatypeSettings ++
@@ -107,35 +106,18 @@ lazy val commonSettings =
organization := "de.heikoseeberger",
licenses += ("Apache 2.0",
url("http://www.apache.org/licenses/LICENSE-2.0")),
mappings.in(Compile, packageBin) +=
baseDirectory.in(ThisBuild).value / "LICENSE" -> "LICENSE",
mappings.in(Compile, packageBin) += baseDirectory.in(ThisBuild).value / "LICENSE" -> "LICENSE",
scalacOptions ++= Seq(
"-unchecked",
"-deprecation",
"-language:_",
"-target:jvm-1.8",
"-encoding", "UTF-8"
),
javacOptions ++= Seq(
"-source", "1.8",
"-target", "1.8"
),
unmanagedSourceDirectories.in(Compile) :=
Seq(scalaSource.in(Compile).value),
unmanagedSourceDirectories.in(Test) :=
Seq(scalaSource.in(Test).value)
unmanagedSourceDirectories.in(Compile) := Seq(scalaSource.in(Compile).value),
unmanagedSourceDirectories.in(Test) := Seq(scalaSource.in(Test).value)
)

lazy val scalafmtSettings =
reformatOnCompileSettings ++
Seq(
formatSbtFiles := false,
scalafmtConfig :=
Some(baseDirectory.in(ThisBuild).value / ".scalafmt.conf"),
ivyScala :=
ivyScala.value.map(_.copy(overrideScalaVersion = sbtPlugin.value)) // TODO Remove once this workaround no longer needed (https://github.com/sbt/sbt/issues/2786)!
)

lazy val gitSettings =
Seq(
git.useGitDescribe := true
@@ -164,21 +146,11 @@ lazy val bintraySettings =
bintrayPackage := "constructr"
)

import ScalaFmtPlugin.configScalafmtSettings
lazy val multiJvmSettings =
automateScalafmtFor(MultiJvm) ++
AutomateHeaderPlugin.automateFor(Compile, Test, MultiJvm) ++
HeaderPlugin.settingsFor(Compile, Test, MultiJvm) ++
inConfig(MultiJvm)(configScalafmtSettings) ++
Seq(
unmanagedSourceDirectories.in(MultiJvm) :=
Seq(scalaSource.in(MultiJvm).value),
test.in(Test) := {
val testValue = test.in(Test).value
test.in(MultiJvm).value
testValue
},
compileInputs.in(MultiJvm, compile) := {
val scalafmtValue = scalafmt.in(MultiJvm).value
compileInputs.in(MultiJvm, compile).value
}
unmanagedSourceDirectories.in(MultiJvm) := Seq(scalaSource.in(MultiJvm).value),
test.in(Test) := test.in(MultiJvm).dependsOn(test.in(Test)).value
)
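Side note on the new multiJvmSettings wiring above: redefining a task as someOtherTask.dependsOn(thisTask).value tells sbt to run the dependency first, which replaces the older block that sequenced the two test tasks by hand through intermediate .value reads. A minimal, hypothetical sbt sketch of the same idiom follows; the prepare and verify task keys are invented for illustration and are not part of this build.

// Hypothetical sketch only: `verify` is rewired to run `prepare` first,
// just as `test in Test` above is rewired so the regular test suite runs
// before the MultiJvm tests.
lazy val prepare = taskKey[Unit]("hypothetical preparation task")
lazy val verify  = taskKey[Unit]("hypothetical verification task")

prepare := streams.value.log.info("preparing ...")
verify  := streams.value.log.info("verifying ...")
verify  := verify.dependsOn(prepare).value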
@@ -21,19 +21,8 @@ import akka.Done
import akka.actor.{ ActorSystem, Address, AddressFromURIString }
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding.{ Get, Put }
import akka.http.scaladsl.model.StatusCodes.{
Created,
NotFound,
OK,
PreconditionFailed
}
import akka.http.scaladsl.model.{
HttpRequest,
HttpResponse,
ResponseEntity,
StatusCode,
Uri
}
import akka.http.scaladsl.model.StatusCodes.{ Created, NotFound, OK, PreconditionFailed }
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, ResponseEntity, StatusCode, Uri }
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
@@ -54,8 +43,7 @@ object EtcdCoordination {
private def toSeconds(duration: Duration) = (duration.toSeconds + 1).toString
}

final class EtcdCoordination(clusterName: String, system: ActorSystem)
extends Coordination {
final class EtcdCoordination(clusterName: String, system: ActorSystem) extends Coordination {
import EtcdCoordination._

private implicit val mat = ActorMaterializer()(system)
@@ -36,10 +36,7 @@ object EtcdCoordinationSpec {
}
}

class EtcdCoordinationSpec
extends WordSpec
with Matchers
with BeforeAndAfterAll {
class EtcdCoordinationSpec extends WordSpec with Matchers with BeforeAndAfterAll {
import EtcdCoordinationSpec._

private implicit val system = {
@@ -83,8 +80,7 @@ class EtcdCoordinationSpec
super.afterAll()
}

private def resultOf[A](awaitable: Awaitable[A],
max: FiniteDuration = 3.seconds.dilated) =
private def resultOf[A](awaitable: Awaitable[A], max: FiniteDuration = 3.seconds.dilated) =
Await.result(awaitable, max)

private def randomString() = math.abs(Random.nextInt).toString
16 changes: 2 additions & 14 deletions core/src/main/scala/de/heikoseeberger/constructr/Constructr.scala
@@ -16,21 +16,9 @@

package de.heikoseeberger.constructr

import akka.actor.{
Actor,
ActorLogging,
ActorRef,
Props,
SupervisorStrategy,
Terminated
}
import akka.actor.{ Actor, ActorLogging, ActorRef, Props, SupervisorStrategy, Terminated }
import akka.cluster.{ Cluster, Member }
import akka.cluster.ClusterEvent.{
InitialStateAsEvents,
MemberExited,
MemberLeft,
MemberRemoved
}
import akka.cluster.ClusterEvent.{ InitialStateAsEvents, MemberExited, MemberLeft, MemberRemoved }
import akka.cluster.MemberStatus.Up
import de.heikoseeberger.constructr.coordination.Coordination
import scala.concurrent.duration.{ FiniteDuration, NANOSECONDS }
@@ -20,7 +20,6 @@ import akka.actor.{ ExtendedActorSystem, Extension, ExtensionKey }

object ConstructrExtension extends ExtensionKey[ConstructrExtension]

final class ConstructrExtension private (system: ExtendedActorSystem)
extends Extension {
final class ConstructrExtension private (system: ExtendedActorSystem) extends Extension {
system.systemActorOf(Constructr.props, Constructr.Name)
}
@@ -20,11 +20,7 @@ import akka.Done
import akka.actor.FSM.Failure
import akka.actor.{ Address, FSM, Props, Status }
import akka.cluster.Cluster
import akka.cluster.ClusterEvent.{
InitialStateAsEvents,
MemberJoined,
MemberUp
}
import akka.cluster.ClusterEvent.{ InitialStateAsEvents, MemberJoined, MemberUp }
import akka.pattern.pipe
import akka.stream.ActorMaterializer
import de.heikoseeberger.constructr.coordination.Coordination
@@ -52,9 +48,7 @@ object ConstructrMachine {
case object RetryScheduled extends State
}

final case class Data(nodes: Set[Address],
retryState: State,
nrOfRetriesLeft: Int)
final case class Data(nodes: Set[Address], retryState: State, nrOfRetriesLeft: Int)

final case class StateTimeoutException(state: State)
extends RuntimeException(s"State timeout triggered in state $state!")
@@ -111,8 +105,7 @@ final class ConstructrMachine(
private implicit val mat = ActorMaterializer()
private val cluster = Cluster(context.system)

startWith(State.GettingNodes,
Data(Set.empty, State.GettingNodes, nrOfRetries))
startWith(State.GettingNodes, Data(Set.empty, State.GettingNodes, nrOfRetries))

// Getting nodes

@@ -298,8 +291,7 @@ final class ConstructrMachine(
stop(FSM.Failure(s"Number of retries exhausted in $stateName!"))
else
goto(State.RetryScheduled).using(
stateData.copy(retryState = retryState,
nrOfRetriesLeft = stateData.nrOfRetriesLeft - 1)
stateData.copy(retryState = retryState, nrOfRetriesLeft = stateData.nrOfRetriesLeft - 1)
)

private def maxCoordinationTimeout =
@@ -43,8 +43,7 @@ object ConstructrMultiNodeConfig {
}
}

class ConstructrMultiNodeConfig(coordinationPort: Int)
extends MultiNodeConfig {
class ConstructrMultiNodeConfig(coordinationPort: Int) extends MultiNodeConfig {
import ConstructrMultiNodeConfig._

commonConfig(ConfigFactory.load())
@@ -109,20 +108,17 @@ abstract class MultiNodeConstructrSpec(
become {
case "isMember" => sender() ! isMember

case MemberJoined(member)
if member.address == Cluster(context.system).selfAddress =>
case MemberJoined(member) if member.address == Cluster(context.system).selfAddress =>
isMember = true

case MemberUp(member)
if member.address == Cluster(context.system).selfAddress =>
case MemberUp(member) if member.address == Cluster(context.system).selfAddress =>
isMember = true
}
})
within(20.seconds.dilated) {
awaitAssert {
implicit val timeout = Timeout(1.second.dilated)
val isMember = Await.result((listener ? "isMember").mapTo[Boolean],
1.second.dilated)
val isMember = Await.result((listener ? "isMember").mapTo[Boolean], 1.second.dilated)
isMember shouldBe true
}
}
@@ -21,16 +21,11 @@ import io.circe.Json
import io.circe.parser.parse
import java.util.Base64

class MultiNodeEtcdConstructrSpecMultiJvmNode1
extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode2
extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode3
extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode4
extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode5
extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode1 extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode2 extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode3 extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode4 extends MultiNodeEtcdConstructrSpec
class MultiNodeEtcdConstructrSpecMultiJvmNode5 extends MultiNodeEtcdConstructrSpec

object MultiNodeEtcdConstructrSpec {
def toNodes(s: String): Set[Address] = {
@@ -28,10 +28,7 @@ import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }
import scala.concurrent.duration.{ Duration, DurationInt }
import scala.concurrent.{ Await, Future }

final class ConstructrMachineSpec
extends WordSpec
with Matchers
with BeforeAndAfterAll {
final class ConstructrMachineSpec extends WordSpec with Matchers with BeforeAndAfterAll {
import ConstructrMachine._
import Mockito._

66 changes: 66 additions & 0 deletions project/AutomateScalafmtPlugin.scala
@@ -0,0 +1,66 @@
import org.scalafmt.bootstrap.ScalafmtBootstrap
import org.scalafmt.sbt.ScalafmtPlugin
import sbt._
import sbt.Keys._
import sbt.inc.Analysis

object AutomateScalafmtPlugin extends AutoPlugin {

  object autoImport {
    def automateScalafmtFor(configurations: Configuration*): Seq[Setting[_]] =
      configurations.flatMap { c =>
        inConfig(c)(
          Seq(
            compileInputs.in(compile) := {
              scalafmtInc.value
              compileInputs.in(compile).value
            },
            sourceDirectories.in(scalafmtInc) := Seq(scalaSource.value),
            scalafmtInc := {
              val cache   = streams.value.cacheDirectory / "scalafmt"
              val include = includeFilter.in(scalafmtInc).value
              val exclude = excludeFilter.in(scalafmtInc).value
              val sources =
                sourceDirectories
                  .in(scalafmtInc)
                  .value
                  .descendantsExcept(include, exclude)
                  .get
                  .toSet
              def format(handler: Set[File] => Unit, msg: String) = {
                def update(handler: Set[File] => Unit, msg: String)(in: ChangeReport[File],
                                                                    out: ChangeReport[File]) = {
                  val label = Reference.display(thisProjectRef.value)
                  val files = in.modified -- in.removed
                  Analysis
                    .counted("Scala source", "", "s", files.size)
                    .foreach(count => streams.value.log.info(s"$msg $count in $label ..."))
                  handler(files)
                  files
                }
                FileFunction.cached(cache)(FilesInfo.hash, FilesInfo.exists)(update(handler, msg))(
                  sources
                )
              }
              def formattingHandler(files: Set[File]) =
                if (files.nonEmpty) {
                  val filesArg = files.map(_.getAbsolutePath).mkString(",")
                  ScalafmtBootstrap.main(List("--quiet", "-i", "-f", filesArg))
                }
              format(formattingHandler, "Formatting")
              format(_ => (), "Reformatted") // Recalculate the cache
            }
          )
        )
      }
  }

  private val scalafmtInc = taskKey[Unit]("Incrementally format modified sources")

  override def requires = ScalafmtPlugin

  override def trigger = allRequirements

  override def projectSettings =
    (includeFilter.in(scalafmtInc) := "*.scala") +: autoImport.automateScalafmtFor(Compile, Test)
}
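For context on the new plugin above: AutomateScalafmtPlugin takes over from the scalafmtSettings / reformatOnCompileSettings block removed from build.sbt in this same commit. It registers an incremental scalafmtInc task, runs it before compile, and relies on FileFunction.cached so only sources modified since the last run are reformatted; since it is an AutoPlugin with trigger allRequirements, Compile and Test are covered automatically, and further configurations are opted in via automateScalafmtFor, as build.sbt in this commit does for MultiJvm. A minimal, hypothetical usage sketch follows (the example project name is invented, and MultiJvm is assumed to be provided by sbt-multi-jvm):

// Hypothetical build.sbt sketch -- Compile and Test sources are formatted
// automatically; an extra configuration such as MultiJvm is wired in explicitly.
lazy val example =
  project
    .in(file("."))
    .configs(MultiJvm)
    .settings(automateScalafmtFor(MultiJvm): _*)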
10 changes: 6 additions & 4 deletions project/plugins.sbt
@@ -1,7 +1,9 @@
addSbtPlugin("com.dwijnand" % "sbt-travisci" % "1.0.0")
addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "0.4.10")
addSbtPlugin("com.dwijnand" % "sbt-travisci" % "1.1.0")
addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "0.6.6")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.8.5")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.9.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.11")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "1.6.0")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "1.8.0")
addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")

libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.25" // Needed by sbt-git
