Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package coop.rchain.blockstorage

import cats.effect.{Concurrent, Sync}
import cats.effect.{Async, Sync}
import cats.syntax.all._
import coop.rchain.blockstorage.BlockStore.BlockStore
import coop.rchain.casper.PrettyPrinter
Expand Down Expand Up @@ -37,7 +37,7 @@ final class BlockStoreOps[F[_]](

def getUnsafe(
hashes: Seq[BlockHash]
)(implicit concurrent: Concurrent[F]): fs2.Stream[F, BlockMessage] = {
)(implicit concurrent: Async[F]): fs2.Stream[F, BlockMessage] = {
val streams = hashes.map(h => fs2.Stream.eval(getUnsafe(h)))
fs2.Stream
.emits(streams)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package coop.rchain.blockstorage.dag

import cats.effect.{Concurrent, Sync}
import cats.effect.{Async, Sync}
import cats.syntax.all._
import coop.rchain.casper.PrettyPrinter
import coop.rchain.casper.protocol.BlockMessage
Expand All @@ -26,7 +26,7 @@ final class BlockDagStorageOps[F[_]](

def lookupUnsafe(
hashes: Seq[BlockHash]
)(implicit concurrent: Concurrent[F]): F[List[BlockMetadata]] = {
)(implicit concurrent: Async[F]): F[List[BlockMetadata]] = {
val streams = hashes.map(h => fs2.Stream.eval(lookupUnsafe(h)))
fs2.Stream.emits(streams).parJoinUnbounded.compile.toList
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package coop.rchain.blockstorage.dag

import cats.Monad
import cats.effect.Sync
import cats.effect.concurrent.Ref
import cats.syntax.all._
import coop.rchain.casper.PrettyPrinter
import coop.rchain.models.BlockHash.BlockHash
Expand All @@ -12,6 +11,7 @@ import coop.rchain.shared.syntax._
import coop.rchain.store.KeyValueTypedStore

import scala.collection.immutable.SortedMap
import cats.effect.Ref

object BlockMetadataStore {
def apply[F[_]: Sync: Log](
Expand Down
75 changes: 50 additions & 25 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import BNFC._
import Rholang._
import NativePackagerHelper._
import com.typesafe.sbt.packager.docker._
import protocbridge.Target
//allow stopping sbt tasks using ctrl+c without killing sbt itself
Global / cancelable := true

Expand All @@ -17,8 +18,8 @@ lazy val projectSettings = Seq(
version := "0.1.0-SNAPSHOT",
resolvers ++=
Resolver.sonatypeOssRepos("releases") ++
Resolver.sonatypeOssRepos("snapshots") ++
Seq("jitpack" at "https://jitpack.io"),
Resolver.sonatypeOssRepos("snapshots") ++
Seq("jitpack" at "https://jitpack.io"),
wartremoverExcluded += sourceManaged.value,
Compile / compile / wartremoverErrors ++= Warts.allBut(
// those we want
Expand Down Expand Up @@ -53,6 +54,7 @@ lazy val projectSettings = Seq(
),
scalafmtOnCompile := !sys.env.contains("CI"), // disable in CI environments
ThisBuild / scapegoatVersion := "1.4.11",
ThisBuild / scalacOptions += "semanticdb:synthetics:on",
Test / testOptions += Tests.Argument("-oD"), //output test durations
javacOptions ++= Seq("-source", "11", "-target", "11"),
Test / fork := true,
Expand All @@ -76,7 +78,7 @@ lazy val projectSettings = Seq(
Seq(
Compile / packageDoc / publishArtifact := false,
packageDoc / publishArtifact := false,
Compile / doc / sources := Seq.empty,
Compile / doc / sources := Seq.empty
)
}

Expand Down Expand Up @@ -131,7 +133,6 @@ lazy val shared = (project in file("shared"))
catsTagless,
fs2Core,
lz4,
monix,
scodecCore,
scodecCats,
scodecBits,
Expand All @@ -141,7 +142,8 @@ lazy val shared = (project in file("shared"))
catsLawsTest,
catsLawsTestkitTest,
enumeratum,
jaxb
jaxb,
monix // remove when monix TestScheduler is replaced
)
)
.dependsOn(sdk)
Expand Down Expand Up @@ -170,7 +172,6 @@ lazy val casper = (project in file("casper"))
catsCore,
catsRetry,
catsMtl,
monix,
fs2Core,
fs2Io,
scalacheck % "slowcooker"
Expand All @@ -188,8 +189,19 @@ lazy val casper = (project in file("casper"))
)

lazy val comm = (project in file("comm"))
.enablePlugins(Fs2Grpc)
.settings(commonSettings: _*)
.settings(
scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage,
// it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is generator that is overridden by
// StacksafeScalapbGenerator, so to resolve conflicts it is just dropped. This is found empirically, so
// might break when upgrading the version of Fs2Grpc plugin.
scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+
new Target(
coop.rchain.scalapb.gen(flatPackage = true)._1,
(Compile / sourceManaged).value,
coop.rchain.scalapb.gen(flatPackage = true)._2
),
version := "0.1",
libraryDependencies ++= commonDependencies ++ kamonDependencies ++ protobufDependencies ++ Seq(
grpcNetty,
Expand All @@ -201,12 +213,7 @@ lazy val comm = (project in file("comm"))
catsCore,
catsMtl,
catsTagless,
monix,
guava
),
Compile / PB.targets := Seq(
scalapb.gen(grpc = false) -> (Compile / sourceManaged).value,
grpcmonix.generators.gen() -> (Compile / sourceManaged).value
)
)
.dependsOn(shared % "compile->compile;test->test", crypto, models)
Expand All @@ -230,26 +237,47 @@ lazy val crypto = (project in file("crypto"))

lazy val models = (project in file("models"))
.settings(commonSettings: _*)
.enablePlugins(Fs2Grpc)
.settings(
scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage,
// it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is generator that is overridden by
// StacksafeScalapbGenerator, so to resolve conflicts it is just dropped. This is found empirically, so
// might break when upgrading the version of Fs2Grpc plugin.
scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+
new Target(
coop.rchain.scalapb.gen(flatPackage = true)._1,
(Compile / sourceManaged).value,
coop.rchain.scalapb.gen(flatPackage = true)._2
),
libraryDependencies ++= commonDependencies ++ protobufDependencies ++ Seq(
catsCore,
magnolia,
scalapbCompiler,
scalacheck % "test",
scalacheckShapeless,
scalapbRuntimegGrpc
),
Compile / PB.targets := Seq(
coop.rchain.scalapb.gen(flatPackage = true, grpc = false) -> (Compile / sourceManaged).value,
grpcmonix.generators.gen() -> (Compile / sourceManaged).value
)
)
.dependsOn(shared % "compile->compile;test->test", rspace)

lazy val node = (project in file("node"))
.settings(commonSettings: _*)
.enablePlugins(RpmPlugin, DebianPlugin, JavaAppPackaging, BuildInfoPlugin)
.enablePlugins(RpmPlugin, DebianPlugin, JavaAppPackaging, BuildInfoPlugin, Fs2Grpc)
.settings(
scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage,
// it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is exactly the
// generator that is overridden by StacksafeScalapbGenerator. To resolve conflicts it is just dropped.
// This is found empirically, so might break when upgrading the version of Fs2Grpc plugin.
// If both versions are generated, multiple copies of the same traits are produced leading to compilation error.
scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+
new Target(
coop.rchain.scalapb.gen(flatPackage = true)._1,
(Compile / sourceManaged).value,
coop.rchain.scalapb.gen(flatPackage = true)._2
),
// if this is not specified similar error happens https://github.com/sbt/sbt-buildinfo/issues/149
// looks like fs2 grpc plugin pipeline removes BuildInfo.scala
PB.deleteTargetDirectory := false,
version := git.gitDescribedVersion.value.getOrElse({
val v = "0.0.0-unknown"
System.err.println("Could not get version from `git describe`.")
Expand All @@ -273,12 +301,9 @@ lazy val node = (project in file("node"))
scalapbRuntimegGrpc,
circeParser,
circeGenericExtras,
pureconfig
pureconfig,
monix // remove when BatchInfluxDBReporter is adjusted to work w/o monix
),
Compile / PB.targets := Seq(
scalapb.gen(grpc = false) -> (Compile / sourceManaged).value / "protobuf",
grpcmonix.generators.gen() -> (Compile / sourceManaged).value / "protobuf"
),
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion, git.gitHeadCommit),
buildInfoPackage := "coop.rchain.node",
Compile / mainClass := Some("coop.rchain.node.Main"),
Expand Down Expand Up @@ -379,7 +404,7 @@ lazy val node = (project in file("node"))
rpmUrl := Some("https://rchain.coop"),
rpmLicense := Some("Apache 2.0"),
Rpm / packageArchitecture := "noarch",
Rpm / maintainerScripts := maintainerScriptsAppendFromFile((Rpm/maintainerScripts).value)(
Rpm / maintainerScripts := maintainerScriptsAppendFromFile((Rpm / maintainerScripts).value)(
RpmConstants.Post -> (sourceDirectory.value / "rpm" / "scriptlets" / "post")
),
rpmPrerequisites := Seq(
Expand Down Expand Up @@ -410,13 +435,12 @@ lazy val rholang = (project in file("rholang"))
"-Xfatal-warnings",
"-Xlint:_,-missing-interpolator" // disable "possible missing interpolator" warning
),
Compile / packageDoc/ publishArtifact := false,
Compile / packageDoc / publishArtifact := false,
packageDoc / publishArtifact := false,
Compile / doc / sources := Seq.empty,
libraryDependencies ++= commonDependencies ++ Seq(
catsMtl,
catsEffect,
monix,
scallop,
lightningj,
catsLawsTest,
Expand Down Expand Up @@ -481,7 +505,8 @@ lazy val rspace = (project in file("rspace"))
catsCore,
fs2Core,
scodecCore,
scodecBits
scodecBits,
monix // remove when AtomicAny migrated to Ref
),
/* Tutorial */
/* Publishing Settings */
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
package coop.rchain.casper

import cats.effect.Sync
import cats.effect.concurrent.Ref
import cats.syntax.all._
import coop.rchain.blockstorage.dag.BlockDagStorage.DeployId
import coop.rchain.rholang.interpreter.EvaluateResult
import coop.rchain.sdk.syntax.all._
import cats.effect.Ref

trait BlockExecutionTracker[F[_]] {
def execStarted(d: DeployId): F[Unit]
Expand All @@ -19,7 +19,7 @@ final case class DeployStatusError(status: String) extends DeployStatus
object StatefulExecutionTracker {
def apply[F[_]: Sync]: F[StatefulExecutionTracker[F]] =
for {
ref <- Ref.of(Map.empty[DeployId, DeployStatus])
ref <- Ref[F].of(Map.empty[DeployId, DeployStatus])
} yield new StatefulExecutionTracker(ref)
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package coop.rchain.casper

import cats.data.EitherT
import cats.effect.{Concurrent, Sync, Timer}
import cats.effect.{Async, Sync}
import cats.syntax.all._
import com.google.protobuf.ByteString
import coop.rchain.blockstorage.BlockStore
Expand All @@ -19,6 +19,7 @@ import coop.rchain.models.syntax._
import coop.rchain.models.{BlockHash => _, _}
import coop.rchain.sdk.error.FatalError
import coop.rchain.shared._
import cats.effect.Temporal

final case class ParsingError(details: String)

Expand All @@ -34,7 +35,7 @@ object MultiParentCasper {
// Required to enable protection from re-submitting duplicate deploys
val deployLifespan = 50

def getPreStateForNewBlock[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log]
def getPreStateForNewBlock[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log]
: F[ParentsMergedState] =
for {
dag <- BlockDagStorage[F].getRepresentation
Expand All @@ -47,7 +48,7 @@ object MultiParentCasper {
preState <- getPreStateForParents(parentHashes)
} yield preState

def getPreStateForParents[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log](
def getPreStateForParents[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log](
parentHashes: Set[BlockHash]
): F[ParentsMergedState] =
for {
Expand Down Expand Up @@ -168,7 +169,7 @@ object MultiParentCasper {
rejectedDeploys = csRejectedDeploys
)

def validate[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span](
def validate[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span](
block: BlockMessage,
shardId: String,
minPhloPrice: Long
Expand Down
6 changes: 3 additions & 3 deletions casper/src/main/scala/coop/rchain/casper/Validate.scala
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package coop.rchain.casper

import cats.data.EitherT
import cats.effect.{Concurrent, Sync}
import cats.effect.{Async, Sync}
import cats.syntax.all._
import cats.{Applicative, Monad}
import com.google.protobuf.ByteString
Expand Down Expand Up @@ -359,7 +359,7 @@ object Validate {
}
} yield result

def bondsCache[F[_]: Concurrent: RuntimeManager: Log](
def bondsCache[F[_]: Async: RuntimeManager: Log](
b: BlockMessage
): F[ValidBlockProcessing] = {
val bonds = b.bonds
Expand All @@ -379,7 +379,7 @@ object Validate {
/**
* All of deploys must have greater or equal phloPrice then minPhloPrice
*/
def phloPrice[F[_]: Log: Concurrent](
def phloPrice[F[_]: Log: Async](
b: BlockMessage,
minPhloPrice: Long
): F[ValidBlockProcessing] =
Expand Down
10 changes: 5 additions & 5 deletions casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
package coop.rchain.casper.api

import cats.data.OptionT
import cats.effect.concurrent.Ref
import cats.effect.{Concurrent, Sync}
import cats.effect.{Async, Sync}
import cats.syntax.all._
import com.google.protobuf.ByteString
import coop.rchain.blockstorage.BlockStore
Expand Down Expand Up @@ -45,9 +44,10 @@ import coop.rchain.shared.syntax._
import fs2.Stream

import scala.collection.immutable.SortedMap
import cats.effect.Ref

object BlockApiImpl {
def apply[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span](
def apply[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span](
validatorOpt: Option[ValidatorIdentity],
networkId: String,
shardId: String,
Expand Down Expand Up @@ -89,7 +89,7 @@ object BlockApiImpl {
final case class BlockRetrievalError(message: String) extends Exception
}

class BlockApiImpl[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span](
class BlockApiImpl[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span](
validatorOpt: Option[ValidatorIdentity],
networkId: String,
shardId: String,
Expand Down Expand Up @@ -312,7 +312,7 @@ class BlockApiImpl[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore
val reverseHeightMap = heightMap.toIndexedSeq.reverse
val iterBlockHashes = reverseHeightMap.iterator.map(_._2.toList)
Stream
.fromIterator(iterBlockHashes)
.fromIterator(iterBlockHashes, 1)
.evalMap(_.traverse(BlockStore[F].getUnsafe))
.evalMap(_.traverse(transform))
}
Expand Down
Loading