From e9e05527d04d261c1726996ed63d646c5085afc7 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Tue, 21 Mar 2023 23:54:44 +0400 Subject: [PATCH 01/17] Update scalapb version --- project/Dependencies.scala | 38 ++++----- project/StacksafeScalapbGenerator.scala | 14 ++-- project/plugins.sbt | 2 +- .../main/scala/coop/rchain/shared/Debug.scala | 77 ------------------- 4 files changed, 28 insertions(+), 103 deletions(-) delete mode 100644 shared/src/main/scala/coop/rchain/shared/Debug.scala diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 11fd335f0c2..4cb49a70c1e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -44,7 +44,7 @@ object Dependencies { val endpointsOpenApi = "org.endpoints4s" %% "openapi" % "3.0.0" val fs2Core = "co.fs2" %% "fs2-core" % fs2Version val fs2Io = "co.fs2" %% "fs2-io" % fs2Version - val guava = "com.google.guava" % "guava" % "30.1-jre" + val guava = "com.google.guava" % "guava" % "31.1-jre" val hasher = "com.roundeights" %% "hasher" % "1.2.0" val http4sBlazeClient = "org.http4s" %% "http4s-blaze-client" % http4sVersion val http4sBlazeServer = "org.http4s" %% "http4s-blaze-server" % http4sVersion @@ -83,10 +83,10 @@ object Dependencies { val scalapbRuntimegGrpc = "com.thesamet.scalapb" %% "scalapb-runtime-grpc" % scalapb.compiler.Version.scalapbVersion val grpcNetty = "io.grpc" % "grpc-netty" % scalapb.compiler.Version.grpcJavaVersion val grpcServices = "io.grpc" % "grpc-services" % scalapb.compiler.Version.grpcJavaVersion - val nettyBoringSsl = "io.netty" % "netty-tcnative-boringssl-static" % "2.0.36.Final" - val nettyTcnative = "io.netty" % "netty-tcnative" % "2.0.36.Final" classifier osClassifier - val nettyTcnativeLinux = "io.netty" % "netty-tcnative" % "2.0.36.Final" classifier "linux-x86_64" - val nettyTcnativeFedora = "io.netty" % "netty-tcnative" % "2.0.36.Final" classifier "linux-x86_64-fedora" + val nettyBoringSsl = "io.netty" % 
"netty-tcnative-boringssl-static" % "2.0.59.Final" + val nettyTcnative = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier osClassifier + val nettyTcnativeLinux = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64" + val nettyTcnativeFedora = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64-fedora" val scalaCompat = "org.scala-lang.modules" %% "scala-collection-compat" % "2.6.0" val scalatest = "org.scalatest" %% "scalatest" % "3.2.9" % "test" val scalatestPlus = "org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test" @@ -114,21 +114,23 @@ object Dependencies { scalaCompat, slf4j, kamonCore, - "org.typelevel" % "jawn-parser_2.12" % "1.0.1", - // Added to resolve conflicts in scalapb plugin v0.10.8 - "com.google.protobuf" % "protobuf-java" % "3.12.0", + // Overrides for transitive dependencies (we don't use them directly, hence no val-s) + "org.typelevel" % "jawn-parser_2.12" % "1.4.0", + "com.github.jnr" % "jnr-ffi" % "2.2.13", + "com.lihaoyi" %% "geny" % "1.0.0", + "org.scala-lang.modules" %% "scala-xml" % "2.1.0", + "com.typesafe" % "config" % "1.4.2", + // Added to resolve conflicts in scalapb plugin v0.11.3 + "com.google.code.gson" % "gson" % "2.10.1", + "com.google.protobuf" % "protobuf-java" % "3.12.2", + "com.google.errorprone" % "error_prone_annotations" % "2.18.0", + "io.perfmark" % "perfmark-api" % "0.23.0", + "org.codehaus.mojo" % "animal-sniffer-annotations" % "1.19", // Strange version conflict, it requires the same version but in square brackets (range?). - // e.g. io.grpc:grpc-core:1.30.2 ([1.30.2] wanted) + // e.g. 
io.grpc:grpc-core:1.37.0 ([1.37.0] wanted) // https://stackoverflow.com/questions/59423185/strange-versions-conflict-in-sbt-strict-mode - "io.grpc" % "grpc-api" % "1.30.2", - "io.grpc" % "grpc-core" % "1.30.2", - "io.netty" % "netty-codec-http2" % "4.1.48.Final", - // Overrides for transitive dependencies (we don't use them directly, hence no val-s) - "com.github.jnr" % "jnr-ffi" % "2.2.12", - "com.lihaoyi" %% "geny" % "0.6.10", - "com.lihaoyi" %% "sourcecode" % "0.2.1", - "org.scala-lang.modules" %% "scala-xml" % "1.3.0", - "com.typesafe" % "config" % "1.4.0" + "io.grpc" % "grpc-api" % scalapb.compiler.Version.grpcJavaVersion, + "io.grpc" % "grpc-core" % scalapb.compiler.Version.grpcJavaVersion ) private val kindProjector = compilerPlugin( diff --git a/project/StacksafeScalapbGenerator.scala b/project/StacksafeScalapbGenerator.scala index 79c7c346b01..8d05efef279 100644 --- a/project/StacksafeScalapbGenerator.scala +++ b/project/StacksafeScalapbGenerator.scala @@ -6,7 +6,7 @@ import com.google.protobuf.ExtensionRegistry import protocbridge.{Artifact, JvmGenerator} import protocgen.{CodeGenApp, CodeGenRequest, CodeGenResponse} import scalapb.compiler._ -import scalapb.options.compiler.Scalapb +import scalapb.options.Scalapb.registerAllExtensions object gen { def apply( @@ -32,7 +32,7 @@ object gen { object StacksafeScalapbGenerator extends CodeGenApp { override def registerExtensions(registry: ExtensionRegistry): Unit = - Scalapb.registerAllExtensions(registry) + registerAllExtensions(registry) override def suggestedDependencies: Seq[Artifact] = Seq( Artifact( @@ -44,17 +44,17 @@ object StacksafeScalapbGenerator extends CodeGenApp { ) // Adapted from scalapb ProtobufGenerator - // https://github.com/scalapb/ScalaPB/blob/v0.10.8/compiler-plugin/src/main/scala/scalapb/compiler/ProtobufGenerator.scala#L1732 + // https://github.com/scalapb/ScalaPB/blob/v0.11.3/compiler-plugin/src/main/scala/scalapb/compiler/ProtobufGenerator.scala#L1732 def process(request: 
CodeGenRequest): CodeGenResponse = ProtobufGenerator.parseParameters(request.parameter) match { case Right(params) => try { - val implicits = new DescriptorImplicits(params, request.allProtos) + val implicits = DescriptorImplicits.fromCodeGenRequest(params, request) // Inserted custom printer val generator = new StacksafeMessagePrinter(params, implicits) val validator = new ProtoValidation(implicits) validator.validateFiles(request.allProtos) - import implicits.FileDescriptorPimp + import implicits.ExtendedFileDescriptor val files = request.filesToGenerate.flatMap { file => if (file.scalaOptions.getSingleFile) generator.generateSingleScalaFileForFileDescriptor(file) @@ -75,7 +75,7 @@ class StacksafeMessagePrinter( implicits: DescriptorImplicits ) extends ProtobufGenerator(params, implicits) { - import DescriptorImplicits.AsSymbolPimp + import DescriptorImplicits.AsSymbolExtension import implicits._ // Override printing of the whole message @@ -168,7 +168,7 @@ class StacksafeMessagePrinter( ) else printer.add( - s"val __${field.scalaName} = (${field.collectionBuilder} ++= this.${field.scalaName.asSymbol})" + s"val __${field.scalaName} = (${field.collection.newBuilder} ++= this.${field.scalaName.asSymbol})" ) ) .when(message.preservesUnknownFields) { _ => diff --git a/project/plugins.sbt b/project/plugins.sbt index a46fd045556..1dcec98c8e0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.6") // Yes it's weird to do the following, but it's what is mandated by the scalapb documentation -libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.10.8" +libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.3" addSbtPlugin("com.typesafe.sbt" % "sbt-license-report" % "1.2.0") addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.10") diff --git a/shared/src/main/scala/coop/rchain/shared/Debug.scala b/shared/src/main/scala/coop/rchain/shared/Debug.scala deleted 
file mode 100644 index c5c2b3f69c0..00000000000 --- a/shared/src/main/scala/coop/rchain/shared/Debug.scala +++ /dev/null @@ -1,77 +0,0 @@ -package coop.rchain.shared - -import cats.effect.Sync - -/** - * A util class for effortless, systematic and useful debug messages. - * Best explained with example usage: - * - * {{{ - * def someMethod(x: Int, y: String, z: ByteString): F[Unit] = for { - * _ <- Debug.print(x, y, Base16.encode(z.toByteArray)) - * // do someMethod stuff - * } - * }}} - * - * when someMethod is called as below: - * - * {{{ - * someMethod(42, "Hi!", ByteString.fromHex("0xC0FFEE")) - * }}} - * - * the following will be written into the stdout: - * - * {{{ - * 1234.567 someMethod (SomeClass.scala:123) - * x = 42 - * y = Hi! - * Base16.encode(z.toByteArray) = "0xCOFFEE" - * }}} - * - * Works best with IntelliJ's AwesomeConsole plugin, which makes the source location a hyperlink (!). - */ -object Debug { - - // It's actually 'this class' loading time', but is good enough for having a relative measure - private val startupTime = System.currentTimeMillis() - - def print[F[_]: Sync](values: sourcecode.Text[Any]*)( - implicit enclosing: sourcecode.Enclosing, - file: sourcecode.File, - line: sourcecode.Line - ): F[Unit] = - Sync[F].delay { - printUnsafe(values: _*)(enclosing, file, line) - } - - def printUnsafe(values: sourcecode.Text[Any]*)( - implicit enclosing: sourcecode.Enclosing, - file: sourcecode.File, - line: sourcecode.Line - ): Unit = - println(string(values: _*)(enclosing, file, line)) - - def string(values: sourcecode.Text[Any]*)( - implicit enclosing: sourcecode.Enclosing, - file: sourcecode.File, - line: sourcecode.Line - ): String = { - - val name = suffixAfterLast(".", enclosing.value) - val filename = suffixAfterLast("/", file.value) - - val valueIndent = f"${""}%11s" //11 spaces. 
8 for timestamp field, one for space past it, 2 for indent from method - val valuesText = - if (values.isEmpty) "" - else "\n" + values.map(v => s"$valueIndent${v.source} = ${v.value}").mkString("\n") - val timestamp = (System.currentTimeMillis() - startupTime) / 1e3d - - f"$timestamp% 8.3f $name($filename:${line.value})$valuesText" - } - - private def suffixAfterLast(pattern: String, string: String): String = { - val pos = string.lastIndexOf(pattern) - val start = Math.max(-1, Math.min(pos + pattern.length, string.length)) - string.substring(start) - } -} From 71f15e6bdc303240f139e45a5e4b9b93ead6ad7d Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 22 Mar 2023 12:12:07 +0400 Subject: [PATCH 02/17] Make sourcecode direct dependency --- project/Dependencies.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 4cb49a70c1e..0f969655b4e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -99,6 +99,8 @@ object Dependencies { val shapeless = "com.chuusai" %% "shapeless" % "2.3.8" val slf4j = "org.slf4j" % "slf4j-api" % slf4jVersion val weupnp = "org.bitlet" % "weupnp" % "0.1.4" + val sourcecode = "com.lihaoyi" %% "sourcecode" % "0.2.1" + // format: on val overrides = Seq( @@ -114,6 +116,7 @@ object Dependencies { scalaCompat, slf4j, kamonCore, + sourcecode, // Overrides for transitive dependencies (we don't use them directly, hence no val-s) "org.typelevel" % "jawn-parser_2.12" % "1.4.0", "com.github.jnr" % "jnr-ffi" % "2.2.13", @@ -177,5 +180,5 @@ object Dependencies { http4sDependencies ++ circeDependencies val commonDependencies: Seq[ModuleID] = - logging ++ testing :+ kindProjector :+ macroParadise :+ scalaCompat + logging ++ testing :+ kindProjector :+ macroParadise :+ scalaCompat :+ sourcecode } From 8e23f27ce37bfc2a5b9e44ca8b4674477688489a Mon Sep 17 00:00:00 2001 From: nutzipper 
<1746367+nzpr@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:44:13 +0400 Subject: [PATCH 03/17] Add fs2-grpc dependency, adjust generation to omit duplication --- build.sbt | 48 ++++++++++++++----- .../coop/rchain/comm/protocol/kademlia.proto | 1 + node/src/main/protobuf/repl.proto | 1 + project/Dependencies.scala | 1 + project/build.properties | 2 +- project/plugins.sbt | 3 ++ 6 files changed, 42 insertions(+), 14 deletions(-) diff --git a/build.sbt b/build.sbt index 16e90b5636d..50b221f8c53 100644 --- a/build.sbt +++ b/build.sbt @@ -3,6 +3,7 @@ import BNFC._ import Rholang._ import NativePackagerHelper._ import com.typesafe.sbt.packager.docker._ +import protocbridge.Target //allow stopping sbt tasks using ctrl+c without killing sbt itself Global / cancelable := true @@ -188,8 +189,19 @@ lazy val casper = (project in file("casper")) ) lazy val comm = (project in file("comm")) + .enablePlugins(Fs2Grpc) .settings(commonSettings: _*) .settings( + scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage, + // it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is generator that is overridden by + // StacksafeScalapbGenerator, so to resolve conflicts it is just dropped. This is found empirically, so + // might break when upgrading the version of Fs2Grpc plugin. 
+ scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+ + new Target( + coop.rchain.scalapb.gen(flatPackage = true)._1, + (Compile / sourceManaged).value, + coop.rchain.scalapb.gen(flatPackage = true)._2 + ), version := "0.1", libraryDependencies ++= commonDependencies ++ kamonDependencies ++ protobufDependencies ++ Seq( grpcNetty, @@ -203,10 +215,6 @@ lazy val comm = (project in file("comm")) catsTagless, monix, guava - ), - Compile / PB.targets := Seq( - scalapb.gen(grpc = false) -> (Compile / sourceManaged).value, - grpcmonix.generators.gen() -> (Compile / sourceManaged).value ) ) .dependsOn(shared % "compile->compile;test->test", crypto, models) @@ -230,7 +238,18 @@ lazy val crypto = (project in file("crypto")) lazy val models = (project in file("models")) .settings(commonSettings: _*) + .enablePlugins(Fs2Grpc) .settings( + scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage, + // it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is generator that is overridden by + // StacksafeScalapbGenerator, so to resolve conflicts it is just dropped. This is found empirically, so + // might break when upgrading the version of Fs2Grpc plugin. 
+ scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+ + new Target( + coop.rchain.scalapb.gen(flatPackage = true)._1, + (Compile / sourceManaged).value, + coop.rchain.scalapb.gen(flatPackage = true)._2 + ), libraryDependencies ++= commonDependencies ++ protobufDependencies ++ Seq( catsCore, magnolia, @@ -238,18 +257,25 @@ lazy val models = (project in file("models")) scalacheck % "test", scalacheckShapeless, scalapbRuntimegGrpc - ), - Compile / PB.targets := Seq( - coop.rchain.scalapb.gen(flatPackage = true, grpc = false) -> (Compile / sourceManaged).value, - grpcmonix.generators.gen() -> (Compile / sourceManaged).value ) ) .dependsOn(shared % "compile->compile;test->test", rspace) lazy val node = (project in file("node")) .settings(commonSettings: _*) - .enablePlugins(RpmPlugin, DebianPlugin, JavaAppPackaging, BuildInfoPlugin) + .enablePlugins(RpmPlugin, DebianPlugin, JavaAppPackaging, BuildInfoPlugin, Fs2Grpc) .settings( + scalapbCodeGeneratorOptions += CodeGeneratorOption.FlatPackage, + // it turns out that Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.head is exactly the + // generator that is overridden by StacksafeScalapbGenerator. To resolve conflicts it is just dropped. + // This is found empirically, so might break when upgrading the version of Fs2Grpc plugin. + // If both versions are generated, multiple copies of the same traits are produced leading to compilation error. 
+ scalapbCodeGenerators := Fs2GrpcPlugin.autoImport.scalapbCodeGenerators.value.tail :+ + new Target( + coop.rchain.scalapb.gen(flatPackage = true)._1, + (Compile / sourceManaged).value, + coop.rchain.scalapb.gen(flatPackage = true)._2 + ), version := git.gitDescribedVersion.value.getOrElse({ val v = "0.0.0-unknown" System.err.println("Could not get version from `git describe`.") @@ -275,10 +301,6 @@ lazy val node = (project in file("node")) circeGenericExtras, pureconfig ), - Compile / PB.targets := Seq( - scalapb.gen(grpc = false) -> (Compile / sourceManaged).value / "protobuf", - grpcmonix.generators.gen() -> (Compile / sourceManaged).value / "protobuf" - ), buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion, git.gitHeadCommit), buildInfoPackage := "coop.rchain.node", Compile / mainClass := Some("coop.rchain.node.Main"), diff --git a/comm/src/main/protobuf/coop/rchain/comm/protocol/kademlia.proto b/comm/src/main/protobuf/coop/rchain/comm/protocol/kademlia.proto index 3a17e14eb62..dbd76949d30 100644 --- a/comm/src/main/protobuf/coop/rchain/comm/protocol/kademlia.proto +++ b/comm/src/main/protobuf/coop/rchain/comm/protocol/kademlia.proto @@ -6,6 +6,7 @@ import "scalapb/scalapb.proto"; option (scalapb.options) = { package_name: "coop.rchain.comm.discovery" flat_package: true + preserve_unknown_fields: false }; message Node { diff --git a/node/src/main/protobuf/repl.proto b/node/src/main/protobuf/repl.proto index 01e5badd287..1ea3cba9523 100644 --- a/node/src/main/protobuf/repl.proto +++ b/node/src/main/protobuf/repl.proto @@ -10,6 +10,7 @@ import "scalapb/scalapb.proto"; option (scalapb.options) = { package_name: "coop.rchain.node.model" + preserve_unknown_fields: false }; service Repl { diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 0f969655b4e..c5b764475d3 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -100,6 +100,7 @@ object Dependencies { val slf4j = "org.slf4j" % "slf4j-api" % 
slf4jVersion val weupnp = "org.bitlet" % "weupnp" % "0.1.4" val sourcecode = "com.lihaoyi" %% "sourcecode" % "0.2.1" + val grpcNettyShaded = "io.grpc" % "grpc-netty-shaded" % scalapb.compiler.Version.grpcJavaVersion // format: on diff --git a/project/build.properties b/project/build.properties index 22af2628c41..46e43a97ed8 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git a/project/plugins.sbt b/project/plugins.sbt index 1dcec98c8e0..0959c34805c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -18,3 +18,6 @@ addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.6") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.1") +// TODO replace with addSbtPlugin("org.typelevel" % "sbt-fs2-grpc" % "") +// when migrated top CE3 since latest fs2-grpc is not available for CE2 +addSbtPlugin("org.lyranthe.fs2-grpc" % "sbt-java-gen" % "0.11.2") From 938a7361c56c8bbf5b46599b3e75e367e6bf0d86 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:44:36 +0400 Subject: [PATCH 04/17] Adjust code to use fs2-grpc --- .../protocol/client/DeployService.scala | 76 +++++------ .../protocol/client/ProposeService.scala | 17 ++- .../comm/discovery/GrpcKademliaRPC.scala | 37 +++--- .../discovery/GrpcKademliaRPCServer.scala | 17 +-- .../coop/rchain/comm/discovery/package.scala | 11 +- .../rchain/comm/transport/GrpcTransport.scala | 24 ++-- .../comm/transport/GrpcTransportClient.scala | 16 +-- .../transport/GrpcTransportReceiver.scala | 25 ++-- .../comm/transport/GrpcTransportServer.scala | 6 +- .../rchain/comm/transport/StreamHandler.scala | 48 +++---- .../rchain/node/api/DeployGrpcServiceV1.scala | 122 ++++++++++-------- .../node/api/ProposeGrpcServiceV1.scala | 27 ++-- .../rchain/node/api/ReplGrpcService.scala | 27 ++-- 
.../scala/coop/rchain/node/api/package.scala | 40 +++--- .../coop/rchain/node/effects/ReplClient.scala | 18 +-- .../coop/rchain/node/effects/package.scala | 6 +- .../rchain/node/runtime/GrpcServices.scala | 20 +-- .../rchain/node/runtime/NetworkServers.scala | 27 ++-- .../coop/rchain/node/runtime/NodeMain.scala | 2 +- .../coop/rchain/node/runtime/Setup.scala | 2 +- 20 files changed, 257 insertions(+), 311 deletions(-) diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala index 79930effabb..04b92c85501 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala @@ -1,15 +1,15 @@ package coop.rchain.casper.protocol.client -import cats.effect.Sync +import cats.effect.{ConcurrentEffect, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol._ -import coop.rchain.casper.protocol.deploy.v1.{DeployExecStatus, DeployServiceV1GrpcMonix} +import coop.rchain.casper.protocol.deploy.v1.{DeployExecStatus, DeployServiceFs2Grpc} import coop.rchain.crypto.signatures.Signed import coop.rchain.models.Par -import coop.rchain.models.either.implicits._ -import coop.rchain.monix.Monixable import coop.rchain.shared.syntax._ -import io.grpc.{ManagedChannel, ManagedChannelBuilder} +import coop.rchain.models.either.implicits._ +import io.grpc.netty.NettyChannelBuilder +import io.grpc.{ManagedChannel, Metadata} import java.io.Closeable import java.util.concurrent.TimeUnit @@ -39,23 +39,22 @@ object DeployService { def apply[F[_]](implicit ev: DeployService[F]): DeployService[F] = ev } -class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessageSize: Int) +class GrpcDeployService[F[_]: Sync: ConcurrentEffect](host: String, port: Int, maxMessageSize: Int) extends DeployService[F] with Closeable { private val channel: ManagedChannel = - 
ManagedChannelBuilder + NettyChannelBuilder .forAddress(host, port) .maxInboundMessageSize(maxMessageSize) .usePlaintext() .build - private val stub = DeployServiceV1GrpcMonix.stub(channel) + private val stub = DeployServiceFs2Grpc.stub(channel) def deploy(d: Signed[DeployData]): F[Either[Seq[String], String]] = stub - .doDeploy(DeployData.toProto(d)) - .fromTask + .doDeploy(DeployData.toProto(d), new Metadata) .toEitherF( _.message.error, _.message.result @@ -63,8 +62,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def deployStatus(deployId: FindDeployQuery): F[Either[Seq[String], DeployExecStatus]] = stub - .deployStatus(deployId) - .fromTask + .deployStatus(deployId, new Metadata) .toEitherF( _.message.error, _.message.deployExecStatus @@ -72,8 +70,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def getBlock(q: BlockQuery): F[Either[Seq[String], String]] = stub - .getBlock(q) - .fromTask + .getBlock(q, new Metadata) .toEitherF( _.message.error, _.message.blockInfo.map(_.toProtoString) @@ -81,8 +78,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def findDeploy(q: FindDeployQuery): F[Either[Seq[String], String]] = stub - .findDeploy(q) - .fromTask + .findDeploy(q, new Metadata) .toEitherF( _.message.error, _.message.blockInfo.map(_.toProtoString) @@ -90,10 +86,10 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def visualizeDag(q: VisualizeDagQuery): F[Either[Seq[String], String]] = stub - .visualizeDag(q) - .mapEval(_.pure[F].toTask.toEitherF(_.message.error, _.message.content)) - .toListL - .fromTask + .visualizeDag(q, new Metadata) + .evalMap(_.pure[F].toEitherF(_.message.error, _.message.content)) + .compile + .toList .map { bs => val (l, r) = bs.partition(_.isLeft) if (l.isEmpty) Right(r.map(_.right.get).mkString) @@ -102,8 +98,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa 
def machineVerifiableDag(q: MachineVerifyQuery): F[Either[Seq[String], String]] = stub - .machineVerifiableDag(q) - .fromTask + .machineVerifiableDag(q, new Metadata) .toEitherF( _.message.error, _.message.content @@ -111,8 +106,8 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def getBlocks(q: BlocksQuery): F[Either[Seq[String], String]] = stub - .getBlocks(q) - .mapEval(_.pure[F].toTask.toEitherF(_.message.error, _.message.blockInfo)) + .getBlocks(q, new Metadata) + .evalMap(_.pure[F].toEitherF(_.message.error, _.message.blockInfo)) .map(_.map { bi => s""" |------------- block ${bi.blockNumber} --------------- @@ -120,8 +115,8 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa |----------------------------------------------------- |""".stripMargin }) - .toListL - .fromTask + .compile + .toList .map { bs => val (l, r) = bs.partition(_.isLeft) if (l.isEmpty) { @@ -138,8 +133,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa request: DataAtNameQuery ): F[Either[Seq[String], Seq[DataWithBlockInfo]]] = stub - .listenForDataAtName(request) - .fromTask + .listenForDataAtName(request, new Metadata) .toEitherF( _.message.error, _.message.payload.map(_.blockInfo) @@ -149,8 +143,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa request: ContinuationAtNameQuery ): F[Either[Seq[String], Seq[ContinuationsWithBlockInfo]]] = stub - .listenForContinuationAtName(request) - .fromTask + .listenForContinuationAtName(request, new Metadata) .toEitherF( _.message.error, _.message.payload.map(_.blockResults) @@ -160,8 +153,7 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa request: DataAtNameByBlockQuery ): F[Either[Seq[String], (Seq[Par], LightBlockInfo)]] = stub - .getDataAtName(request) - .fromTask + .getDataAtName(request, new Metadata) .toEitherF( _.message.error, _.message.payload.map(r => (r.par, r.block)) 
@@ -169,18 +161,15 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa def lastFinalizedBlock: F[Either[Seq[String], String]] = stub - .lastFinalizedBlock(LastFinalizedBlockQuery()) - .fromTask + .lastFinalizedBlock(LastFinalizedBlockQuery(), new Metadata) .toEitherF( _.message.error, _.message.blockInfo.map(_.toProtoString) ) - def isFinalized(request: IsFinalizedQuery): F[Either[Seq[String], String]] = { - import cats.instances.either._ + def isFinalized(request: IsFinalizedQuery): F[Either[Seq[String], String]] = stub - .isFinalized(request) - .fromTask + .isFinalized(request, new Metadata) .toEitherF(_.message.error, _.message.isFinalized) .map( _.ifM( @@ -188,13 +177,10 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa Seq("Block is not finalized").asLeft ) ) - } - def bondStatus(request: BondStatusQuery): F[Either[Seq[String], String]] = { - import cats.instances.either._ + def bondStatus(request: BondStatusQuery): F[Either[Seq[String], String]] = stub - .bondStatus(request) - .fromTask + .bondStatus(request, new Metadata) .toEitherF(_.message.error, _.message.isBonded) .map( _.ifM( @@ -202,12 +188,10 @@ class GrpcDeployService[F[_]: Monixable: Sync](host: String, port: Int, maxMessa Seq("Validator is not bonded").asLeft ) ) - } def status: F[Either[Seq[String], String]] = stub - .status(com.google.protobuf.empty.Empty()) - .fromTask + .status(com.google.protobuf.empty.Empty(), new Metadata) .toEitherF( _.message.error, _.message.status.map(_.toProtoString) diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala index 0308491265a..19466b56043 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala @@ -1,12 +1,13 @@ package coop.rchain.casper.protocol.client -import 
cats.effect.Sync +import cats.effect.{ConcurrentEffect, Sync} import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.propose.v1._ import coop.rchain.models.either.implicits._ import coop.rchain.monix.Monixable import coop.rchain.shared.syntax._ -import io.grpc.{ManagedChannel, ManagedChannelBuilder} +import io.grpc.netty.NettyChannelBuilder +import io.grpc.{ManagedChannel, ManagedChannelBuilder, Metadata} import java.io.Closeable import java.util.concurrent.TimeUnit @@ -19,23 +20,22 @@ object ProposeService { def apply[F[_]](implicit ev: ProposeService[F]): ProposeService[F] = ev } -class GrpcProposeService[F[_]: Monixable: Sync](host: String, port: Int, maxMessageSize: Int) +class GrpcProposeService[F[_]: Sync: ConcurrentEffect](host: String, port: Int, maxMessageSize: Int) extends ProposeService[F] with Closeable { private val channel: ManagedChannel = - ManagedChannelBuilder + NettyChannelBuilder .forAddress(host, port) .maxInboundMessageSize(maxMessageSize) .usePlaintext() .build - private val stub = ProposeServiceV1GrpcMonix.stub(channel) + private val stub = ProposeServiceFs2Grpc.stub(channel) def propose(isAsync: Boolean): F[Either[Seq[String], String]] = stub - .propose(ProposeQuery(isAsync)) - .fromTask + .propose(ProposeQuery(isAsync), new Metadata) .toEitherF( _.message.error, _.message.result @@ -43,8 +43,7 @@ class GrpcProposeService[F[_]: Monixable: Sync](host: String, port: Int, maxMess def proposeResult: F[Either[Seq[String], String]] = stub - .proposeResult(ProposeResultQuery()) - .fromTask + .proposeResult(ProposeResultQuery(), new Metadata) .toEitherF( _.message.error, _.message.result diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala index 187e22ee5de..9d1dd180b46 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala @@ -1,15 
+1,13 @@ package coop.rchain.comm.discovery -import cats.effect.Sync +import cats.effect.{ConcurrentEffect, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.catscontrib.ski._ import coop.rchain.comm._ -import coop.rchain.comm.discovery.KademliaGrpcMonix.KademliaRPCServiceStub import coop.rchain.comm.rp.Connect.RPConfAsk -import coop.rchain.grpc.implicits._ import coop.rchain.metrics.Metrics -import coop.rchain.metrics.implicits._ +import coop.rchain.metrics.implicits.MetricsSyntaxConversion import coop.rchain.monix.Monixable import coop.rchain.shared.syntax._ import io.grpc._ @@ -19,7 +17,7 @@ import monix.execution.Scheduler import scala.concurrent.duration._ -class GrpcKademliaRPC[F[_]: Monixable: Sync: RPConfAsk: Metrics]( +class GrpcKademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration )(implicit scheduler: Scheduler) @@ -30,10 +28,12 @@ class GrpcKademliaRPC[F[_]: Monixable: Sync: RPConfAsk: Metrics]( def ping(peer: PeerNode): F[Boolean] = for { - _ <- Metrics[F].incrementCounter("ping") - local <- RPConfAsk[F].ask.map(_.local) - ping = Ping().withSender(toNode(local)).withNetworkId(networkId) - pongErr <- withClient(peer, timeout)(_.sendPing(ping).fromTask.timer("ping-time")).attempt + _ <- Metrics[F].incrementCounter("ping") + local <- RPConfAsk[F].ask.map(_.local) + ping = Ping().withSender(toNode(local)).withNetworkId(networkId) + pongErr <- withClient(peer, timeout)( + _.sendPing(ping, new Metadata).timer("ping-time") + ).attempt } yield pongErr.fold(kp(false), _.networkId == networkId) def lookup(key: Seq[Byte], peer: PeerNode): F[Seq[PeerNode]] = @@ -44,7 +44,9 @@ class GrpcKademliaRPC[F[_]: Monixable: Sync: RPConfAsk: Metrics]( .withId(ByteString.copyFrom(key.toArray)) .withSender(toNode(local)) .withNetworkId(networkId) - responseErr <- withClient(peer, timeout)(_.sendLookup(lookup).fromTask.timer("lookup-time")).attempt + responseErr <- withClient(peer, 
timeout)( + _.sendLookup(lookup, new Metadata).timer("lookup-time") + ).attempt peers = responseErr match { case Right(r) if r.networkId == networkId => r.nodes.map(toPeerNode) @@ -53,22 +55,21 @@ class GrpcKademliaRPC[F[_]: Monixable: Sync: RPConfAsk: Metrics]( } yield peers private def withClient[A](peer: PeerNode, timeout: FiniteDuration, enforce: Boolean = false)( - f: KademliaRPCServiceStub => F[A] + f: KademliaRPCServiceFs2Grpc[F, Metadata] => F[A] ): F[A] = for { - channel <- clientChannel(peer) - stub <- Sync[F].delay(KademliaGrpcMonix.stub(channel).withDeadlineAfter(timeout)) - result <- f(stub).toTask - .doOnFinish(kp(Task.delay(channel.shutdown()).attempt.void)) - .fromTask - _ <- Task.unit.asyncBoundary.fromTask // return control to caller thread + channel <- clientChannel(peer, timeout) + stub = KademliaRPCServiceFs2Grpc.stub(channel) + result <- f(stub) + _ <- Sync[F].delay(channel.shutdown()) } yield result - private def clientChannel(peer: PeerNode): F[ManagedChannel] = + private def clientChannel(peer: PeerNode, timeout: FiniteDuration): F[ManagedChannel] = for { c <- Sync[F].delay { NettyChannelBuilder .forAddress(peer.endpoint.host, peer.endpoint.udpPort) + .idleTimeout(timeout.toMillis, MILLISECONDS) .executor(scheduler) .usePlaintext() .build() diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala index a56319b6bc4..bc3dba4d0cf 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala @@ -4,31 +4,28 @@ import cats.effect.Sync import cats.syntax.all._ import coop.rchain.comm.PeerNode import coop.rchain.monix.Monixable -import coop.rchain.shared.syntax._ -import monix.eval.Task +import io.grpc.Metadata class GrpcKademliaRPCServer[F[_]: Monixable: Sync]( networkId: String, pingHandler: PeerNode => F[Unit], lookupHandler: 
(PeerNode, Array[Byte]) => F[Seq[PeerNode]] -) extends KademliaGrpcMonix.KademliaRPCService { +) extends KademliaRPCServiceFs2Grpc[F, Metadata] { // TODO: legacy code generates KademliaGrpcMonix methods with Task // so these methods cannot be abstracted over effect type - def sendLookup(lookup: Lookup): Task[LookupResponse] = + override def sendLookup(lookup: Lookup, ctx: Metadata): F[LookupResponse] = if (lookup.networkId == networkId) { val id = lookup.id.toByteArray val sender: PeerNode = toPeerNode(lookup.sender.get) lookupHandler(sender, id) .map(peers => LookupResponse().withNodes(peers.map(toNode)).withNetworkId(networkId)) - .toTask - } else Sync[F].delay(LookupResponse().withNodes(Nil)).toTask + } else Sync[F].delay(LookupResponse().withNodes(Nil)) - def sendPing(ping: Ping): Task[Pong] = + override def sendPing(ping: Ping, ctx: Metadata): F[Pong] = if (ping.networkId == networkId) { val sender: PeerNode = toPeerNode(ping.sender.get) - pingHandler(sender).toTask.as(Pong().withNetworkId(networkId)) - } else Sync[F].delay(Pong().withNetworkId(networkId)).toTask - + pingHandler(sender).as(Pong().withNetworkId(networkId)) + } else Sync[F].delay(Pong().withNetworkId(networkId)) } diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala index 10e1309c293..99a7615fc6a 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala @@ -1,6 +1,6 @@ package coop.rchain.comm -import cats.effect.{Resource, Sync} +import cats.effect.{ConcurrentEffect, Resource, Sync} import com.google.protobuf.ByteString import coop.rchain.metrics.Metrics import coop.rchain.monix.Monixable @@ -13,7 +13,7 @@ package object discovery { val DiscoveryMetricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "discovery.kademlia") - def acquireKademliaRPCServer[F[_]: Monixable: Sync]( + def acquireKademliaRPCServer[F[_]: 
Monixable: Sync: ConcurrentEffect]( networkId: String, port: Int, pingHandler: PeerNode => F[Unit], @@ -24,11 +24,8 @@ package object discovery { .forPort(port) .executor(grpcScheduler) .addService( - KademliaGrpcMonix - .bindService( - new GrpcKademliaRPCServer(networkId, pingHandler, lookupHandler), - grpcScheduler - ) + KademliaRPCServiceFs2Grpc + .bindService(new GrpcKademliaRPCServer(networkId, pingHandler, lookupHandler)) ) .build diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala index 0f82af810a7..4915e198498 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala @@ -4,13 +4,12 @@ import cats.effect.Sync import cats.syntax.all._ import coop.rchain.comm.CommError._ import coop.rchain.comm._ -import coop.rchain.comm.protocol.routing.{RoutingGrpcMonix, _} +import coop.rchain.comm.protocol.routing._ import coop.rchain.metrics.Metrics import coop.rchain.metrics.implicits._ import coop.rchain.monix.Monixable -import coop.rchain.shared.syntax._ -import io.grpc.{Status, StatusRuntimeException} -import monix.reactive.Observable +import io.grpc.{Metadata, Status, StatusRuntimeException} +import fs2.Stream import scala.util.{Either, Left, Right} @@ -85,7 +84,7 @@ object GrpcTransport { } def send[F[_]: Monixable: Sync]( - transport: RoutingGrpcMonix.TransportLayer, + transport: TransportLayerFs2Grpc[F, Metadata], peer: PeerNode, msg: Protocol )( @@ -94,25 +93,22 @@ object GrpcTransport { for { _ <- metrics.incrementCounter("send") result <- transport - .send(TLRequest(msg)) - .fromTask + .send(TLRequest(msg), new Metadata) .attempt .timer("send-time") .map(processResponse(peer, _)) } yield result def stream[F[_]: Monixable: Sync]( - transport: RoutingGrpcMonix.TransportLayer, + transport: TransportLayerFs2Grpc[F, Metadata], peer: PeerNode, networkId: String, blob: Blob, 
packetChunkSize: Int ): F[CommErr[Unit]] = { - val chunkIt = Chunker.chunkIt[F](networkId, blob, packetChunkSize) - transport - .stream(Observable.fromIterator(chunkIt.toTask)) - .fromTask - .attempt - .map(processResponse(peer, _)) + val chunkIt = Stream.eval(Chunker.chunkIt[F](networkId, blob, packetChunkSize)).flatMap { i => + Stream.fromIterator(i) + } + transport.stream(chunkIt, new Metadata).attempt.map(processResponse(peer, _)) } } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 672128f7bb5..254aabf5d10 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -3,7 +3,7 @@ package coop.rchain.comm.transport import cats.Applicative import cats.effect.concurrent.{Deferred, Ref} import cats.effect.syntax.all._ -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, ConcurrentEffect, Sync} import cats.syntax.all._ import coop.rchain.comm.CommError.{protocolException, CommErr} import coop.rchain.comm._ @@ -16,7 +16,7 @@ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream import fs2.concurrent.{Signal, SignallingRef} -import io.grpc.ManagedChannel +import io.grpc.{CallOptions, ManagedChannel, Metadata} import io.grpc.netty._ import io.netty.handler.ssl.SslContext import monix.eval.Task @@ -39,7 +39,7 @@ final case class BufferedGrpcStreamChannel[F[_]]( buferSubscriber: Stream[F, Unit] ) -class GrpcTransportClient[F[_]: Monixable: Concurrent: Log: Metrics]( +class GrpcTransportClient[F[_]: Monixable: Concurrent: ConcurrentEffect: Log: Metrics]( networkId: String, cert: String, key: String, @@ -133,15 +133,13 @@ class GrpcTransportClient[F[_]: Monixable: Concurrent: Log: Metrics]( } yield r private def withClient[A](peer: PeerNode, timeout: FiniteDuration)( - request: 
RoutingGrpcMonix.TransportLayer => F[CommErr[A]] + request: TransportLayerFs2Grpc[F, Metadata] => F[CommErr[A]] ): F[CommErr[A]] = (for { channel <- getChannel(peer) - stub <- Sync[F].delay( - RoutingGrpcMonix.stub(channel.grpcTransport).withDeadlineAfter(timeout) - ) - result <- request(stub) - _ <- Task.unit.asyncBoundary.fromTask // return control to caller thread + co = CallOptions.DEFAULT.withDeadlineAfter(timeout.toMillis, MILLISECONDS) + stub = TransportLayerFs2Grpc.stub(channel.grpcTransport, co) + result <- request(stub) } yield result).attempt.map(_.fold(e => Left(protocolException(e)), identity)) def send(peer: PeerNode, msg: Protocol): F[CommErr[Unit]] = diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index 57ebd073a25..7e65962618e 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.transport import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, Sync, Timer} +import cats.effect.{Concurrent, Sync, ConcurrentEffect, Timer} import cats.syntax.all._ import cats.effect.syntax.all._ import coop.rchain.comm.protocol.routing._ @@ -13,12 +13,11 @@ import coop.rchain.monix.Monixable import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream +import io.grpc.Metadata import fs2.concurrent.Queue import io.grpc.netty.NettyServerBuilder import io.netty.handler.ssl.SslContext -import monix.eval.Task import monix.execution.{Cancelable, Scheduler} -import monix.reactive.Observable import scala.collection.concurrent.TrieMap import scala.concurrent.duration.DurationInt @@ -31,7 +30,7 @@ object GrpcTransportReceiver { type MessageBuffers[F[_]] = (Send => F[Boolean], StreamMessage => F[Boolean], Stream[F, Unit]) type MessageHandlers[F[_]] = (Send => 
F[Unit], StreamMessage => F[Unit]) - def create[F[_]: Monixable: Concurrent: RPConfAsk: Log: Metrics: Timer]( + def create[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, serverSslContext: SslContext, @@ -43,7 +42,7 @@ object GrpcTransportReceiver { cache: TrieMap[String, Array[Byte]] )(implicit mainScheduler: Scheduler): F[Cancelable] = { - val service = new RoutingGrpcMonix.TransportLayer { + val service = new TransportLayerFs2Grpc[F, Metadata] { private val circuitBreaker: StreamHandler.CircuitBreaker = streamed => if (streamed.header.exists(_.networkId != networkId)) @@ -99,8 +98,8 @@ object GrpcTransportReceiver { } yield c } - def send(request: TLRequest): Task[TLResponse] = - (for { + def send(request: TLRequest, c: Metadata): F[TLResponse] = + for { _ <- Metrics[F].incrementCounter("packets.received") self <- RPConfAsk[F].reader(_.local) peer = PeerNode.from(request.protocol.header.sender) @@ -112,20 +111,20 @@ object GrpcTransportReceiver { Metrics[F].incrementCounter("packets.dropped") >> internalServerError(packetDroppedMsg).pure[F] ) - } yield r).toTask + } yield r - def stream(observable: Observable[Chunk]): Task[TLResponse] = { + def stream(observable: Stream[F, Chunk], c: Metadata): F[TLResponse] = { import StreamHandler._ import StreamError.StreamErrorToMessage - val result = handleStream(observable, circuitBreaker, cache) >>= { + handleStream(observable, circuitBreaker, cache) >>= { case Left(error @ StreamError.Unexpected(t)) => Log[F].error(error.message, t).as(internalServerError(error.message)) case Left(error) => Log[F].warn(error.message).as(internalServerError(error.message)) - case Right(msg) => { + case Right(msg) => val msgEnqueued = s"Stream chunk pushed to message buffer. Sender ${msg.sender.endpoint.host}, message ${msg.typeId}, " + s"size ${msg.contentLength}, file ${msg.key}." 
@@ -146,9 +145,7 @@ object GrpcTransportReceiver { internalServerError(msgDropped).pure[F] ) } yield r - } } - result.toTask } // TODO InternalServerError should take msg in constructor @@ -169,7 +166,7 @@ object GrpcTransportReceiver { .executor(mainScheduler) .maxInboundMessageSize(maxMessageSize) .sslContext(serverSslContext) - .addService(RoutingGrpcMonix.bindService(service, mainScheduler)) + .addService(TransportLayerFs2Grpc.bindService(service)) .intercept(new SslSessionServerInterceptor(networkId)) .build .start diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala index 07aceb36dfa..90ce55d966a 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.transport import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, Resource, Sync, Timer} +import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync, Timer} import cats.syntax.all._ import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.comm.protocol.routing.Protocol @@ -39,7 +39,7 @@ object TransportLayerServer { } } -class GrpcTransportServer[F[_]: Monixable: Concurrent: RPConfAsk: Log: Metrics: Timer]( +class GrpcTransportServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, cert: String, @@ -109,7 +109,7 @@ class GrpcTransportServer[F[_]: Monixable: Concurrent: RPConfAsk: Log: Metrics: object GrpcTransportServer { - def acquireServer[F[_]: Monixable: Concurrent: RPConfAsk: Log: Metrics: Timer]( + def acquireServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, certPath: Path, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala 
b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala index a3f91bc4a62..74daa53eb81 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala @@ -2,6 +2,7 @@ package coop.rchain.comm.transport import cats.data._ import cats.effect.Sync +import cats.effect.implicits.catsEffectSyntaxBracket import cats.syntax.all._ import coop.rchain.comm.PeerNode import coop.rchain.comm.protocol.routing._ @@ -12,6 +13,7 @@ import coop.rchain.shared.Compression._ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import monix.reactive.Observable +import fs2.Stream import scala.collection.concurrent.TrieMap @@ -72,47 +74,35 @@ object StreamHandler { } } - def handleStream[F[_]: Monixable: Sync: Log]( - stream: Observable[Chunk], + def handleStream[F[_]: Sync: Log]( + stream: Stream[F, Chunk], circuitBreaker: CircuitBreaker, cache: TrieMap[String, Array[Byte]] ): F[Either[StreamError, StreamMessage]] = - init(cache).toTask - .bracketE { initStmd => - (collect(initStmd, stream, circuitBreaker, cache) >>= toResult[F]).value.toTask - }({ - // failed while collecting stream - case (stmd, Right(Left(_))) => - (Log[F].warn("Failed collecting stream.") >> - Sync[F].delay(cache.remove(stmd.key)).void).toTask - // should not happend (errors handled witin bracket) but covered for safety - case (stmd, Left(_)) => - (Log[F].error( - "Stream collection ended unexpected way. Please contact RNode code maintainer." 
- ) >> Sync[F].delay(cache.remove(stmd.key)).void).toTask - // succesfully collected - case (_, _) => - Log[F].debug("Stream collected.").toTask - }) + init(cache) + .flatMap { initStmd => + (collect(initStmd, stream, circuitBreaker, cache) >>= toResult[F]).value.flatTap( + _.leftMap(_ => cache.remove(initStmd.key)).pure + ) + } .attempt .map(_.leftMap(StreamError.unexpected).flatten) - .fromTask private def init[F[_]: Sync](cache: TrieMap[String, Array[Byte]]): F[Streamed] = createCacheEntry[F]("packet_send/", cache) map (key => Streamed(key = key)) - private def collect[F[_]: Monixable: Sync]( + private def collect[F[_]: Sync]( init: Streamed, - stream: Observable[Chunk], + stream: Stream[F, Chunk], circuitBreaker: CircuitBreaker, cache: TrieMap[String, Array[Byte]] ): EitherT[F, StreamError, Streamed] = { def collectStream: F[Streamed] = stream - .foldWhileLeftL(init) { + .scan(init.asLeft[Streamed]) { case ( - stmd, + Left(stmd), Chunk(Chunk.Content.Header(ChunkHeader(sender, typeId, compressed, cl, nid))) ) => val newStmd = stmd.copy( @@ -122,7 +112,7 @@ object StreamHandler { if (circuit.broken) Right(newStmd.copy(circuit = circuit)) else Left(newStmd) - case (stmd, Chunk(Chunk.Content.Data(ChunkData(newData)))) => + case (Left(stmd), Chunk(Chunk.Content.Data(ChunkData(newData)))) => val receivedBytes = newData.toByteArray // Write data to cache @@ -136,14 +126,18 @@ object StreamHandler { if (circuit.broken) Right(newStmd.copy(circuit = circuit)) else Left(newStmd) - case (stmd, _) => + case (Left(stmd), _) => Right( stmd.copy( circuit = Opened(StreamHandler.StreamError.notFullMessage("Not all data received")) ) ) + case (x @ Right(_), _) => x } - .fromTask + .takeThrough(x => x.isLeft) + .map(_.merge) + .compile + .lastOrError EitherT(collectStream.attempt.map { case Right(Streamed(_, _, Opened(error), _)) => error.asLeft diff --git a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala 
b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala index 0bc5f5b00c1..278b974870d 100644 --- a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala +++ b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala @@ -1,41 +1,35 @@ package coop.rchain.node.api -import cats.effect.Concurrent +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import cats.{Applicative, Foldable} import com.google.protobuf.ByteString import coop.rchain.casper.api._ import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.deploy.v1._ -import coop.rchain.catscontrib.TaskContrib._ +import coop.rchain.catscontrib.TaskContrib.AbstractTaskOps import coop.rchain.models.StacksafeMessage import coop.rchain.models.syntax._ import coop.rchain.monix.Monixable import coop.rchain.shared.Log -import coop.rchain.shared.ThrowableOps._ -import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler -import monix.reactive.Observable +import coop.rchain.shared.ThrowableOps.RichThrowable +import io.grpc.Metadata +import fs2.Stream object DeployGrpcServiceV1 { def apply[F[_]: Monixable: Concurrent: Log]( blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F] - )( - implicit worker: Scheduler - ): DeployServiceV1GrpcMonix.DeployService = - new DeployServiceV1GrpcMonix.DeployService { + ): DeployServiceFs2Grpc[F, Metadata] = + new DeployServiceFs2Grpc[F, Metadata] { private def defer[A, R <: StacksafeMessage[R]]( task: F[Either[String, A]] )( response: Either[ServiceError, A] => R - ): Task[R] = - task.toTask - .executeOn(worker) - .fromTask + ): F[R] = + task .logOnError("Deploy service method error.") .attempt .map( @@ -44,36 +38,29 @@ object DeployGrpcServiceV1 { r => response(r.leftMap(e => ServiceError(Seq(e)))) ) ) - .toTask private def deferCollection[A, R <: StacksafeMessage[R], Collection[_]: Applicative: Foldable]( task: F[Collection[A]] )( response: Either[ServiceError, A] => R - ): 
Task[Collection[R]] = - task.toTask - .executeOn(worker) - .fromTask + ): F[Collection[R]] = + task.attempt .logOnError("Deploy service method error.") - .attempt .map( _.fold( t => response(ServiceError(t.toMessageList()).asLeft).pure[Collection], _.map(r => response(r.asRight[ServiceError])) ) ) - .toTask - def doDeploy(request: DeployDataProto): Task[DeployResponse] = + override def doDeploy(request: DeployDataProto, ctx: Metadata): F[DeployResponse] = DeployData .from(request) .fold( errMsg => { import DeployResponse.Message._ - Task({ - val error = ServiceError(Seq[String](errMsg)) - DeployResponse(Error(error)) - }) + val error = ServiceError(Seq[String](errMsg)) + Sync[F].delay(DeployResponse(Error(error))) }, dd => { defer(blockApi.deploy(dd)) { r => @@ -84,23 +71,26 @@ object DeployGrpcServiceV1 { } ) - def deployStatus(request: FindDeployQuery): Task[DeployStatusResponse] = + override def deployStatus(request: FindDeployQuery, ctx: Metadata): F[DeployStatusResponse] = defer(blockApi.deployStatus(request.deployId)) { r => import DeployStatusResponse.Message import DeployStatusResponse.Message._ DeployStatusResponse(r.fold[Message](Error, DeployExecStatus)) } - def getBlock(request: BlockQuery): Task[BlockResponse] = + override def getBlock(request: BlockQuery, ctx: Metadata): F[BlockResponse] = defer(blockApi.getBlock(request.hash)) { r => import BlockResponse.Message import BlockResponse.Message._ BlockResponse(r.fold[Message](Error, BlockInfo)) } - def visualizeDag(request: VisualizeDagQuery): Observable[VisualizeBlocksResponse] = - Observable - .fromTask( + override def visualizeDag( + request: VisualizeDagQuery, + ctx: Metadata + ): Stream[F, VisualizeBlocksResponse] = + Stream + .eval( deferCollection( blockApi .visualizeDag( @@ -115,18 +105,21 @@ object DeployGrpcServiceV1 { VisualizeBlocksResponse(r.fold[Message](Error, Content)) } ) - .flatMap(Observable.fromIterable) + .flatMap(Stream.emits) - def machineVerifiableDag(request: MachineVerifyQuery): 
Task[MachineVerifyResponse] = + override def machineVerifiableDag( + request: MachineVerifyQuery, + ctx: Metadata + ): F[MachineVerifyResponse] = defer(blockApi.machineVerifiableDag(request.depth)) { r => import MachineVerifyResponse.Message import MachineVerifyResponse.Message._ MachineVerifyResponse(r.fold[Message](Error, Content)) } - def getBlocks(request: BlocksQuery): Observable[BlockInfoResponse] = - Observable - .fromTask( + override def getBlocks(request: BlocksQuery, ctx: Metadata): Stream[F, BlockInfoResponse] = + Stream + .eval( deferCollection( blockApi.getBlocks(request.depth).map(_.getOrElse(List.empty[LightBlockInfo])) ) { r => @@ -135,9 +128,12 @@ object DeployGrpcServiceV1 { BlockInfoResponse(r.fold[Message](Error, BlockInfo)) } ) - .flatMap(Observable.fromIterable) + .flatMap(Stream.emits) - def listenForDataAtName(request: DataAtNameQuery): Task[ListeningNameDataResponse] = + override def listenForDataAtName( + request: DataAtNameQuery, + ctx: Metadata + ): F[ListeningNameDataResponse] = defer( blockApi.getListeningNameDataResponse(request.depth, request.name) ) { r => @@ -150,7 +146,10 @@ object DeployGrpcServiceV1 { ) } - def getDataAtName(request: DataAtNameByBlockQuery): Task[RhoDataResponse] = + override def getDataAtName( + request: DataAtNameByBlockQuery, + ctx: Metadata + ): F[RhoDataResponse] = defer(blockApi.getDataAtPar(request.par, request.blockHash, request.usePreStateHash)) { r => import RhoDataResponse.Message import RhoDataResponse.Message._ @@ -161,9 +160,10 @@ object DeployGrpcServiceV1 { ) } - def listenForContinuationAtName( - request: ContinuationAtNameQuery - ): Task[ContinuationAtNameResponse] = + override def listenForContinuationAtName( + request: ContinuationAtNameQuery, + ctx: Metadata + ): F[ContinuationAtNameResponse] = defer( blockApi.getListeningNameContinuationResponse(request.depth, request.names) ) { r => @@ -176,35 +176,41 @@ object DeployGrpcServiceV1 { ) } - def findDeploy(request: FindDeployQuery): 
Task[FindDeployResponse] = + override def findDeploy(request: FindDeployQuery, ctx: Metadata): F[FindDeployResponse] = defer(blockApi.findDeploy(request.deployId)) { r => import FindDeployResponse.Message import FindDeployResponse.Message._ FindDeployResponse(r.fold[Message](Error, BlockInfo)) } - def lastFinalizedBlock(request: LastFinalizedBlockQuery): Task[LastFinalizedBlockResponse] = + override def lastFinalizedBlock( + request: LastFinalizedBlockQuery, + ctx: Metadata + ): F[LastFinalizedBlockResponse] = defer(blockApi.lastFinalizedBlock) { r => import LastFinalizedBlockResponse.Message import LastFinalizedBlockResponse.Message._ LastFinalizedBlockResponse(r.fold[Message](Error, BlockInfo)) } - def isFinalized(request: IsFinalizedQuery): Task[IsFinalizedResponse] = + override def isFinalized(request: IsFinalizedQuery, ctx: Metadata): F[IsFinalizedResponse] = defer(blockApi.isFinalized(request.hash)) { r => import IsFinalizedResponse.Message import IsFinalizedResponse.Message._ IsFinalizedResponse(r.fold[Message](Error, IsFinalized)) } - def bondStatus(request: BondStatusQuery): Task[BondStatusResponse] = + override def bondStatus(request: BondStatusQuery, ctx: Metadata): F[BondStatusResponse] = defer(blockApi.bondStatus(request.publicKey)) { r => import BondStatusResponse.Message import BondStatusResponse.Message._ BondStatusResponse(r.fold[Message](Error, IsBonded)) } - def exploratoryDeploy(request: ExploratoryDeployQuery): Task[ExploratoryDeployResponse] = + override def exploratoryDeploy( + request: ExploratoryDeployQuery, + ctx: Metadata + ): F[ExploratoryDeployResponse] = defer( blockApi .exploratoryDeploy( @@ -220,7 +226,7 @@ object DeployGrpcServiceV1 { })) } - override def getEventByHash(request: ReportQuery): Task[EventInfoResponse] = + override def getEventByHash(request: ReportQuery, ctx: Metadata): F[EventInfoResponse] = defer( request.hash.decodeHex .fold(s"Request hash: ${request.hash} is not valid hex string".asLeft[Array[Byte]])( @@ -239,9 
+245,12 @@ object DeployGrpcServiceV1 { EventInfoResponse(r.fold[Message](Error, Result)) } - def getBlocksByHeights(request: BlocksQueryByHeight): Observable[BlockInfoResponse] = - Observable - .fromTask( + override def getBlocksByHeights( + request: BlocksQueryByHeight, + ctx: Metadata + ): Stream[F, BlockInfoResponse] = + Stream + .eval( deferCollection( blockApi .getBlocksByHeights(request.startBlockNumber, request.endBlockNumber) @@ -252,9 +261,12 @@ object DeployGrpcServiceV1 { BlockInfoResponse(r.fold[Message](Error, BlockInfo)) } ) - .flatMap(Observable.fromIterable) + .flatMap(Stream.emits) - def status(request: com.google.protobuf.empty.Empty): Task[StatusResponse] = - blockApi.status.map(StatusResponse().withStatus).toTask + override def status( + request: com.google.protobuf.empty.Empty, + ctx: Metadata + ): F[StatusResponse] = + blockApi.status.map(StatusResponse().withStatus) } } diff --git a/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala b/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala index 00b03cb9f91..cd32f865d64 100644 --- a/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala +++ b/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala @@ -6,35 +6,29 @@ import coop.rchain.casper.api.BlockApi import coop.rchain.casper.protocol.propose.v1.{ ProposeResponse, ProposeResultResponse, - ProposeServiceV1GrpcMonix + ProposeServiceFs2Grpc } import coop.rchain.casper.protocol.{ProposeQuery, ProposeResultQuery, ServiceError} -import coop.rchain.catscontrib.TaskContrib._ +import coop.rchain.catscontrib.TaskContrib.AbstractTaskOps import coop.rchain.models.StacksafeMessage import coop.rchain.monix.Monixable import coop.rchain.shared.ThrowableOps._ import coop.rchain.shared._ -import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler +import io.grpc.Metadata object ProposeGrpcServiceV1 { def apply[F[_]: Monixable: Sync: Log]( blockApi: BlockApi[F] - )( - 
implicit worker: Scheduler - ): ProposeServiceV1GrpcMonix.ProposeService = - new ProposeServiceV1GrpcMonix.ProposeService { + ): ProposeServiceFs2Grpc[F, Metadata] = + new ProposeServiceFs2Grpc[F, Metadata] { private def defer[A, R <: StacksafeMessage[R]]( task: F[Either[String, A]] )( response: Either[ServiceError, A] => R - ): Task[R] = - task.toTask - .executeOn(worker) - .fromTask + ): F[R] = + task .logOnError("Propose service method error.") .attempt .map( @@ -43,12 +37,9 @@ object ProposeGrpcServiceV1 { r => response(r.leftMap(e => ServiceError(Seq(e)))) ) ) - .toTask // This method should return immediately, only trggerred propose if allowed - def propose( - request: ProposeQuery - ): Task[ProposeResponse] = + def propose(request: ProposeQuery, ctx: Metadata): F[ProposeResponse] = defer(blockApi.createBlock(request.isAsync)) { r => import ProposeResponse.Message import ProposeResponse.Message._ @@ -56,7 +47,7 @@ object ProposeGrpcServiceV1 { } // This method waits for propose to finish, returning result data - def proposeResult(request: ProposeResultQuery): Task[ProposeResultResponse] = + def proposeResult(request: ProposeResultQuery, ctx: Metadata): F[ProposeResultResponse] = defer(blockApi.getProposeResult) { r => import ProposeResultResponse.Message import ProposeResultResponse.Message._ diff --git a/node/src/main/scala/coop/rchain/node/api/ReplGrpcService.scala b/node/src/main/scala/coop/rchain/node/api/ReplGrpcService.scala index 075a1f64dc7..a4e555713b9 100644 --- a/node/src/main/scala/coop/rchain/node/api/ReplGrpcService.scala +++ b/node/src/main/scala/coop/rchain/node/api/ReplGrpcService.scala @@ -1,25 +1,21 @@ package coop.rchain.node.api import cats.effect.Sync -import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.models.Par -import coop.rchain.monix.Monixable -import coop.rchain.node.model.repl._ -import coop.rchain.rholang.interpreter.Interpreter._ +import coop.rchain.node.model.{CmdRequest, EvalRequest, 
ReplFs2Grpc, ReplResponse} +import coop.rchain.rholang.interpreter._ import coop.rchain.rholang.interpreter.accounting.Cost import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.rholang.interpreter.errors.InterpreterError import coop.rchain.rholang.interpreter.storage.StoragePrinter -import coop.rchain.rholang.interpreter.{RhoRuntime, _} -import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler +import io.grpc.Metadata +import cats.syntax.all._ object ReplGrpcService { - def apply[F[_]: Monixable: Sync](runtime: RhoRuntime[F], worker: Scheduler): ReplGrpcMonix.Repl = - new ReplGrpcMonix.Repl { + def apply[F[_]: Sync](runtime: RhoRuntime[F]): ReplFs2Grpc[F, Metadata] = + new ReplFs2Grpc[F, Metadata] { def exec(source: String, printUnmatchedSendsOnly: Boolean = false): F[ReplResponse] = Sync[F] .attempt( @@ -57,14 +53,11 @@ object ReplGrpcService { } .map(ReplResponse(_)) - private def defer[A](task: F[A]): Task[A] = - task.toTask.executeOn(worker) - - def run(request: CmdRequest): Task[ReplResponse] = - defer(exec(request.line)) + def run(request: CmdRequest, ctx: Metadata): F[ReplResponse] = + exec(request.line) - def eval(request: EvalRequest): Task[ReplResponse] = - defer(exec(request.program, request.printUnmatchedSendsOnly)) + def eval(request: EvalRequest, ctx: Metadata): F[ReplResponse] = + exec(request.program, request.printUnmatchedSendsOnly) private def printNormalizedTerm(normalizedTerm: Par): Unit = { Console.println("\nEvaluating:") diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index 42465aa8b4d..c82874efd49 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -1,11 +1,12 @@ package coop.rchain.node -import cats.effect.{Concurrent, Resource, Sync} -import coop.rchain.casper.protocol.deploy.v1.DeployServiceV1GrpcMonix -import 
coop.rchain.casper.protocol.propose.v1.ProposeServiceV1GrpcMonix -import coop.rchain.node.model.repl._ +import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} +import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc +import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc +import coop.rchain.node.model.ReplFs2Grpc import coop.rchain.shared._ import io.grpc +import io.grpc.Metadata import io.grpc.netty.NettyServerBuilder import io.grpc.protobuf.services.ProtoReflectionService import monix.execution.Scheduler @@ -15,13 +16,13 @@ import scala.concurrent.duration.FiniteDuration package object api { - def acquireInternalServer[F[_]: Sync]( + def acquireInternalServer[F[_]: Sync: ConcurrentEffect]( host: String, port: Int, grpcExecutor: Scheduler, - replGrpcService: ReplGrpcMonix.Repl, - deployGrpcService: DeployServiceV1GrpcMonix.DeployService, - proposeGrpcService: ProposeServiceV1GrpcMonix.ProposeService, + replService: ReplFs2Grpc[F, Metadata], + deployService: DeployServiceFs2Grpc[F, Metadata], + proposeService: ProposeServiceFs2Grpc[F, Metadata], maxMessageSize: Int, keepAliveTime: FiniteDuration, keepAliveTimeout: FiniteDuration, @@ -34,17 +35,9 @@ package object api { .forAddress(new InetSocketAddress(host, port)) .executor(grpcExecutor) .maxInboundMessageSize(maxMessageSize) - .addService( - ReplGrpcMonix.bindService(replGrpcService, grpcExecutor) - ) - .addService( - ProposeServiceV1GrpcMonix - .bindService(proposeGrpcService, grpcExecutor) - ) - .addService( - DeployServiceV1GrpcMonix - .bindService(deployGrpcService, grpcExecutor) - ) + .addService(ReplFs2Grpc.bindService(replService)) + .addService(ProposeServiceFs2Grpc.bindService(proposeService)) + .addService(DeployServiceFs2Grpc.bindService(deployService)) .keepAliveTime(keepAliveTime.length, keepAliveTime.unit) .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) .permitKeepAliveTime(permitKeepAliveTime.length, permitKeepAliveTime.unit) @@ -58,11 +51,11 @@ 
package object api { Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) } - def acquireExternalServer[F[_]: Concurrent: Log]( + def acquireExternalServer[F[_]: Concurrent: ConcurrentEffect: Log]( host: String, port: Int, grpcExecutor: Scheduler, - deployGrpcService: DeployServiceV1GrpcMonix.DeployService, + deployGrpcService: DeployServiceFs2Grpc[F, Metadata], maxMessageSize: Int, keepAliveTime: FiniteDuration, keepAliveTimeout: FiniteDuration, @@ -75,10 +68,7 @@ package object api { .forAddress(new InetSocketAddress(host, port)) .executor(grpcExecutor) .maxInboundMessageSize(maxMessageSize) - .addService( - DeployServiceV1GrpcMonix - .bindService(deployGrpcService, grpcExecutor) - ) + .addService(DeployServiceFs2Grpc.bindService(deployGrpcService)) .compressorRegistry(null) .keepAliveTime(keepAliveTime.length, keepAliveTime.unit) .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) diff --git a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala index dfcab2e1659..420d278b832 100644 --- a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala +++ b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala @@ -1,12 +1,10 @@ package coop.rchain.node.effects -import cats.effect.Sync +import cats.effect.{ConcurrentEffect, Sync} import cats.syntax.all._ -import coop.rchain.monix.Monixable -import coop.rchain.node.model.repl._ -import coop.rchain.shared.syntax._ -import io.grpc.ManagedChannel +import coop.rchain.node.model._ import io.grpc.netty.NettyChannelBuilder +import io.grpc.{ManagedChannel, Metadata} import java.io.{Closeable, FileNotFoundException} import java.nio.file._ @@ -26,7 +24,7 @@ object ReplClient { def apply[F[_]](implicit ev: ReplClient[F]): ReplClient[F] = ev } -class GrpcReplClient[F[_]: Monixable: Sync](host: String, port: Int, maxMessageSize: Int) +class GrpcReplClient[F[_]: Sync: ConcurrentEffect](host: 
String, port: Int, maxMessageSize: Int) extends ReplClient[F] with Closeable { @@ -37,12 +35,11 @@ class GrpcReplClient[F[_]: Monixable: Sync](host: String, port: Int, maxMessageS .usePlaintext() .build - private val stub = ReplGrpcMonix.stub(channel) + private val stub = ReplFs2Grpc.stub(channel) def run(line: String): F[Either[Throwable, String]] = stub - .run(CmdRequest(line)) - .fromTask + .run(CmdRequest(line), new Metadata()) .map(_.output) .attempt .map(_.leftMap(processError)) @@ -57,8 +54,7 @@ class GrpcReplClient[F[_]: Monixable: Sync](host: String, port: Int, maxMessageS val filePath = Paths.get(fileName) if (Files.exists(filePath)) stub - .eval(EvalRequest(readContent(filePath), printUnmatchedSendsOnly)) - .fromTask + .eval(EvalRequest(readContent(filePath), printUnmatchedSendsOnly), new Metadata()) .map(_.output) .attempt .map(_.leftMap(processError)) diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index 75ef923945f..ad0ed156d86 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -1,7 +1,7 @@ package coop.rchain.node import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, ConcurrentEffect, Sync} import cats.mtl._ import cats.syntax.all._ import cats.{Applicative, Monad, Parallel} @@ -32,12 +32,12 @@ package object effects { def nodeDiscovery[F[_]: Monad: KademliaStore: KademliaRPC](id: NodeIdentifier): NodeDiscovery[F] = NodeDiscovery.kademlia(id) - def kademliaRPC[F[_]: Monixable: Sync: RPConfAsk: Metrics]( + def kademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration )(implicit scheduler: Scheduler): KademliaRPC[F] = new GrpcKademliaRPC(networkId, timeout) - def transportClient[F[_]: Monixable: Concurrent: Parallel: Log: Metrics]( + def transportClient[F[_]: Monixable: 
Concurrent: ConcurrentEffect: Parallel: Log: Metrics]( networkId: String, certPath: Path, keyPath: Path, diff --git a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala index 444926e756d..fc6c87d8ba1 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala @@ -2,19 +2,19 @@ package coop.rchain.node.runtime import cats.effect.Concurrent import coop.rchain.casper.api.{BlockApi, BlockReportApi} -import coop.rchain.casper.protocol.deploy.v1.DeployServiceV1GrpcMonix -import coop.rchain.casper.protocol.propose.v1.ProposeServiceV1GrpcMonix +import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc +import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.monix.Monixable import coop.rchain.node.api.{DeployGrpcServiceV1, ProposeGrpcServiceV1, ReplGrpcService} -import coop.rchain.node.model.repl.ReplGrpcMonix +import coop.rchain.node.model.ReplFs2Grpc import coop.rchain.rholang.interpreter.RhoRuntime import coop.rchain.shared.Log -import monix.execution.Scheduler +import io.grpc.Metadata -final case class GrpcServices( - deploy: DeployServiceV1GrpcMonix.DeployService, - propose: ProposeServiceV1GrpcMonix.ProposeService, - repl: ReplGrpcMonix.Repl +final case class GrpcServices[F[_]]( + deployService: DeployServiceFs2Grpc[F, Metadata], + proposeService: ProposeServiceFs2Grpc[F, Metadata], + replService: ReplFs2Grpc[F, Metadata] ) object GrpcServices { @@ -22,8 +22,8 @@ object GrpcServices { blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F], runtime: RhoRuntime[F] - )(implicit mainScheduler: Scheduler): GrpcServices = { - val repl = ReplGrpcService(runtime, mainScheduler) + ): GrpcServices[F] = { + val repl = ReplGrpcService(runtime) val deploy = DeployGrpcServiceV1(blockApi, blockReportAPI) val propose = ProposeGrpcServiceV1(blockApi) diff --git 
a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index 964f5d98aee..33dc7c302bc 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -3,8 +3,9 @@ package coop.rchain.node.runtime import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync, Timer} import cats.syntax.all._ import com.typesafe.config.Config -import coop.rchain.casper.protocol.deploy.v1.DeployServiceV1GrpcMonix -import coop.rchain.casper.protocol.propose.v1.ProposeServiceV1GrpcMonix +import coop.rchain.casper.protocol.deploy.v1 +import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc +import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.comm.discovery.{KademliaHandleRPC, KademliaStore, NodeDiscovery} import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} import coop.rchain.comm.rp.HandleMessages @@ -19,14 +20,14 @@ import coop.rchain.node.diagnostics.{ NewPrometheusReporter, UdpInfluxDBReporter } -import coop.rchain.node.model.repl.ReplGrpcMonix +import coop.rchain.node.model.ReplFs2Grpc import coop.rchain.node.web.ReportingRoutes.ReportingHttpRoutes import coop.rchain.node.{api, web} import coop.rchain.sdk.syntax.all._ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.concurrent.Queue -import io.grpc.Server +import io.grpc.{Metadata, Server} import kamon.Kamon import kamon.system.SystemMetrics import kamon.zipkin.ZipkinReporter @@ -46,7 +47,7 @@ object NetworkServers { /* Diagnostics */ : Log: Metrics] // format: on ( routingMessageQueue: Queue[F, RoutingMessage], - grpcServices: GrpcServices, + grpcServices: GrpcServices[F], webApi: WebApi[F], adminWebApi: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F], @@ -86,11 +87,11 @@ object NetworkServers { } yield () } - def internalServer[F[_]: Concurrent: Log]( + def 
internalServer[F[_]: Concurrent: ConcurrentEffect: Log]( nodeConf: NodeConf, - replService: ReplGrpcMonix.Repl, - deployService: DeployServiceV1GrpcMonix.DeployService, - proposeService: ProposeServiceV1GrpcMonix.ProposeService, + replService: ReplFs2Grpc[F, Metadata], + deployService: DeployServiceFs2Grpc[F, Metadata], + proposeService: ProposeServiceFs2Grpc[F, Metadata], grpcScheduler: Scheduler ): Resource[F, Server] = api.acquireInternalServer[F]( @@ -109,9 +110,9 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def externalServer[F[_]: Concurrent: Log]( + def externalServer[F[_]: Concurrent: ConcurrentEffect: Log]( nodeConf: NodeConf, - deployService: DeployServiceV1GrpcMonix.DeployService, + deployService: v1.DeployServiceFs2Grpc[F, Metadata], grpcScheduler: Scheduler ): Resource[F, Server] = api.acquireExternalServer[F]( @@ -128,7 +129,7 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def protocolServer[F[_]: Monixable: Concurrent: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Timer]( + def protocolServer[F[_]: Monixable: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Timer]( nodeConf: NodeConf, routingMessageQueue: Queue[F, RoutingMessage] )(implicit scheduler: Scheduler): Resource[F, Unit] = { @@ -148,7 +149,7 @@ object NetworkServers { ) } - def discoveryServer[F[_]: Monixable: Concurrent: KademliaStore: Log: Metrics]( + def discoveryServer[F[_]: Monixable: Concurrent: ConcurrentEffect: KademliaStore: Log: Metrics]( nodeConf: NodeConf, grpcScheduler: Scheduler ): Resource[F, Server] = diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index b6c0c91e7aa..118f41ff1e0 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -85,7 +85,7 @@ object NodeMain { * @param options command line options 
* @param console console */ - def runCLI[F[_]: Sync: Monixable: ConsoleIO: Timer]( + def runCLI[F[_]: Sync: Monixable: ConcurrentEffect: ConsoleIO: Timer]( options: commandline.Options ): F[Unit] = { val grpcPort = diff --git a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index 6906f72ff41..47241f49e77 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -60,7 +60,7 @@ object Setup { ( Stream[F, Unit], // Node startup process (protocol messages handling) Queue[F, RoutingMessage], - GrpcServices, + GrpcServices[F], WebApi[F], AdminWebApi[F], ReportingHttpRoutes[F] From 0f6b868cb10a5c93278d0507f5876ac64f2b74a0 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:46:40 +0400 Subject: [PATCH 05/17] Remove grpc monix generator --- project/GrpcMonixGenerator.scala | 451 ------------------------------- 1 file changed, 451 deletions(-) delete mode 100644 project/GrpcMonixGenerator.scala diff --git a/project/GrpcMonixGenerator.scala b/project/GrpcMonixGenerator.scala deleted file mode 100644 index cb40021ad54..00000000000 --- a/project/GrpcMonixGenerator.scala +++ /dev/null @@ -1,451 +0,0 @@ -package grpcmonix.generators - -import com.google.protobuf.Descriptors._ -import com.google.protobuf.ExtensionRegistry -import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse -import protocbridge.{Artifact, JvmGenerator} -import protocgen.{CodeGenApp, CodeGenRequest, CodeGenResponse} -import scalapb.compiler.FunctionalPrinter.PrinterEndo -import scalapb.compiler._ -import scalapb.options.compiler.Scalapb - -import scala.collection.JavaConverters._ - -object gen { - def apply( - flatPackage: Boolean = false, - javaConversions: Boolean = false, - grpc: Boolean = true, - singleLineToProtoString: Boolean = false, - asciiFormatToString: Boolean = false, - noLenses: 
Boolean = false - ): (JvmGenerator, Seq[String]) = - ( - JvmGenerator("scala", GrpcMonixGenerator), - Seq( - "flat_package" -> flatPackage, - "java_conversions" -> javaConversions, - "grpc" -> grpc, - "single_line_to_proto_string" -> singleLineToProtoString, - "ascii_format_to_string" -> asciiFormatToString, - "no_lenses" -> noLenses - ).collect { case (name, v) if v => name } - ) -} - -object GrpcMonixGenerator extends CodeGenApp { - override def registerExtensions(registry: ExtensionRegistry): Unit = - Scalapb.registerAllExtensions(registry) - - override def suggestedDependencies: Seq[Artifact] = Seq( - Artifact( - "com.thesamet.scalapb", - "scalapb-runtime", - scalapb.compiler.Version.scalapbVersion, - crossVersion = true - ) - ) - - // Adapted from scalapb ProtobufGenerator and old (unmaintained) gRPC monix scalapb plugin - // - https://github.com/scalapb/ScalaPB/blob/v0.10.8/compiler-plugin/src/main/scala/scalapb/compiler/ProtobufGenerator.scala#L1732 - // - https://github.com/btlines/grpcmonix - def process(request: CodeGenRequest): CodeGenResponse = - ProtobufGenerator.parseParameters(request.parameter) match { - case Right(params) => - try { - val implicits = new DescriptorImplicits(params, request.allProtos) - // Inserted custom printer - val generator = new GrpcMonixGenerator(implicits) - val validator = new ProtoValidation(implicits) - validator.validateFiles(request.allProtos) - val files = request.filesToGenerate - .filter { file => - file.getServices.asScala.nonEmpty - } - .map { file => - generator.generateFile(file) - } - CodeGenResponse.succeed(files) - } catch { - case e: GeneratorException => - CodeGenResponse.fail(e.message) - } - case Left(error) => - CodeGenResponse.fail(error) - } -} - -class GrpcMonixGenerator(implicits: DescriptorImplicits) { - - import implicits._ - - implicit class FileExt(val file: FileDescriptor) { - def objectName = { - val className = file.javaOuterClassName.replace("OuterClass", "") - s"${className}GrpcMonix" - } - } 
- - private[this] def grpcObserver(typeName: String) = s"StreamObserver[$typeName]" - - private[this] def serviceCompanion(typeName: String) = s"ServiceCompanion[$typeName]" - - private[this] def task(typeParam: String) = s"Task[$typeParam]" - - private[this] def serviceMethodDescriptor(method: MethodDescriptor): PrinterEndo = { printer => - val methodType = method.streamType match { - case StreamType.Unary => "UNARY" - case StreamType.ClientStreaming => "CLIENT_STREAMING" - case StreamType.ServerStreaming => "SERVER_STREAMING" - case StreamType.Bidirectional => "BIDI_STREAMING" - } - - printer - .add( - s"val ${method.grpcDescriptor.name}: MethodDescriptor[${method.inputType.scalaType}, ${method.outputType.scalaType}] =" - ) - .indent - .add("MethodDescriptor.newBuilder()") - .addIndented( - s".setType(MethodDescriptor.MethodType.$methodType)", - s""".setFullMethodName(MethodDescriptor.generateFullMethodName("${method.getService.getFullName}", "${method.getName}"))""", - s".setRequestMarshaller(new Marshaller(${method.inputType.scalaType}))", - s".setResponseMarshaller(new Marshaller(${method.outputType.scalaType}))", - ".build()" - ) - .outdent - } - - private[this] def serviceDescriptor(service: ServiceDescriptor): PrinterEndo = - _.add( - s"""val SERVICE: _root_.io.grpc.ServiceDescriptor = _root_.io.grpc.ServiceDescriptor.newBuilder("${service.getFullName}")""" - ).indent - .add( - s".setSchemaDescriptor(new ConcreteProtoFileDescriptorSupplier(${service.getFile.fileDescriptorObject.fullName}.javaDescriptor))" - ) - .print(service.methods) { - case (printer, method) => - printer.add(s".addMethod(${method.grpcDescriptor.name})") - } - .add(".build()") - .outdent - - private[this] def serviceMethodSignature(method: MethodDescriptor) = - s"def ${method.name}" + (method.streamType match { - case StreamType.Unary => - s"(request: ${method.inputType.scalaType}): ${task(method.outputType.scalaType)}" - case StreamType.ClientStreaming => - s"(input: 
Observable[${method.inputType.scalaType}]): ${task(method.outputType.scalaType)}" - case StreamType.ServerStreaming => - s"(request: ${method.inputType.scalaType}): Observable[${method.outputType.scalaType}]" - case StreamType.Bidirectional => - s"(input: Observable[${method.inputType.scalaType}]): Observable[${method.outputType.scalaType}]" - }) - - private[this] def serviceTrait(service: ServiceDescriptor): PrinterEndo = { printer => - printer - .add(s"trait ${service.getName} extends AbstractService {") - .indent - .add(s"override def serviceCompanion = ${service.getName}") - .seq(service.methods.map(serviceMethodSignature)) - .outdent - .add("}") - } - - private[this] def serviceTraitCompanion( - service: ServiceDescriptor, - fileDesc: FileDescriptor - ): PrinterEndo = { printer => - printer - .add(s"object ${service.getName} extends ${serviceCompanion(service.getName)} {") - .indent - .add(s"implicit def serviceCompanion: ${serviceCompanion(service.getName)} = this") - .add(s"def javaDescriptor: ServiceDescriptor = ${service.javaDescriptorSource}") - .add( - s"def scalaDescriptor: scalapb.descriptors.ServiceDescriptor = ${service.scalaDescriptorSource}" - ) - .add( - s"def bindService(serviceImpl: ${service.getName}, executionContext: scala.concurrent.ExecutionContext): ServerServiceDefinition =" - ) - .indent - .add( - s"${fileDesc.objectName}.bindService(serviceImpl, Scheduler(executionContext))" - ) - .outdent - .outdent - .add("}") - } - - private[this] def stub(service: ServiceDescriptor): PrinterEndo = { printer => - printer - .add(s"class ${service.stub}(") - .indent - .add(s"channel: Channel,") - .add(s"options: CallOptions = CallOptions.DEFAULT") - .outdent - .add(s") extends AbstractStub[${service.stub}](channel, options) with ${service.name} {") - .indent - .print(service.getMethods.asScala) { - case (p, m) => p.call(clientMethodImpl(m)) - } - .add(s"override def build(channel: Channel, options: CallOptions): ${service.stub} = ") - .indent - 
.add(s"new ${service.stub}(channel, options)") - .outdent - .outdent - .add("}") - } - - private[this] def clientMethodImpl(method: MethodDescriptor): PrinterEndo = { printer => - def liftByGrpcOperator(inputType: String, outputType: String) = - s"liftByGrpcOperator[$inputType, $outputType]" - - method.streamType match { - case StreamType.Unary => - printer - .add(s"override ${serviceMethodSignature(method)} = ") - .indent - .add("guavaFutureToMonixTask(") - .indent - .add( - s"ClientCalls.futureUnaryCall(channel.newCall(${method.grpcDescriptor.name}, options), request)" - ) - .outdent - .add(")") - .outdent - case StreamType.ClientStreaming => - printer - .add(s"override ${serviceMethodSignature(method)} = ") - .indent - .add(s"${liftByGrpcOperator(method.inputType.scalaType, method.outputType.scalaType)}(") - .indent - .add("input,") - .add(s"outputObserver =>") - .indent - .add("ClientCalls.asyncClientStreamingCall(") - .indent - .add(s"channel.newCall(${method.grpcDescriptor.name}, options),") - .add("outputObserver") - .outdent - .add(")") - .outdent - .outdent - .add(").firstL") - .outdent - case StreamType.ServerStreaming => - printer - .add(s"override ${serviceMethodSignature(method)} = ") - .indent - .add(s"Observable.fromReactivePublisher(new PublisherR[${method.outputType.scalaType}] {") - .indent - .add( - s"override def subscribe(subscriber: SubscriberR[_ >: ${method.outputType.scalaType}]): Unit = {" - ) - .indent - .add("ClientCalls.asyncServerStreamingCall(") - .addIndented( - s"channel.newCall(${method.grpcDescriptor.name}, options),", - "request,", - s"reactiveSubscriberToGrpcObserver[${method.outputType.scalaType}](subscriber)" - ) - .add(")") - .outdent - .add("}") - .outdent - .add("})") - .outdent - case StreamType.Bidirectional => - printer - .add(s"override ${serviceMethodSignature(method)} = ") - .indent - .add(s"${liftByGrpcOperator(method.inputType.scalaType, method.outputType.scalaType)}(") - .indent - .add("input,") - 
.add("outputObserver =>") - .indent - .add("ClientCalls.asyncBidiStreamingCall(") - .indent - .add(s"channel.newCall(${method.grpcDescriptor.name}, options),") - .add("outputObserver") - .outdent - .add(")") - .outdent - .outdent - .add(")") - .outdent - } - } - - private[this] def bindService(service: ServiceDescriptor): PrinterEndo = { printer => - printer - .add( - s"def bindService(serviceImpl: ${service.name}, scheduler: Scheduler): ServerServiceDefinition = " - ) - .indent - .add("ServerServiceDefinition") - .indent - .add(".builder(SERVICE)") - .print(service.methods) { - case (p, m) => - p.call(addMethodImplementation(m)) - } - .add(".build()") - .outdent - .outdent - } - - private[this] def addMethodImplementation(method: MethodDescriptor): PrinterEndo = { printer => - def unliftByTransformer(inputType: String, outputType: String) = - s"unliftByTransformer[$inputType, $outputType]" - - val call = method.streamType match { - case StreamType.Unary => "ServerCalls.asyncUnaryCall" - case StreamType.ClientStreaming => "ServerCalls.asyncClientStreamingCall" - case StreamType.ServerStreaming => "ServerCalls.asyncServerStreamingCall" - case StreamType.Bidirectional => "ServerCalls.asyncBidiStreamingCall" - } - val serverMethod = method.streamType match { - case StreamType.Unary => - s"ServerCalls.UnaryMethod[${method.inputType.scalaType}, ${method.outputType.scalaType}]" - case StreamType.ClientStreaming => - s"ServerCalls.ClientStreamingMethod[${method.inputType.scalaType}, ${method.outputType.scalaType}]" - case StreamType.ServerStreaming => - s"ServerCalls.ServerStreamingMethod[${method.inputType.scalaType}, ${method.outputType.scalaType}]" - case StreamType.Bidirectional => - s"ServerCalls.BidiStreamingMethod[${method.inputType.scalaType}, ${method.outputType.scalaType}]" - } - val impl: PrinterEndo = method.streamType match { - case StreamType.Unary => - _.add( - s"override def invoke(request: ${method.inputType.scalaType}, observer: 
${grpcObserver(method.outputType.scalaType)}): Unit =" - ).indent - .add( - s"serviceImpl.${method.name}(request).runAsync(grpcObserverToMonixCallback(observer))(scheduler)" - ) - .outdent - case StreamType.ClientStreaming => - _.add( - s"override def invoke(observer: ${grpcObserver(method.outputType.scalaType)}): ${grpcObserver(method.inputType.scalaType)} = {" - ).indent - .add("val outputSubscriber = grpcObserverToMonixSubscriber(observer, scheduler)") - .add( - s"val inputSubscriber = ${unliftByTransformer(method.inputType.scalaType, method.outputType.scalaType)}(" - ) - .indent - .add( - s"inputObservable => Observable.fromTask(serviceImpl.${method.name}(inputObservable))," - ) - .add("outputSubscriber") - .outdent - .add(")") - .add("monixSubscriberToGrpcObserver(inputSubscriber)") - .outdent - .add("}") - case StreamType.ServerStreaming => - _.add( - s"override def invoke(request: ${method.inputType.scalaType}, observer: ${grpcObserver(method.outputType.scalaType)}): Unit = " - ).indent - .add( - s"serviceImpl.${method.name}(request).subscribe(grpcObserverToMonixSubscriber(observer, scheduler))" - ) - .outdent - case StreamType.Bidirectional => - _.add( - s"override def invoke(observer: ${grpcObserver(method.outputType.scalaType)}): ${grpcObserver(method.inputType.scalaType)} = {" - ).indent - .add("val outputSubscriber = grpcObserverToMonixSubscriber(observer, scheduler)") - .add( - s"val inputSubscriber = ${unliftByTransformer(method.inputType.scalaType, method.outputType.scalaType)}(" - ) - .indent - .add(s"inputObservable => serviceImpl.${method.name}(inputObservable),") - .add("outputSubscriber") - .outdent - .add(")") - .add("monixSubscriberToGrpcObserver(inputSubscriber)") - .outdent - .add("}") - } - - printer - .add(".addMethod(") - .indent - .add(s"${method.grpcDescriptor.name},") - .add(s"$call(") - .indent - .add(s"new $serverMethod {") - .indent - .call(impl) - .outdent - .add("}") - .outdent - .add(")") - .outdent - .add(")") - } - - 
private[this] def javaDescriptor(service: ServiceDescriptor): PrinterEndo = { printer => - printer - .add(s"def javaDescriptor: ServiceDescriptor = ") - .indent - .add(service.javaDescriptorSource) - .outdent - } - - def generateFile(fileDesc: FileDescriptor): CodeGeneratorResponse.File = { - val b = CodeGeneratorResponse.File.newBuilder() - - b.setName(s"${fileDesc.scalaDirectory}/${fileDesc.objectName}.scala") - - val fp = FunctionalPrinter() - .add(s"package ${fileDesc.scalaPackage.fullName}") - .newline - .add("import _root_.com.google.protobuf.Descriptors.ServiceDescriptor") - .add( - "import _root_.scalapb.grpc.{ AbstractService, ConcreteProtoFileDescriptorSupplier, Marshaller, ServiceCompanion }" - ) - .add( - "import _root_.io.grpc.{ CallOptions, Channel, MethodDescriptor, ServerServiceDefinition }" - ) - .add("import _root_.coop.rchain.grpcmonix.GrpcMonix._") - .add("import _root_.io.grpc.stub.{ AbstractStub, ClientCalls, ServerCalls, StreamObserver }") - .add("import _root_.monix.eval.Task") - .add("import _root_.monix.execution.{ Cancelable, Scheduler }") - .add("import _root_.monix.reactive.Observable") - .add( - "import _root_.org.reactivestreams.{ Publisher => PublisherR, Subscriber => SubscriberR }" - ) - .newline - .add(s"object ${fileDesc.objectName} {") - .indent - .newline - .print(fileDesc.getServices.asScala) { - case (printer, service) => - printer - .print(service.getMethods.asScala) { - case (p, m) => p.call(serviceMethodDescriptor(m)) - } - .newline - .call(serviceDescriptor(service)) - .newline - .call(serviceTrait(service)) - .newline - .call(serviceTraitCompanion(service, fileDesc)) - .newline - .call(stub(service)) - .newline - .call(bindService(service)) - .newline - .add(s"def stub(channel: Channel): ${service.stub} = new ${service.stub}(channel)") - .newline - .call(javaDescriptor(service)) - } - .outdent - .add("}") - .newline - - b.setContent(fp.result) - - b.build - } -} From f2d527ec51f4bf9c3becd8c4cbede72ccde6a3f6 Mon Sep 17 
00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Fri, 24 Mar 2023 12:44:04 +0400 Subject: [PATCH 06/17] Fix tests --- .../comm/transport/GrpcTransportSpec.scala | 11 ++++---- .../comm/transport/StreamHandlerSpec.scala | 28 +++++++++++-------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala index 43db3847194..ceeb1777726 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala @@ -3,10 +3,11 @@ package coop.rchain.comm.transport import com.google.protobuf.ByteString import coop.rchain.comm.CommError._ import coop.rchain.comm._ +import coop.rchain.comm.protocol.routing import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.rp.ProtocolHelper import coop.rchain.metrics.Metrics -import io.grpc.{Status, StatusRuntimeException} +import io.grpc.{Metadata, Status, StatusRuntimeException} import monix.eval.Task import monix.execution.Scheduler import monix.reactive.Observable @@ -52,13 +53,13 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { new RuntimeException("Test exception") private class TestTransportLayer(response: Task[TLResponse]) - extends RoutingGrpcMonix.TransportLayer { - def send(request: TLRequest): Task[TLResponse] = { + extends routing.TransportLayerFs2Grpc[Task, Metadata] { + override def send(request: TLRequest, ctx: Metadata): Task[TLResponse] = { sendMessages += request response } - def stream(input: Observable[Chunk]): Task[TLResponse] = - input.toListL.map { l => + override def stream(input: fs2.Stream[Task, Chunk], ctx: Metadata): Task[TLResponse] = + input.compile.toList.map { l => streamMessages += l ack } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala 
b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala index b48c9624edb..e4839f3a2e4 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.comm.transport import com.google.protobuf.ByteString -import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.catscontrib.ski._ import coop.rchain.comm._ import coop.rchain.comm.protocol.routing._ @@ -13,6 +12,7 @@ import monix.reactive.Observable import org.scalatest.Inside import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers +import fs2.Stream import scala.collection.concurrent.TrieMap import scala.util.Random @@ -66,11 +66,13 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { it("should stop processing a stream if stream is missing header") { // given val cache = TrieMap[String, Array[Byte]]() - val streamWithoutHeader: Observable[Chunk] = - Observable.fromIterator(createStreamIterator().map(_.toList).map { + val streamWithoutHeader: Stream[Task, Chunk] = { + val it: Task[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { case _ :: data => data.toIterator case _ => throw new RuntimeException("") - }) + } + Stream.eval(it).flatMap(Stream.fromIterator[Task](_, 1)) + } // when val err: StreamHandler.StreamError = handleStreamErr(streamWithoutHeader) // then @@ -83,11 +85,13 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { it("should stop processing a stream if stream brought incomplete data") { // given val cache = TrieMap[String, Array[Byte]]() - val incompleteStream: Observable[Chunk] = - Observable.fromIterator(createStreamIterator().map(_.toList).map { + val incompleteStream: Stream[Task, Chunk] = { + val it: Task[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { case header :: _ :: data2 => (header :: data2).toIterator case _ => throw new 
RuntimeException("") - }) + } + Stream.eval(it).flatMap(Stream.fromIterator[Task](_, 1)) + } // when val err: StreamHandler.StreamError = handleStreamErr(incompleteStream, cache = cache) // then @@ -99,7 +103,7 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { } private def handleStream( - stream: Observable[Chunk], + stream: fs2.Stream[Task, Chunk], cache: TrieMap[String, Array[Byte]] = TrieMap[String, Array[Byte]]() ): StreamMessage = StreamHandler @@ -109,7 +113,7 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { .get private def handleStreamErr( - stream: Observable[Chunk], + stream: fs2.Stream[Task, Chunk], circuitBreaker: StreamHandler.CircuitBreaker = neverBreak, cache: TrieMap[String, Array[Byte]] = TrieMap[String, Array[Byte]]() ): StreamHandler.StreamError = @@ -124,8 +128,10 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { contentLength: Int = 30 * 1024, sender: String = "sender", typeId: String = "BlockMessageTest" - ): Observable[Chunk] = - Observable.fromIterator(createStreamIterator(messageSize, contentLength, sender, typeId)) + ): Stream[Task, Chunk] = + Stream + .eval(createStreamIterator(messageSize, contentLength, sender, typeId)) + .flatMap(Stream.fromIterator[Task](_, 1)) private def createStreamIterator( messageSize: Int = 10 * 1024, From 7cce9c30432d786d422c2e77cb51d1a332bf709c Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Fri, 24 Mar 2023 23:50:30 +0400 Subject: [PATCH 07/17] Bump sbt-info --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0959c34805c..24aa6d476d6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("com.github.tkawachi" % 
"sbt-repeat" % "0.1.0") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.12") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.1") From c08c4e79bdac660232ecb8faccaadf550887387f Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Sat, 25 Mar 2023 00:19:49 +0400 Subject: [PATCH 08/17] Fix conflict between sbt-buildinfo and fs2-grpc plugins --- build.sbt | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 50b221f8c53..e8e67939cbe 100644 --- a/build.sbt +++ b/build.sbt @@ -18,8 +18,8 @@ lazy val projectSettings = Seq( version := "0.1.0-SNAPSHOT", resolvers ++= Resolver.sonatypeOssRepos("releases") ++ - Resolver.sonatypeOssRepos("snapshots") ++ - Seq("jitpack" at "https://jitpack.io"), + Resolver.sonatypeOssRepos("snapshots") ++ + Seq("jitpack" at "https://jitpack.io"), wartremoverExcluded += sourceManaged.value, Compile / compile / wartremoverErrors ++= Warts.allBut( // those we want @@ -77,7 +77,7 @@ lazy val projectSettings = Seq( Seq( Compile / packageDoc / publishArtifact := false, packageDoc / publishArtifact := false, - Compile / doc / sources := Seq.empty, + Compile / doc / sources := Seq.empty ) } @@ -276,6 +276,9 @@ lazy val node = (project in file("node")) (Compile / sourceManaged).value, coop.rchain.scalapb.gen(flatPackage = true)._2 ), + // if this is not specified similar error happens https://github.com/sbt/sbt-buildinfo/issues/149 + // looks like fs2 grpc plugin pipeline removes BuildInfo.scala + PB.deleteTargetDirectory := false, version := git.gitDescribedVersion.value.getOrElse({ val v = "0.0.0-unknown" System.err.println("Could not get version from `git describe`.") @@ -401,7 +404,7 @@ lazy val node = (project in file("node")) rpmUrl := Some("https://rchain.coop"), 
rpmLicense := Some("Apache 2.0"), Rpm / packageArchitecture := "noarch", - Rpm / maintainerScripts := maintainerScriptsAppendFromFile((Rpm/maintainerScripts).value)( + Rpm / maintainerScripts := maintainerScriptsAppendFromFile((Rpm / maintainerScripts).value)( RpmConstants.Post -> (sourceDirectory.value / "rpm" / "scriptlets" / "post") ), rpmPrerequisites := Seq( @@ -432,7 +435,7 @@ lazy val rholang = (project in file("rholang")) "-Xfatal-warnings", "-Xlint:_,-missing-interpolator" // disable "possible missing interpolator" warning ), - Compile / packageDoc/ publishArtifact := false, + Compile / packageDoc / publishArtifact := false, packageDoc / publishArtifact := false, Compile / doc / sources := Seq.empty, libraryDependencies ++= commonDependencies ++ Seq( From a5c822976dd6367316e7a9f2691c03728672e779 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 5 Apr 2023 14:03:41 +0400 Subject: [PATCH 09/17] WIP --- build.sbt | 13 +- .../protocol/client/ProposeService.scala | 1 - .../casper/reporting/ReportingCasper.scala | 4 +- .../casper/rholang/RuntimeManager.scala | 19 +- .../HashSetCasperSpecification.scala | 8 +- .../MultiParentCasperAddBlockSpec.scala | 10 +- .../rchain/casper/addblock/ProposerSpec.scala | 54 +- .../api/BlockQueryResponseAPITest.scala | 555 +++++++++--------- .../casper/api/BlocksResponseAPITest.scala | 463 +++++++-------- .../casper/api/BondedStatusAPITest.scala | 313 +++++----- .../casper/api/ExploratoryDeployAPITest.scala | 317 +++++----- .../casper/api/LastFinalizedAPITest.scala | 224 +++---- .../casper/api/ListeningNameAPITest.scala | 338 +++++------ .../MultiParentCasperCommunicationSpec.scala | 1 - .../batch1/MultiParentCasperDeploySpec.scala | 1 - .../MultiParentCasperFinalizationSpec.scala | 1 - .../batch1/MultiParentCasperMergeSpec.scala | 1 - .../MultiParentCasperReportingSpec.scala | 2 +- .../batch1/MultiParentCasperRholangSpec.scala | 1 - .../batch1/MultiParentCasperSmokeSpec.scala | 1 - 
.../batch2/BlockReceiverEffectsSpec.scala | 399 ++++++------- .../batch2/LimitedParentDepthSpec.scala | 6 +- .../casper/batch2/LmdbKeyValueStoreSpec.scala | 19 +- .../batch2/SingleParentCasperSpec.scala | 1 - .../rchain/casper/batch2/ValidateTest.scala | 173 +++--- .../engine/LfsBlockRequesterEffectsSpec.scala | 21 +- .../engine/LfsStateRequesterEffectsSpec.scala | 19 +- .../engine/RunningHandleHasBlockSpec.scala | 57 +- .../casper/genesis/AuthKeyUpdateSpec.scala | 1 - .../rchain/casper/genesis/GenesisTest.scala | 80 +-- .../genesis/PosMultiSigTransferSpec.scala | 1 - .../rchain/casper/genesis/PosUpdateSpec.scala | 1 - .../casper/genesis/RegistryUpdateSpec.scala | 1 - .../TimeoutResultCollectorSpec.scala | 1 - .../helper/BlockDagStorageFixture.scala | 36 +- .../rchain/casper/helper/BlockGenerator.scala | 5 +- .../coop/rchain/casper/helper/RhoSpec.scala | 25 +- .../coop/rchain/casper/helper/TestNode.scala | 23 +- .../rchain/casper/helper/TestRhoRuntime.scala | 1 - .../merging/MergeNumberChannelSpec.scala | 20 +- .../rchain/casper/merging/MergingCases.scala | 21 +- .../rchain/casper/rholang/DeployIdTest.scala | 17 +- .../casper/rholang/DeployerIdTest.scala | 15 +- .../casper/rholang/InterpreterUtilTest.scala | 123 ++-- .../rchain/casper/rholang/Resources.scala | 8 +- .../casper/rholang/RuntimeManagerTest.scala | 52 +- .../rchain/casper/rholang/RuntimeSpec.scala | 172 +++--- .../sync/BlockRetrieverRequesAllSpec.scala | 78 +-- .../casper/sync/BlockRetrieverSpec.scala | 58 +- .../rchain/casper/util/GenesisBuilder.scala | 26 +- .../casper/util/comm/CommUtilSpec.scala | 36 +- .../util/comm/TransportLayerTestImpl.scala | 1 + .../util/scalatest/Fs2StreamMatchers.scala | 10 +- .../comm/discovery/GrpcKademliaRPC.scala | 13 +- .../discovery/GrpcKademliaRPCServer.scala | 3 +- .../coop/rchain/comm/discovery/package.scala | 9 +- .../rchain/comm/transport/GrpcTransport.scala | 5 +- .../comm/transport/GrpcTransportClient.scala | 20 +- .../transport/GrpcTransportReceiver.scala | 20 
+- .../comm/transport/GrpcTransportServer.scala | 53 +- .../rchain/comm/transport/StreamHandler.scala | 2 - .../comm/transport/StreamObservable.scala | 1 - .../comm/discovery/GrpcKademliaRPCSpec.scala | 48 +- .../comm/rp/HandleProtocolHandshakeSpec.scala | 17 +- .../comm/transport/GrpcTransportSpec.scala | 50 +- .../transport/PacketStoreRestoreSpec.scala | 10 +- .../comm/transport/StreamHandlerSpec.scala | 33 +- .../transport/TcpTransportLayerSpec.scala | 38 +- .../transport/TransportLayerRuntime.scala | 5 +- .../scala/coop/rchain/graphz/GraphzSpec.scala | 80 ++- .../main/scala/coop/rchain/node/Main.scala | 23 +- .../rchain/node/api/DeployGrpcServiceV1.scala | 3 +- .../node/api/ProposeGrpcServiceV1.scala | 3 +- .../scala/coop/rchain/node/api/package.scala | 11 +- .../diagnostics/BatchInfluxDBReporter.scala | 4 +- .../rchain/node/effects/JLineConsoleIO.scala | 1 - .../coop/rchain/node/effects/package.scala | 20 +- .../node/revvaultexport/StateBalances.scala | 6 +- .../mainnet1/StateBalanceMain.scala | 9 +- .../mainnet1/reporting/MergeBalanceMain.scala | 40 +- .../reporting/TransactionBalanceMain.scala | 10 +- .../reporting/TransactionBalances.scala | 6 +- .../rchain/node/runtime/GrpcServices.scala | 3 +- .../rchain/node/runtime/NetworkServers.scala | 58 +- .../coop/rchain/node/runtime/NodeMain.scala | 9 +- .../rchain/node/runtime/NodeRuntime.scala | 52 +- .../coop/rchain/node/runtime/Setup.scala | 12 +- .../scala/coop/rchain/node/web/package.scala | 16 +- .../coop/rchain/node/TransactionAPISpec.scala | 20 +- .../node/mergeablity/MergeabilityRules.scala | 38 +- .../TreeHashMapMergeabilitySpec.scala | 25 +- .../rchain/node/perf/HistoryGenKeySpec.scala | 15 +- .../revvaultexport/RhoTrieTraverserTest.scala | 15 +- .../VaultBalanceGetterTest.scala | 6 +- project/Dependencies.scala | 14 +- .../rholang/interpreter/RhoRuntime.scala | 8 +- .../rholang/interpreter/RholangCLI.scala | 51 +- .../rholang/interpreter/storage/package.scala | 2 +- 
.../coop/rchain/rholang/InterpreterSpec.scala | 105 ++-- .../scala/coop/rchain/rholang/PeekSpec.scala | 154 +++-- .../scala/coop/rchain/rholang/Resources.scala | 23 +- .../coop/rchain/rholang/StackSafetySpec.scala | 26 +- .../rchain/rholang/StoragePrinterSpec.scala | 89 ++- .../interpreter/BigIntNormalizerSpec.scala | 257 ++++---- .../CostAccountingReducerTest.scala | 50 +- .../interpreter/CryptoChannelsSpec.scala | 45 +- .../interpreter/PersistentStoreTester.scala | 64 +- .../rholang/interpreter/ReduceSpec.scala | 190 +++--- .../rholang/interpreter/ReplaySpec.scala | 36 +- .../rholang/interpreter/RuntimeSpec.scala | 24 +- .../interpreter/ShortCircuitBooleanSpec.scala | 53 +- .../CostAccountingPropertyTest.scala | 24 +- .../accounting/CostAccountingSpec.scala | 50 +- .../accounting/RholangMethodsCostsSpec.scala | 46 +- .../interpreter/matcher/MatchTest.scala | 6 +- .../matcher/MatcherMonadSpec.scala | 18 +- .../merging/RholangMergingLogicSpec.scala | 6 +- .../storage/ChargingRSpaceTest.scala | 59 +- .../scala/rholang/rosette/CompilerTests.scala | 32 +- .../coop/rchain/rspace/bench/BasicBench.scala | 7 +- .../rspace/bench/EvalBenchStateBase.scala | 17 +- .../rchain/rspace/bench/RSpaceBench.scala | 3 +- .../rspace/bench/ReplayRSpaceBench.scala | 3 +- .../rspace/bench/RhoBenchBaseState.scala | 24 +- .../bench/RhoReplayBenchBaseState.scala | 1 + .../scala/coop/rchain/rspace/RSpace.scala | 36 +- .../scala/coop/rchain/rspace/RSpaceOps.scala | 10 +- .../coop/rchain/rspace/ReplayRSpace.scala | 18 +- .../coop/rchain/rspace/ReportingRspace.scala | 16 +- .../rspace/examples/AddressBookExample.scala | 11 +- .../rchain/rspace/ExportImportTests.scala | 103 ++-- .../coop/rchain/rspace/HotStoreSpec.scala | 17 +- .../rchain/rspace/ReplayRSpaceTests.scala | 86 +-- .../rchain/rspace/StorageActionsTests.scala | 10 +- .../rchain/rspace/StorageExamplesTests.scala | 16 +- .../coop/rchain/rspace/StorageTestsBase.scala | 38 +- .../rchain/rspace/TestImplicitHelpers.scala | 2 +- 
.../rspace/concurrent/MultiLockTest.scala | 26 +- .../rspace/concurrent/TwoStepLockTest.scala | 20 +- .../rspace/history/Blake2b256HashTests.scala | 2 +- .../rspace/history/HistoryActionTests.scala | 21 +- .../HistoryRepositoryGenerativeSpec.scala | 43 +- .../history/HistoryRepositorySpec.scala | 41 +- .../rchain/rspace/history/RadixTreeSpec.scala | 37 +- .../coop/rchain/grpcmonix/GrpcMonix.scala | 111 ---- .../scala/coop/rchain/monix/Monixable.scala | 35 -- .../coop/rchain/monix/MonixableSyntax.scala | 20 - .../coop/rchain/shared/MVarMonadState.scala | 41 -- .../coop/rchain/shared/RChainScheduler.scala | 33 +- .../shared/UncaughtExceptionLogger.scala | 13 - .../scala/coop/rchain/shared/package.scala | 2 - .../rchain/shared/Fs2ExtensionsSpec.scala | 14 +- .../coop/rchain/shared/scalatestcontrib.scala | 4 +- .../store/InMemoryKeyValueStoreSpec.scala | 27 +- 154 files changed, 3362 insertions(+), 3624 deletions(-) delete mode 100644 shared/src/main/scala/coop/rchain/grpcmonix/GrpcMonix.scala delete mode 100644 shared/src/main/scala/coop/rchain/monix/Monixable.scala delete mode 100644 shared/src/main/scala/coop/rchain/monix/MonixableSyntax.scala delete mode 100644 shared/src/main/scala/coop/rchain/shared/MVarMonadState.scala delete mode 100644 shared/src/main/scala/coop/rchain/shared/UncaughtExceptionLogger.scala diff --git a/build.sbt b/build.sbt index e8e67939cbe..19c08bebba6 100644 --- a/build.sbt +++ b/build.sbt @@ -132,7 +132,6 @@ lazy val shared = (project in file("shared")) catsTagless, fs2Core, lz4, - monix, scodecCore, scodecCats, scodecBits, @@ -142,7 +141,8 @@ lazy val shared = (project in file("shared")) catsLawsTest, catsLawsTestkitTest, enumeratum, - jaxb + jaxb, + monix // remove when monix TestSheduler is replaced ) ) .dependsOn(sdk) @@ -171,7 +171,6 @@ lazy val casper = (project in file("casper")) catsCore, catsRetry, catsMtl, - monix, fs2Core, fs2Io, scalacheck % "slowcooker" @@ -213,7 +212,6 @@ lazy val comm = (project in file("comm")) catsCore, 
catsMtl, catsTagless, - monix, guava ) ) @@ -302,7 +300,8 @@ lazy val node = (project in file("node")) scalapbRuntimegGrpc, circeParser, circeGenericExtras, - pureconfig + pureconfig, + monix // remove when BatchInfluxDBReporter is adjusted to work w/o monix ), buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion, git.gitHeadCommit), buildInfoPackage := "coop.rchain.node", @@ -441,7 +440,6 @@ lazy val rholang = (project in file("rholang")) libraryDependencies ++= commonDependencies ++ Seq( catsMtl, catsEffect, - monix, scallop, lightningj, catsLawsTest, @@ -506,7 +504,8 @@ lazy val rspace = (project in file("rspace")) catsCore, fs2Core, scodecCore, - scodecBits + scodecBits, + monix // remove when AtomicAny migrated to Ref ), /* Tutorial */ /* Publishing Settings */ diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala index 19466b56043..c3958a6fcce 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala @@ -4,7 +4,6 @@ import cats.effect.{ConcurrentEffect, Sync} import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.propose.v1._ import coop.rchain.models.either.implicits._ -import coop.rchain.monix.Monixable import coop.rchain.shared.syntax._ import io.grpc.netty.NettyChannelBuilder import io.grpc.{ManagedChannel, ManagedChannelBuilder, Metadata} diff --git a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala index 57b0a070f92..0f931f9e0bf 100644 --- a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala @@ -86,7 +86,7 @@ object ReportingCasper { def rhoReporter[F[_]: Concurrent: ContextShift: 
Parallel: BlockDagStorage: Log: Metrics: Span]( rspaceStore: RSpaceStore[F], shardId: String - )(implicit scheduler: ExecutionContext): ReportingCasper[F] = + ): ReportingCasper[F] = new ReportingCasper[F] { override def trace(block: BlockMessage): F[ReplayResult] = for { @@ -171,8 +171,6 @@ object ReportingRuntime { def createReportingRSpace[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( store: RSpaceStore[F] - )( - implicit scheduler: ExecutionContext ): F[RhoReportingRspace[F]] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: RSpaceMatch[F, BindPattern, ListParWithRandom] = matchListPar[F] diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala index e99bef02c6c..96d037e024e 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala @@ -281,25 +281,28 @@ object RuntimeManager { store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, - executionTracker: BlockExecutionTracker[F] - )( - implicit ec: ExecutionContext + executionTracker: BlockExecutionTracker[F], + rholangEC: ExecutionContext ): F[RuntimeManagerImpl[F]] = - createWithHistory(store, mergeableStore, mergeableTagName, executionTracker).map(_._1) + createWithHistory(store, mergeableStore, mergeableTagName, executionTracker, rholangEC).map( + _._1 + ) def createWithHistory[F[_]: Concurrent: ContextShift: Parallel: Metrics: Span: Log]( store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, - executionTracker: BlockExecutionTracker[F] - )( - implicit ec: ExecutionContext + executionTracker: BlockExecutionTracker[F], + rholangEC: ExecutionContext ): F[(RuntimeManagerImpl[F], RhoHistoryRepository[F])] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: rspace.Match[F, BindPattern, ListParWithRandom] = 
matchListPar[F] RSpace - .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation](store) + .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( + store, + rholangEC + ) .flatMap { case (rSpacePlay, rSpaceReplay) => val historyRepo = rSpacePlay.historyRepo diff --git a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala index 2568462b06d..cba3ac9875b 100644 --- a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala +++ b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala @@ -1,6 +1,6 @@ package coop.rchain.casper -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.dag.BlockDagStorage.DeployId import coop.rchain.blockstorage.syntax._ @@ -9,8 +9,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.protocol.{BlockMessage, DeployData} import coop.rchain.casper.util.{ConstructDeploy, GenesisBuilder} import coop.rchain.crypto.signatures.Signed -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalacheck._ import org.scalacheck.commands.Commands @@ -43,7 +41,7 @@ object HashSetCasperActions { ): Effect[Either[ParsingError, DeployId]] = node.deploy(deployData) - def create(node: TestNode[Effect]): Task[BlockMessage] = + def create(node: TestNode[Effect]): IO[BlockMessage] = for { createBlockResult1 <- node.proposeSync block <- node.blockStore.getUnsafe(createBlockResult1) @@ -61,7 +59,7 @@ object HashSetCasperActions { ConstructDeploy.sourceDeploy(s"new x in { x!(0) }", ts, shardId = "root") implicit class EffectOps[A](f: Effect[A]) { - def result: A = f.runSyncUnsafe() + def result: A = f.unsafeRunSync } } diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala index a6de0abd299..c59ba2ab111 100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.addblock -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper._ @@ -18,8 +18,6 @@ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.scalatestcontrib._ import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.Inspectors import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -34,7 +32,7 @@ class MultiParentCasperAddBlockSpec extends AnyFlatSpec with Matchers with Inspe import coop.rchain.casper.util.GenesisBuilder._ implicit val timeEff = new LogicalTime[Effect] - implicit val s = Sync[Task] + implicit val s = Sync[IO] val genesis = buildGenesis() private val SHARD_ID = genesis.genesisBlock.shardId @@ -67,7 +65,7 @@ class MultiParentCasperAddBlockSpec extends AnyFlatSpec with Matchers with Inspe // } yield result // } // val threadStatuses: (ValidBlockProcessing, ValidBlockProcessing) = -// testProgram.runSyncUnsafe() +// testProgram.unsafeRunSync // // threadStatuses should matchPattern { // case (Left(CasperIsBusy), Right(Valid)) | (Right(Valid), Left(CasperIsBusy)) => @@ -495,6 +493,6 @@ class MultiParentCasperAddBlockSpec extends AnyFlatSpec with Matchers with Inspe .signBlock( blockThatPointsToInvalidBlock ) - .pure[Task] + .pure[IO] } } diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala index a6f9f45624b..86567718964 
100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.addblock import cats.Applicative +import cats.effect.IO import cats.effect.concurrent.Deferred import cats.syntax.all._ import coop.rchain.casper._ @@ -14,10 +15,9 @@ import coop.rchain.models.Validator.Validator import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ class ProposerSpec extends AnyFlatSpec with Matchers with BlockDagStorageFixture { @@ -47,23 +47,23 @@ class ProposerSpec extends AnyFlatSpec with Matchers with BlockDagStorageFixture val dummyValidatorIdentity = ValidatorIdentity(randomValidatorSks(1)) /** implicits for creating Proposer instance */ - implicit val logEff: Log[Task] = Log.log[Task] - implicit val spanEff: Span[Task] = NoopSpan[Task] - implicit val metrics = new MetricsNOP[Task]() + implicit val logEff: Log[IO] = Log.log[IO] + implicit val spanEff: Span[IO] = NoopSpan[IO] + implicit val metrics = new MetricsNOP[IO]() it should "reject to propose if proposer is not active validator" in effectTest { - val p = new Proposer[Task]( - checkActiveValidator = alwaysNotActiveF[Task], + val p = new Proposer[IO]( + checkActiveValidator = alwaysNotActiveF[IO], // other params are permissive - getLatestSeqNumber = getLatestSeqNumber[Task], - createBlock = createBlockF[Task], - validateBlock = alwaysSuccesfullValidation[Task], - proposeEffect = proposeEffect[Task](0), + getLatestSeqNumber = getLatestSeqNumber[IO], + createBlock = createBlockF[IO], + validateBlock = alwaysSuccesfullValidation[IO], + proposeEffect = proposeEffect[IO](0), validator = dummyValidatorIdentity ) for 
{ - d <- Deferred[Task, ProposerResult] + d <- Deferred[IO, ProposerResult] pr <- p.propose(false, d) (r, b) = pr } yield assert(r == ProposeResult.notBonded && b.isEmpty) @@ -71,35 +71,35 @@ class ProposerSpec extends AnyFlatSpec with Matchers with BlockDagStorageFixture it should "shut down the node if block created is not successfully replayed" in { an[Throwable] should be thrownBy { - val p = new Proposer[Task]( - validateBlock = alwaysUnsuccesfullValidation[Task], + val p = new Proposer[IO]( + validateBlock = alwaysUnsuccesfullValidation[IO], // other params are permissive - checkActiveValidator = alwaysActiveF[Task], - getLatestSeqNumber = getLatestSeqNumber[Task], - createBlock = createBlockF[Task], - proposeEffect = proposeEffect[Task](0), + checkActiveValidator = alwaysActiveF[IO], + getLatestSeqNumber = getLatestSeqNumber[IO], + createBlock = createBlockF[IO], + proposeEffect = proposeEffect[IO](0), validator = dummyValidatorIdentity ) (for { - d <- Deferred[Task, ProposerResult] + d <- Deferred[IO, ProposerResult] _ <- p.propose(false, d) - } yield ()).runSyncUnsafe() + } yield ()).unsafeRunSync } } it should "execute propose effects if block created successfully replayed" in effectTest { - val p = new Proposer[Task]( - validateBlock = alwaysSuccesfullValidation[Task], - checkActiveValidator = alwaysActiveF[Task], - getLatestSeqNumber = getLatestSeqNumber[Task], - createBlock = createBlockF[Task], - proposeEffect = proposeEffect[Task](10), + val p = new Proposer[IO]( + validateBlock = alwaysSuccesfullValidation[IO], + checkActiveValidator = alwaysActiveF[IO], + getLatestSeqNumber = getLatestSeqNumber[IO], + createBlock = createBlockF[IO], + proposeEffect = proposeEffect[IO](10), validator = dummyValidatorIdentity ) for { - d <- Deferred[Task, ProposerResult] + d <- Deferred[IO, ProposerResult] pr <- p.propose(false, d) (r, b) = pr } yield assert( diff --git a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala 
b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala index 8fcf4b959ba..54e2304798e 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala @@ -1,7 +1,8 @@ package coop.rchain.casper.api -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.effect.concurrent.Ref +import cats.effect.testing.scalatest.AsyncIOSpec import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -20,287 +21,287 @@ import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ import coop.rchain.models.{BlockMetadata, FringeData} import coop.rchain.shared.{Log, Time} -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito, Mockito, MockitoSugar} import org.scalatest._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap -class BlockQueryResponseAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with EitherValues - with BlockDagStorageFixture - with BlockApiFixture - with IdiomaticMockito - with IdiomaticMockitoCats - with ArgumentMatchersSugar { - implicit val timeEff: Time[Task] = Time.fromTimer[Task] - implicit val spanEff: NoopSpan[Task] = NoopSpan[Task]() - implicit val log: Log[Task] = mock[Log[Task]] - implicit val runtimeManager: RuntimeManager[Task] = mock[RuntimeManager[Task]] - - private val tooShortQuery = "12345" - private val badTestHashQuery = "1234acd" - private val invalidHexQuery = "No such a hash" - private val unknownDeploy = ByteString.copyFromUtf8("asdfQwertyUiopxyzcbv") - - private val genesisBlock: BlockMessage = getRandomBlock(setJustifications = Seq().some) - - private val 
deployCount = 10 - private val randomDeploys = - (0 until deployCount).toList - .traverse(i => ConstructDeploy.basicProcessedDeploy[Task](i)) - .runSyncUnsafe() - - private val senderString: String = - "3456789101112131415161718192345678910111213141516171819261718192113456789101112131415161718192345678910111213141516171819261718192" - private val sender: ByteString = senderString.unsafeHexToByteString - private val bondsValidator = (sender, 1L) - - private val secondBlock: BlockMessage = - getRandomBlock( - setValidator = sender.some, - setDeploys = randomDeploys.some, - setJustifications = List(genesisBlock.blockHash).some, - setBonds = Map(bondsValidator).some - ) - - "getBlock" should "return successful block info response" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - _ <- prepareDagStorage[Task] - blockApi <- createBlockApi[Task]("", 1) - _ = Mockito.clearInvocations(bs, bds) - hash = secondBlock.blockHash.toHexString - blockQueryResponse <- blockApi.getBlock(hash) - } yield { - blockQueryResponse shouldBe 'right - val blockInfo = blockQueryResponse.value - blockInfo.deploys shouldBe randomDeploys.map(_.toDeployInfo) - blockInfo.blockInfo shouldBe BlockApi.getLightBlockInfo(secondBlock) - - bs.get(Seq(secondBlock.blockHash)) wasCalled once - verifyNoMoreInteractions(bs) - - bds.insert(*, *) wasNever called - bds.getRepresentation wasCalled twice - bds.lookupByDeployId(*) wasNever called - } - } - - it should "return error when no block exists" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - blockApi <- createBlockApi[Task]("", 1) - hash = badTestHashQuery - blockQueryResponse <- blockApi.getBlock(hash) - } yield { - blockQueryResponse shouldBe 'left - blockQueryResponse.left.value shouldBe s"Error: Failure to find block with hash: $badTestHashQuery" - - bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) wasCalled once - 
verifyNoMoreInteractions(bs) - - bds.insert(*, *) wasNever called - bds.getRepresentation wasCalled once - bds.lookupByDeployId(*) wasNever called - } - } - - it should "return error when hash is invalid hex string" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - blockApi <- createBlockApi[Task]("", 1) - hash = invalidHexQuery - blockQueryResponse <- blockApi.getBlock(hash) - } yield { - blockQueryResponse shouldBe 'left - blockQueryResponse.left.value shouldBe s"Input hash value is not valid hex string: $invalidHexQuery" - - verifyNoMoreInteractions(bs) - - bds.insert(*, *) wasNever called - bds.getRepresentation wasNever called - bds.lookupByDeployId(*) wasNever called - } - } - - it should "return error when hash is to short" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - blockApi <- createBlockApi[Task]("", 1) - hash = tooShortQuery - blockQueryResponse <- blockApi.getBlock(hash) - } yield { - blockQueryResponse shouldBe 'left - blockQueryResponse.left.value shouldBe s"Input hash value must be at least 6 characters: $tooShortQuery" - - verifyNoMoreInteractions(bs) - - bds.insert(*, *) wasNever called - bds.getRepresentation wasNever called - bds.lookupByDeployId(*) wasNever called - } - } - - "findDeploy" should "return successful block info response when a block contains the deploy with given signature" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - _ <- prepareDagStorage[Task] - blockApi <- createBlockApi[Task]("", 1) - _ = Mockito.clearInvocations(bs, bds) - deployId = randomDeploys.head.deploy.sig - blockQueryResponse <- blockApi.findDeploy(deployId) - } yield { - blockQueryResponse shouldBe 'right - blockQueryResponse.value shouldBe BlockApi.getLightBlockInfo(secondBlock) - - bs.get(Seq(secondBlock.blockHash)) wasCalled once - verifyNoMoreInteractions(bs) - - bds.insert(*, 
*) wasNever called - bds.getRepresentation wasNever called - bds.lookupByDeployId(deployId) wasCalled once - } - } - - it should "return an error when no block contains the deploy with the given signature" in { - implicit val bs = createBlockStore[Task] - implicit val bds = createBlockDagStorage[Task] - - for { - blockApi <- createBlockApi[Task]("", 1) - blockQueryResponse <- blockApi.findDeploy(unknownDeploy) - } yield { - blockQueryResponse shouldBe 'left - blockQueryResponse.left.value shouldBe - s"Couldn't find block containing deploy with id: ${PrettyPrinter.buildStringNoLimit(unknownDeploy)}" - - verifyNoMoreInteractions(bs) - - bds.insert(*, *) wasNever called - bds.getRepresentation wasNever called - bds.lookupByDeployId(unknownDeploy) wasCalled once - } - } - - private def createBlockStore[F[_]: Sync] = { - val bs = mock[BlockStore[F]] - bs.put(Seq((genesisBlock.blockHash, genesisBlock))) returns ().pure - bs.put(Seq((secondBlock.blockHash, secondBlock))) returns ().pure - bs.get(Seq(secondBlock.blockHash)) returnsF Seq(secondBlock.some) - bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) returnsF Seq(None) - bs - } - - private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { - val genesisHash: ByteString = RuntimeManager.emptyStateHashFixed - - val state = Ref.unsafe[F, DagRepresentation]( - DagRepresentation( - Set(), - Map(), - SortedMap(), - DagMessageState(), - Map( - Set(genesisHash) -> FringeData( - FringeData.fringeHash(Set.empty), - Set.empty, - Set.empty, - genesisHash.toBlake2b256Hash, - Set.empty, - Set.empty, - Set.empty - ) - ) - ) - ) - - val bds = mock[BlockDagStorage[F]] - - bds.insert(*, *) answers { (bmd: BlockMetadata, b: BlockMessage) => - state.update { s => - val newDagSet = s.dagSet + b.blockHash - - val newChildMap = b.justifications.foldLeft(s.childMap) { - case (m, h) => m + (h -> (m.getOrElse(h, Set.empty) + b.blockHash)) - } + (b.blockHash -> Set.empty[BlockHash]) - - val newHeightMap = s.heightMap + 
(b.blockNumber -> (s.heightMap - .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) - - val seen = b.justifications - .flatMap(h => s.dagMessageState.msgMap(h).seen) - .toSet ++ b.justifications + b.blockHash - - val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) - - val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { - case (acc, (_, msg)) => - acc + acc - .find(_.sender == msg.sender) - .map(m => if (msg.height > m.height) msg else m) - .getOrElse(msg) - } - val newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) - - s.copy( - dagSet = newDagSet, - childMap = newChildMap, - heightMap = newHeightMap, - dagMessageState = newDagMessageState - ) - } - } - - bds.getRepresentation returns state.get - - bds.lookupByDeployId(randomDeploys.head.deploy.sig) returnsF secondBlock.blockHash.some - bds.lookupByDeployId(unknownDeploy) returnsF None - - bds - } - - // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) - def toMessage( - m: BlockMessage, - seen: Set[BlockHash] = Set.empty[BlockHash] - ): Message[BlockHash, Validator] = - Message[BlockHash, Validator]( - m.blockHash, - m.blockNumber, - m.sender, - m.seqNum, - m.bonds, - m.justifications.toSet, - Set(), - seen - ) - - private def prepareDagStorage[F[_]: Sync: BlockDagStorage: BlockStore]: F[Unit] = { - import coop.rchain.blockstorage.syntax._ - def insertToDag(b: BlockMessage, stateHash: StateHash): F[Unit] = - BlockDagStorage[F].insert(BlockMetadata.fromBlock(b).copy(fringeStateHash = stateHash), b) - for { - _ <- List(genesisBlock, secondBlock).traverse(BlockStore[F].put(_)) - _ <- insertToDag(genesisBlock, genesisBlock.postStateHash) - _ <- insertToDag(secondBlock, RuntimeManager.emptyStateHashFixed) - } yield () - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class BlockQueryResponseAPITest +// extends AsyncFlatSpec +// 
with AsyncIOSpec +// with Matchers +// with EitherValues +// with BlockDagStorageFixture +// with BlockApiFixture +// with IdiomaticMockito +// with IdiomaticMockitoCats +// with ArgumentMatchersSugar { +// implicit val timeEff: Time[IO] = Time.fromTimer[IO] +// implicit val spanEff: NoopSpan[IO] = NoopSpan[IO]() +// implicit val log: Log[IO] = mock[Log[IO]] +// implicit val runtimeManager: RuntimeManager[IO] = mock[RuntimeManager[IO]] +// +// private val tooShortQuery = "12345" +// private val badTestHashQuery = "1234acd" +// private val invalidHexQuery = "No such a hash" +// private val unknownDeploy = ByteString.copyFromUtf8("asdfQwertyUiopxyzcbv") +// +// private val genesisBlock: BlockMessage = getRandomBlock(setJustifications = Seq().some) +// +// private val deployCount = 10 +// private val randomDeploys = +// (0 until deployCount).toList +// .traverse(i => ConstructDeploy.basicProcessedDeploy[IO](i)) +// .unsafeRunSync +// +// private val senderString: String = +// "3456789101112131415161718192345678910111213141516171819261718192113456789101112131415161718192345678910111213141516171819261718192" +// private val sender: ByteString = senderString.unsafeHexToByteString +// private val bondsValidator = (sender, 1L) +// +// private val secondBlock: BlockMessage = +// getRandomBlock( +// setValidator = sender.some, +// setDeploys = randomDeploys.some, +// setJustifications = List(genesisBlock.blockHash).some, +// setBonds = Map(bondsValidator).some +// ) +// +// "getBlock" should "return successful block info response" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// _ <- prepareDagStorage[IO] +// blockApi <- createBlockApi[IO]("", 1) +// _ = Mockito.clearInvocations(bs, bds) +// hash = secondBlock.blockHash.toHexString +// blockQueryResponse <- blockApi.getBlock(hash) +// } yield { +// blockQueryResponse shouldBe 'right +// val blockInfo = blockQueryResponse.value +// blockInfo.deploys shouldBe 
randomDeploys.map(_.toDeployInfo) +// blockInfo.blockInfo shouldBe BlockApi.getLightBlockInfo(secondBlock) +// +// bs.get(Seq(secondBlock.blockHash)) wasCalled once +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasCalled twice +// bds.lookupByDeployId(*) wasNever called +// } +// } +// +// it should "return error when no block exists" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// blockApi <- createBlockApi[IO]("", 1) +// hash = badTestHashQuery +// blockQueryResponse <- blockApi.getBlock(hash) +// } yield { +// blockQueryResponse shouldBe 'left +// blockQueryResponse.left.value shouldBe s"Error: Failure to find block with hash: $badTestHashQuery" +// +// bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) wasCalled once +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasCalled once +// bds.lookupByDeployId(*) wasNever called +// } +// } +// +// it should "return error when hash is invalid hex string" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// blockApi <- createBlockApi[IO]("", 1) +// hash = invalidHexQuery +// blockQueryResponse <- blockApi.getBlock(hash) +// } yield { +// blockQueryResponse shouldBe 'left +// blockQueryResponse.left.value shouldBe s"Input hash value is not valid hex string: $invalidHexQuery" +// +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasNever called +// bds.lookupByDeployId(*) wasNever called +// } +// } +// +// it should "return error when hash is to short" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// blockApi <- createBlockApi[IO]("", 1) +// hash = tooShortQuery +// blockQueryResponse <- blockApi.getBlock(hash) +// } yield { +// blockQueryResponse shouldBe 'left +// 
blockQueryResponse.left.value shouldBe s"Input hash value must be at least 6 characters: $tooShortQuery" +// +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasNever called +// bds.lookupByDeployId(*) wasNever called +// } +// } +// +// "findDeploy" should "return successful block info response when a block contains the deploy with given signature" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// _ <- prepareDagStorage[IO] +// blockApi <- createBlockApi[IO]("", 1) +// _ = Mockito.clearInvocations(bs, bds) +// deployId = randomDeploys.head.deploy.sig +// blockQueryResponse <- blockApi.findDeploy(deployId) +// } yield { +// blockQueryResponse shouldBe 'right +// blockQueryResponse.value shouldBe BlockApi.getLightBlockInfo(secondBlock) +// +// bs.get(Seq(secondBlock.blockHash)) wasCalled once +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasNever called +// bds.lookupByDeployId(deployId) wasCalled once +// } +// } +// +// it should "return an error when no block contains the deploy with the given signature" in { +// implicit val bs = createBlockStore[IO] +// implicit val bds = createBlockDagStorage[IO] +// +// for { +// blockApi <- createBlockApi[IO]("", 1) +// blockQueryResponse <- blockApi.findDeploy(unknownDeploy) +// } yield { +// blockQueryResponse shouldBe 'left +// blockQueryResponse.left.value shouldBe +// s"Couldn't find block containing deploy with id: ${PrettyPrinter.buildStringNoLimit(unknownDeploy)}" +// +// verifyNoMoreInteractions(bs) +// +// bds.insert(*, *) wasNever called +// bds.getRepresentation wasNever called +// bds.lookupByDeployId(unknownDeploy) wasCalled once +// } +// } +// +// private def createBlockStore[F[_]: Sync] = { +// val bs = mock[BlockStore[F]] +// bs.put(Seq((genesisBlock.blockHash, genesisBlock))) returns ().pure +// bs.put(Seq((secondBlock.blockHash, 
secondBlock))) returns ().pure +// bs.get(Seq(secondBlock.blockHash)) returnsF Seq(secondBlock.some) +// bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) returnsF Seq(None) +// bs +// } +// +// private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { +// val genesisHash: ByteString = RuntimeManager.emptyStateHashFixed +// +// val state = Ref.unsafe[F, DagRepresentation]( +// DagRepresentation( +// Set(), +// Map(), +// SortedMap(), +// DagMessageState(), +// Map( +// Set(genesisHash) -> FringeData( +// FringeData.fringeHash(Set.empty), +// Set.empty, +// Set.empty, +// genesisHash.toBlake2b256Hash, +// Set.empty, +// Set.empty, +// Set.empty +// ) +// ) +// ) +// ) +// +// val bds = mock[BlockDagStorage[F]] +// +// bds.insert(*, *) answers { (bmd: BlockMetadata, b: BlockMessage) => +// state.update { s => +// val newDagSet = s.dagSet + b.blockHash +// +// val newChildMap = b.justifications.foldLeft(s.childMap) { +// case (m, h) => m + (h -> (m.getOrElse(h, Set.empty) + b.blockHash)) +// } + (b.blockHash -> Set.empty[BlockHash]) +// +// val newHeightMap = s.heightMap + (b.blockNumber -> (s.heightMap +// .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) +// +// val seen = b.justifications +// .flatMap(h => s.dagMessageState.msgMap(h).seen) +// .toSet ++ b.justifications + b.blockHash +// +// val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) +// +// val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { +// case (acc, (_, msg)) => +// acc + acc +// .find(_.sender == msg.sender) +// .map(m => if (msg.height > m.height) msg else m) +// .getOrElse(msg) +// } +// val newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) +// +// s.copy( +// dagSet = newDagSet, +// childMap = newChildMap, +// heightMap = newHeightMap, +// dagMessageState = newDagMessageState +// ) +// } +// } +// +// bds.getRepresentation returns state.get +// +// bds.lookupByDeployId(randomDeploys.head.deploy.sig) 
returnsF secondBlock.blockHash.some +// bds.lookupByDeployId(unknownDeploy) returnsF None +// +// bds +// } +// +// // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) +// def toMessage( +// m: BlockMessage, +// seen: Set[BlockHash] = Set.empty[BlockHash] +// ): Message[BlockHash, Validator] = +// Message[BlockHash, Validator]( +// m.blockHash, +// m.blockNumber, +// m.sender, +// m.seqNum, +// m.bonds, +// m.justifications.toSet, +// Set(), +// seen +// ) +// +// private def prepareDagStorage[F[_]: Sync: BlockDagStorage: BlockStore]: F[Unit] = { +// import coop.rchain.blockstorage.syntax._ +// def insertToDag(b: BlockMessage, stateHash: StateHash): F[Unit] = +// BlockDagStorage[F].insert(BlockMetadata.fromBlock(b).copy(fringeStateHash = stateHash), b) +// for { +// _ <- List(genesisBlock, secondBlock).traverse(BlockStore[F].put(_)) +// _ <- insertToDag(genesisBlock, genesisBlock.postStateHash) +// _ <- insertToDag(secondBlock, RuntimeManager.emptyStateHashFixed) +// } yield () +// } +//} diff --git a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala index af89a03e23f..b6333d9d9b6 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.api -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore @@ -15,241 +15,242 @@ import coop.rchain.models.Validator.Validator import coop.rchain.models.syntax._ import coop.rchain.models.{BlockMetadata, FringeData} import coop.rchain.shared.Log -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito, Mockito} import org.scalatest.EitherValues import 
org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers +import cats.effect.testing.scalatest.AsyncIOSpec +import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) // See [[/docs/casper/images/no_finalizable_block_mistake_with_no_disagreement_check.png]] -class BlocksResponseAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with EitherValues - with BlockGenerator - with BlockDagStorageFixture - with BlockApiFixture - with IdiomaticMockito - with ArgumentMatchersSugar { - - implicit val log: Log[Task] = new Log.NOPLog[Task]() - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - - val v1: Validator = generateValidator("Validator One") - val v2: Validator = generateValidator("Validator Two") - val v3: Validator = generateValidator("Validator Three") - val v1Bond: (Validator, Long) = (v1, 25L) - val v2Bond: (Validator, Long) = (v2, 20L) - val v3Bond: (Validator, Long) = (v3, 15L) - val bonds: Map[Validator, Long] = Map(v1Bond, v2Bond, v3Bond) - val maxBlockLimit = 50 - - private def createDagWith8Blocks[F[_]: Sync: BlockStore: BlockDagStorage] = - for { - genesis <- createGenesis[F](bonds = bonds) - b2 <- createBlock[F]( - v2, - bonds, - Seq(genesis.blockHash) - ) - b3 <- createBlock[F]( - v1, - bonds, - Seq(genesis.blockHash) - ) - b4 <- createBlock[F]( - v3, - bonds, - Seq(genesis.blockHash, b2.blockHash) - ) - b5 <- createBlock[F]( - v2, - bonds, - Seq(b3.blockHash, b2.blockHash, genesis.blockHash) - ) - b6 <- createBlock[F]( - v1, - bonds, - Seq(b3.blockHash, b2.blockHash, b4.blockHash) - ) - b7 <- createBlock[F]( - v3, - bonds, - Seq(b3.blockHash, b5.blockHash, b4.blockHash) - ) - b8 <- createBlock[F]( - v2, - bonds, - Seq(b6.blockHash, b5.blockHash, b4.blockHash) - ) - } yield List(genesis, b2, b3, b4, b5, b6, b7, b8) - - "getBlocks" should "return all blocks" in { - implicit val 
(blockStore, blockDagStorage, runtimeManager) = createMocks[Task] - - for { - blocks <- createDagWith8Blocks[Task] - genesis = blocks.head - blockApi <- createBlockApi[Task](genesis.shardId, maxBlockLimit) - _ = Mockito.clearInvocations(blockStore, blockDagStorage) - blocksResponse <- blockApi.getBlocks(10) - } yield { - blocksResponse shouldBe 'right - blocksResponse.value.length shouldBe 8 - - blocks.map { b => - blockStore.get(Seq(b.blockHash)) wasCalled once - } - verifyNoMoreInteractions(blockStore) - - blockDagStorage.insert(*, *) wasNever called - blockDagStorage.getRepresentation wasCalled once - } - } - - it should "return until depth" in { - implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[Task] - - for { - blocks <- createDagWith8Blocks[Task] - genesis = blocks.head - blockApi <- createBlockApi[Task](genesis.shardId, maxBlockLimit) - _ = Mockito.clearInvocations(blockStore, blockDagStorage) - blocksResponse <- blockApi.getBlocks(2) - } yield { - blocksResponse shouldBe 'right - blocksResponse.value.length shouldBe 3 - - blocks.takeRight(3).map { b => - blockStore.get(Seq(b.blockHash)) wasCalled once - } - verifyNoMoreInteractions(blockStore) - - blockDagStorage.insert(*, *) wasNever called - blockDagStorage.getRepresentation wasCalled once - } - } - - "getBlocksByHeights" should "return blocks between startBlockNumber and endBlockNumber" in { - implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[Task] - - for { - blocks <- createDagWith8Blocks[Task] - genesis = blocks.head - blockApi <- createBlockApi[Task](genesis.shardId, maxBlockLimit) - _ = Mockito.clearInvocations(blockStore, blockDagStorage) - blocksResponse <- blockApi.getBlocksByHeights(2, 5) - } yield { - blocksResponse shouldBe 'right - blocksResponse.value shouldBe blocks.takeRight(5).map(BlockApi.getLightBlockInfo) - - blocks.takeRight(5).map { b => - blockStore.get(Seq(b.blockHash)) wasCalled once - } - verifyNoMoreInteractions(blockStore) - - 
blockDagStorage.insert(*, *) wasNever called - blockDagStorage.getRepresentation wasCalled once - } - } - - private def createMocks[F[_]: Sync] = - (createBlockStore, createBlockDagStorage, mock[RuntimeManager[F]]) - - private def createBlockStore[F[_]: Sync]: BlockStore[F] = { - val state = Ref.unsafe[F, List[BlockMessage]](List()) - val bs = mock[BlockStore[F]] - - bs.put(*) answers { kvPairs: Seq[(BlockHash, BlockMessage)] => - state.update(s => kvPairs.foldLeft(s) { case (acc, item) => acc :+ item._2 }) - } - bs.get(*) answers { keys: Seq[BlockHash] => - state.get.map(s => keys.map(h => s.find(_.blockHash == h))) - } - bs - } - - private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { - val genesisHash = RuntimeManager.emptyStateHashFixed - - val state = Ref.unsafe[F, DagRepresentation]( - DagRepresentation( - Set(), - Map(), - SortedMap(), - DagMessageState(), - Map( - Set(genesisHash) -> FringeData( - FringeData.fringeHash(Set.empty), - Set.empty, - Set.empty, - genesisHash.toBlake2b256Hash, - Set.empty, - Set.empty, - Set.empty - ) - ) - ) - ) - val bds = mock[BlockDagStorage[F]] - - bds.insert(any, any) answers { (bmd: BlockMetadata, b: BlockMessage) => - state.update { s => - val newDagSet = s.dagSet + b.blockHash - - val newChildMap = b.justifications.foldLeft(s.childMap) { - case (m, h) => m + (h -> (m.getOrElse(h, Set.empty) + b.blockHash)) - } + (b.blockHash -> Set.empty[BlockHash]) - - val newHeightMap = s.heightMap + (b.blockNumber -> (s.heightMap - .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) - - val seen = b.justifications - .flatMap(h => s.dagMessageState.msgMap(h).seen) - .toSet ++ b.justifications + b.blockHash - - val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) - - val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { - case (acc, (_, msg)) => - acc + acc - .find(_.sender == msg.sender) - .map(m => if (msg.height > m.height) msg else m) - .getOrElse(msg) - } - val 
newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) - - s.copy( - dagSet = newDagSet, - childMap = newChildMap, - heightMap = newHeightMap, - dagMessageState = newDagMessageState - ) - } - } - - bds.getRepresentation returns state.get - - bds - } - - // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) - def toMessage( - m: BlockMessage, - seen: Set[BlockHash] = Set.empty[BlockHash] - ): Message[BlockHash, Validator] = - Message[BlockHash, Validator]( - m.blockHash, - m.blockNumber, - m.sender, - m.seqNum, - m.bonds, - m.justifications.toSet, - Set(), - seen - ) -} +//class BlocksResponseAPITest +// extends AsyncFlatSpec +// with AsyncIOSpec +// with Matchers +// with EitherValues +// with BlockGenerator +// with BlockDagStorageFixture +// with BlockApiFixture +// with IdiomaticMockito +// with ArgumentMatchersSugar { +// +// implicit val log: Log[IO] = new Log.NOPLog[IO]() +// implicit val noopSpan: Span[IO] = NoopSpan[IO]() +// +// val v1: Validator = generateValidator("Validator One") +// val v2: Validator = generateValidator("Validator Two") +// val v3: Validator = generateValidator("Validator Three") +// val v1Bond: (Validator, Long) = (v1, 25L) +// val v2Bond: (Validator, Long) = (v2, 20L) +// val v3Bond: (Validator, Long) = (v3, 15L) +// val bonds: Map[Validator, Long] = Map(v1Bond, v2Bond, v3Bond) +// val maxBlockLimit = 50 +// +// private def createDagWith8Blocks[F[_]: Sync: BlockStore: BlockDagStorage] = +// for { +// genesis <- createGenesis[F](bonds = bonds) +// b2 <- createBlock[F]( +// v2, +// bonds, +// Seq(genesis.blockHash) +// ) +// b3 <- createBlock[F]( +// v1, +// bonds, +// Seq(genesis.blockHash) +// ) +// b4 <- createBlock[F]( +// v3, +// bonds, +// Seq(genesis.blockHash, b2.blockHash) +// ) +// b5 <- createBlock[F]( +// v2, +// bonds, +// Seq(b3.blockHash, b2.blockHash, genesis.blockHash) +// ) +// b6 <- createBlock[F]( +// v1, +// bonds, +// Seq(b3.blockHash, 
b2.blockHash, b4.blockHash) +// ) +// b7 <- createBlock[F]( +// v3, +// bonds, +// Seq(b3.blockHash, b5.blockHash, b4.blockHash) +// ) +// b8 <- createBlock[F]( +// v2, +// bonds, +// Seq(b6.blockHash, b5.blockHash, b4.blockHash) +// ) +// } yield List(genesis, b2, b3, b4, b5, b6, b7, b8) +// +// import coop.rchain.shared.RChainScheduler._ +// "getBlocks" should "return all blocks" in { +// implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[IO] +// +// for { +// blocks <- createDagWith8Blocks[IO] +// genesis = blocks.head +// blockApi <- createBlockApi[IO](genesis.shardId, maxBlockLimit) +// _ = Mockito.clearInvocations(blockStore, blockDagStorage) +// blocksResponse <- blockApi.getBlocks(10) +// } yield { +// blocksResponse shouldBe 'right +// blocksResponse.value.length shouldBe 8 +// +// blocks.map { b => +// blockStore.get(Seq(b.blockHash)) wasCalled once +// } +// verifyNoMoreInteractions(blockStore) +// +// blockDagStorage.insert(*, *) wasNever called +// blockDagStorage.getRepresentation wasCalled once +// } +// } +// +// it should "return until depth" in { +// implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[IO] +// for { +// blocks <- createDagWith8Blocks[IO] +// genesis = blocks.head +// blockApi <- createBlockApi[IO](genesis.shardId, maxBlockLimit) +// _ = Mockito.clearInvocations(blockStore, blockDagStorage) +// blocksResponse <- blockApi.getBlocks(2) +// } yield { +// blocksResponse shouldBe 'right +// blocksResponse.value.length shouldBe 3 +// +// blocks.takeRight(3).map { b => +// blockStore.get(Seq(b.blockHash)) wasCalled once +// } +// verifyNoMoreInteractions(blockStore) +// +// blockDagStorage.insert(*, *) wasNever called +// blockDagStorage.getRepresentation wasCalled once +// } +// } +// +// "getBlocksByHeights" should "return blocks between startBlockNumber and endBlockNumber" in { +// implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[IO] +// +// for { +// blocks <- 
createDagWith8Blocks[IO] +// genesis = blocks.head +// blockApi <- createBlockApi[IO](genesis.shardId, maxBlockLimit) +// _ = Mockito.clearInvocations(blockStore, blockDagStorage) +// blocksResponse <- blockApi.getBlocksByHeights(2, 5) +// } yield { +// blocksResponse shouldBe 'right +// blocksResponse.value shouldBe blocks.takeRight(5).map(BlockApi.getLightBlockInfo) +// +// blocks.takeRight(5).map { b => +// blockStore.get(Seq(b.blockHash)) wasCalled once +// } +// verifyNoMoreInteractions(blockStore) +// +// blockDagStorage.insert(*, *) wasNever called +// blockDagStorage.getRepresentation wasCalled once +// } +// } +// +// private def createMocks[F[_]: Sync] = +// (createBlockStore, createBlockDagStorage, mock[RuntimeManager[F]]) +// +// private def createBlockStore[F[_]: Sync]: BlockStore[F] = { +// val state = Ref.unsafe[F, List[BlockMessage]](List()) +// val bs = mock[BlockStore[F]] +// +// bs.put(*) answers { kvPairs: Seq[(BlockHash, BlockMessage)] => +// state.update(s => kvPairs.foldLeft(s) { case (acc, item) => acc :+ item._2 }) +// } +// bs.get(*) answers { keys: Seq[BlockHash] => +// state.get.map(s => keys.map(h => s.find(_.blockHash == h))) +// } +// bs +// } +// +// private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { +// val genesisHash = RuntimeManager.emptyStateHashFixed +// +// val state = Ref.unsafe[F, DagRepresentation]( +// DagRepresentation( +// Set(), +// Map(), +// SortedMap(), +// DagMessageState(), +// Map( +// Set(genesisHash) -> FringeData( +// FringeData.fringeHash(Set.empty), +// Set.empty, +// Set.empty, +// genesisHash.toBlake2b256Hash, +// Set.empty, +// Set.empty, +// Set.empty +// ) +// ) +// ) +// ) +// val bds = mock[BlockDagStorage[F]] +// +// bds.insert(any, any) answers { (bmd: BlockMetadata, b: BlockMessage) => +// state.update { s => +// val newDagSet = s.dagSet + b.blockHash +// +// val newChildMap = b.justifications.foldLeft(s.childMap) { +// case (m, h) => m + (h -> (m.getOrElse(h, Set.empty) + 
b.blockHash)) +// } + (b.blockHash -> Set.empty[BlockHash]) +// +// val newHeightMap = s.heightMap + (b.blockNumber -> (s.heightMap +// .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) +// +// val seen = b.justifications +// .flatMap(h => s.dagMessageState.msgMap(h).seen) +// .toSet ++ b.justifications + b.blockHash +// +// val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) +// +// val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { +// case (acc, (_, msg)) => +// acc + acc +// .find(_.sender == msg.sender) +// .map(m => if (msg.height > m.height) msg else m) +// .getOrElse(msg) +// } +// val newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) +// +// s.copy( +// dagSet = newDagSet, +// childMap = newChildMap, +// heightMap = newHeightMap, +// dagMessageState = newDagMessageState +// ) +// } +// } +// +// bds.getRepresentation returns state.get +// +// bds +// } +// +// // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) +// def toMessage( +// m: BlockMessage, +// seen: Set[BlockHash] = Set.empty[BlockHash] +// ): Message[BlockHash, Validator] = +// Message[BlockHash, Validator]( +// m.blockHash, +// m.blockNumber, +// m.sender, +// m.seqNum, +// m.bonds, +// m.justifications.toSet, +// Set(), +// seen +// ) +//} diff --git a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala index 99f474d3d75..a5ec9536466 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.api -import cats.effect.{Concurrent, Sync} +import cats.effect.testing.scalatest.AsyncIOSpec +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import 
coop.rchain.blockstorage.BlockStore.BlockStore @@ -19,10 +20,8 @@ import coop.rchain.models.FringeData import coop.rchain.models.Validator.Validator import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ -import coop.rchain.shared.Log +import coop.rchain.shared.{Log, Time} import coop.rchain.shared.scalatestcontrib._ -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} import org.scalatest.EitherValues @@ -31,155 +30,157 @@ import org.scalatest.matchers.should.Matchers import scala.collection.immutable.SortedMap -class BondedStatusAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with EitherValues - with BlockGenerator - with BlockDagStorageFixture - with BlockApiFixture - with IdiomaticMockito - with IdiomaticMockitoCats - with ArgumentMatchersSugar { - // 4 nodes with 3 validators bonded - private val keys = randomValidatorKeyPairs.take(3).toList - private val initialComputeBondsResult = keys - .zip(createBonds(keys.map(_._2))) - .map { case ((_, pubKey), (_, bond)) => pubKey.bytes.toByteString -> bond } - .toMap - private val gB = getRandomBlock( - setBonds = initialComputeBondsResult.some, - setValidator = toValidatorOpt(keys.head._1) - ) - - "bondStatus" should "return true for bonded validator" in { - implicit val (c, log, bds, bs, rm, sp) = createMocks[Task] - - for { - v1 <- Sync[Task].delay(ValidatorIdentity(keys.head._1)) - v2 = ValidatorIdentity(keys(1)._1) - v3 = ValidatorIdentity(keys(2)._1) - - _ <- bondedStatus(v1, v1.publicKey, gB) shouldBeF true - _ <- bondedStatus(v2, v2.publicKey, gB) shouldBeF true - _ <- bondedStatus(v3, v3.publicKey, gB) shouldBeF true - } yield { - bs.get(Seq(gB.blockHash)) wasCalled 3.times - verifyNoMoreInteractions(bs) - bds.getRepresentation wasCalled 3.times - rm.computeBonds(gB.postStateHash) wasCalled 3.times - } - } - - "bondStatus" should 
"return false for not bonded validators" in { - implicit val (c, log, bds, bs, rm, sp) = createMocks[Task] - val genesisValidator = ValidatorIdentity(keys.head._1) - for { - _ <- bondedStatus(genesisValidator, createValidator.publicKey, gB) shouldBeF false - } yield { - bs.get(Seq(gB.blockHash)) wasCalled once - verifyNoMoreInteractions(bs) - bds.getRepresentation wasCalled once - rm.computeBonds(gB.postStateHash) wasCalled once - } - } - - "bondStatus" should "return true for newly bonded validator" in { - implicit val (c, log, bds, bs, _, sp) = createMocks[Task] - - val genesisValidator = ValidatorIdentity(keys.head._1) - val newValidator = createValidator - - // Overriding mock for RuntimeManager, as it differ from the standard one - val stake = 1000L - val newComputeBondsResult = initialComputeBondsResult + (newValidator.publicKey.bytes.toByteString -> stake) - implicit val rm: RuntimeManager[Task] = mock[RuntimeManager[Task]] - rm.computeBonds(*) returns initialComputeBondsResult.pure andThen newComputeBondsResult.pure - - for { - _ <- BondingUtil.bondingDeploy[Task](stake, newValidator.privateKey, shardId = gB.shardId) - _ <- bondedStatus(genesisValidator, newValidator.publicKey, gB) shouldBeF false - b1 = getRandomBlock( - setJustifications = Seq(gB.blockHash).some, - setBonds = newComputeBondsResult.some, - setValidator = toValidatorOpt(newValidator.privateKey) - ) - - // b1 is now finalized, hence n4 is now bonded - _ <- bondedStatus(genesisValidator, newValidator.publicKey, b1) shouldBeF true - } yield { - bs.get(Seq(gB.blockHash)) wasCalled twice - verifyNoMoreInteractions(bs) - bds.getRepresentation wasCalled twice - rm.computeBonds(gB.postStateHash) wasCalled once - rm.computeBonds(b1.postStateHash) wasCalled once - } - } - - private def createMocks[F[_]: Concurrent: Sync] - : (Concurrent[F], Log[F], BlockDagStorage[F], BlockStore[F], RuntimeManager[F], Span[F]) = { - val c = Concurrent[F] - val sp = mock[Span[F]] - - val log = mock[Log[F]] - 
log.warn(*) returns ().pure - - val msg = toMessage(gB) - val bds = mock[BlockDagStorage[F]] - bds.getRepresentation returnsF DagRepresentation( - Set(gB.blockHash), - Map(gB.blockHash -> Set()), - SortedMap(0L -> Set(gB.blockHash)), - new DagMessageState(Set(msg), Map(msg.id -> msg)), - Map( - Set(gB.blockHash) -> FringeData( - FringeData.fringeHash(Set.empty), - Set.empty, - Set.empty, - gB.blockHash.toBlake2b256Hash, - Set.empty, - Set.empty, - Set.empty - ) - ) - ) - - val bs = mock[BlockStore[F]] - bs.get(Seq(gB.blockHash)) returnsF Vector(gB.some) - - val rm = mock[RuntimeManager[F]] - rm.computeBonds(*) returnsF initialComputeBondsResult - - (c, log, bds, bs, rm, sp) - } - - private def toValidatorOpt(pk: PrivateKey): Option[Validator] = pk.bytes.toByteString.some - - private def toMessage(m: BlockMessage): Message[BlockHash, Validator] = - Message[BlockHash, Validator]( - m.blockHash, - m.blockNumber, - m.sender, - m.seqNum, - m.bonds, - m.justifications.toSet, - Set(m.blockHash), - Set(m.blockHash) - ) - - private def bondedStatus[F[_]: Concurrent: BlockDagStorage: BlockStore: Log: RuntimeManager: Span]( - validatorIdOpt: ValidatorIdentity, - publicKey: PublicKey, - block: BlockMessage - ): F[Boolean] = - for { - blockApi <- createBlockApi("root", 50, validatorIdOpt.some) - res <- blockApi.bondStatus(ByteString.copyFrom(publicKey.bytes), block.some).map(_.value) - } yield res - - private def createValidator: ValidatorIdentity = { - val (privateKey, _) = Secp256k1.newKeyPair - ValidatorIdentity(privateKey) - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class BondedStatusAPITest +// extends AsyncFlatSpec +// with AsyncIOSpec +// with Matchers +// with EitherValues +// with BlockGenerator +// with BlockDagStorageFixture +// with BlockApiFixture +// with IdiomaticMockito +// with IdiomaticMockitoCats +// with ArgumentMatchersSugar { +// // 4 nodes with 3 validators bonded +// private val keys = 
randomValidatorKeyPairs.take(3).toList +// private val initialComputeBondsResult = keys +// .zip(createBonds(keys.map(_._2))) +// .map { case ((_, pubKey), (_, bond)) => pubKey.bytes.toByteString -> bond } +// .toMap +// private val gB = getRandomBlock( +// setBonds = initialComputeBondsResult.some, +// setValidator = toValidatorOpt(keys.head._1) +// ) +// import coop.rchain.shared.RChainScheduler._ +// +// "bondStatus" should "return true for bonded validator" in { +// implicit val (c, log, bds, bs, rm, sp) = createMocks[IO] +// +// for { +// v1 <- IO.delay(ValidatorIdentity(keys.head._1)) +// v2 = ValidatorIdentity(keys(1)._1) +// v3 = ValidatorIdentity(keys(2)._1) +// +// _ <- bondedStatus(v1, v1.publicKey, gB) shouldBeF true +// _ <- bondedStatus(v2, v2.publicKey, gB) shouldBeF true +// _ <- bondedStatus(v3, v3.publicKey, gB) shouldBeF true +// } yield { +// bs.get(Seq(gB.blockHash)) wasCalled 3.times +// verifyNoMoreInteractions(bs) +// bds.getRepresentation wasCalled 3.times +// rm.computeBonds(gB.postStateHash) wasCalled 3.times +// } +// } +// +// "bondStatus" should "return false for not bonded validators" in { +// implicit val (c, log, bds, bs, rm, sp) = createMocks[IO] +// val genesisValidator = ValidatorIdentity(keys.head._1) +// for { +// _ <- bondedStatus(genesisValidator, createValidator.publicKey, gB) shouldBeF false +// } yield { +// bs.get(Seq(gB.blockHash)) wasCalled once +// verifyNoMoreInteractions(bs) +// bds.getRepresentation wasCalled once +// rm.computeBonds(gB.postStateHash) wasCalled once +// } +// } +// +// "bondStatus" should "return true for newly bonded validator" in { +// implicit val (c, log, bds, bs, _, sp) = createMocks[IO] +// +// val genesisValidator = ValidatorIdentity(keys.head._1) +// val newValidator = createValidator +// +// // Overriding mock for RuntimeManager, as it differ from the standard one +// val stake = 1000L +// val newComputeBondsResult = initialComputeBondsResult + (newValidator.publicKey.bytes.toByteString -> 
stake) +// implicit val rm: RuntimeManager[IO] = mock[RuntimeManager[IO]] +// rm.computeBonds(*) returns initialComputeBondsResult.pure andThen newComputeBondsResult.pure +// +// for { +// _ <- BondingUtil.bondingDeploy[IO](stake, newValidator.privateKey, shardId = gB.shardId) +// _ <- bondedStatus(genesisValidator, newValidator.publicKey, gB) shouldBeF false +// b1 = getRandomBlock( +// setJustifications = Seq(gB.blockHash).some, +// setBonds = newComputeBondsResult.some, +// setValidator = toValidatorOpt(newValidator.privateKey) +// ) +// +// // b1 is now finalized, hence n4 is now bonded +// _ <- bondedStatus(genesisValidator, newValidator.publicKey, b1) shouldBeF true +// } yield { +// bs.get(Seq(gB.blockHash)) wasCalled twice +// verifyNoMoreInteractions(bs) +// bds.getRepresentation wasCalled twice +// rm.computeBonds(gB.postStateHash) wasCalled once +// rm.computeBonds(b1.postStateHash) wasCalled once +// } +// } +// +// private def createMocks[F[_]: Concurrent: Sync] +// : (Concurrent[F], Log[F], BlockDagStorage[F], BlockStore[F], RuntimeManager[F], Span[F]) = { +// val c = Concurrent[F] +// val sp = mock[Span[F]] +// +// val log = mock[Log[F]] +// log.warn(*) returns ().pure +// +// val msg = toMessage(gB) +// val bds = mock[BlockDagStorage[F]] +// bds.getRepresentation returnsF DagRepresentation( +// Set(gB.blockHash), +// Map(gB.blockHash -> Set()), +// SortedMap(0L -> Set(gB.blockHash)), +// new DagMessageState(Set(msg), Map(msg.id -> msg)), +// Map( +// Set(gB.blockHash) -> FringeData( +// FringeData.fringeHash(Set.empty), +// Set.empty, +// Set.empty, +// gB.blockHash.toBlake2b256Hash, +// Set.empty, +// Set.empty, +// Set.empty +// ) +// ) +// ) +// +// val bs = mock[BlockStore[F]] +// bs.get(Seq(gB.blockHash)) returnsF Vector(gB.some) +// +// val rm = mock[RuntimeManager[F]] +// rm.computeBonds(*) returnsF initialComputeBondsResult +// +// (c, log, bds, bs, rm, sp) +// } +// +// private def toValidatorOpt(pk: PrivateKey): Option[Validator] = 
pk.bytes.toByteString.some +// +// private def toMessage(m: BlockMessage): Message[BlockHash, Validator] = +// Message[BlockHash, Validator]( +// m.blockHash, +// m.blockNumber, +// m.sender, +// m.seqNum, +// m.bonds, +// m.justifications.toSet, +// Set(m.blockHash), +// Set(m.blockHash) +// ) +// +// private def bondedStatus[F[_]: Concurrent: BlockDagStorage: BlockStore: Log: RuntimeManager: Span]( +// validatorIdOpt: ValidatorIdentity, +// publicKey: PublicKey, +// block: BlockMessage +// ): F[Boolean] = +// for { +// blockApi <- createBlockApi("root", 50, validatorIdOpt.some) +// res <- blockApi.bondStatus(ByteString.copyFrom(publicKey.bytes), block.some).map(_.value) +// } yield res +// +// private def createValidator: ValidatorIdentity = { +// val (privateKey, _) = Secp256k1.newKeyPair +// ValidatorIdentity(privateKey) +// } +//} diff --git a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala index 58f6a8550b7..0374a6596d1 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.api -import cats.effect.Concurrent +import cats.effect.testing.scalatest.AsyncIOSpec +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -21,8 +22,6 @@ import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.shared.Log -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} import org.scalatest.EitherValues @@ -31,158 +30,160 @@ import org.scalatest.matchers.should.Matchers import 
scala.collection.immutable.SortedMap -class ExploratoryDeployAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with EitherValues - with BlockGenerator - with BlockDagStorageFixture - with BlockApiFixture - with IdiomaticMockito - with IdiomaticMockitoCats - with ArgumentMatchersSugar { - - private val genesis = getRandomBlock() - private val b1 = getRandomBlock() - private val b2 = getRandomBlock() - private val b3 = getRandomBlock() - - private def validator(index: Int) = keys(index)._2.bytes.toByteString - - private val keys = randomValidatorKeyPairs.take(4).toList - private val vGenesis = validator(0) - private val v1 = validator(1) - private val v2 = validator(2) - private val v3 = validator(3) - - /* - * DAG Looks like this: - * b3 - * | - * b2 - * | - * b1 - * | - * genesis - */ - it should "exploratoryDeploy get data from the read only node" in { - implicit val log = mock[Log[Task]] - implicit val sp = mock[Span[Task]] - - implicit val bs = mock[BlockStore[Task]] - bs.get(Seq(b2.blockHash)) returnsF Seq(b2.some) - - val blocks = List(genesis, b1, b2, b3) - val validators = List(vGenesis, v1, v2, v3) - val bondsMap = validators.map(_ -> 10L).toMap - - def toMessage(block: BlockMessage, sender: ByteString, senderSeq: Long) = - Message[BlockHash, Validator]( - block.blockHash, - blocks.indexOf(block).toLong, - sender, - senderSeq, - bondsMap, - blocks.get(blocks.indexOf(block) - 1L).map(b => Set(b.blockHash)).getOrElse(Set.empty), - Set.empty, - blocks.take(blocks.indexOf(block) + 1).map(_.blockHash).toSet - ) - - implicit val bds = mock[BlockDagStorage[Task]] - bds.getRepresentation returnsF DagRepresentation( - blocks - .map(_.blockHash) - .toSet, - blocks.zipWithIndex.map { - case (b, i) => - b.blockHash -> blocks - .get(i + 1L) - .map(nextBlock => Set(nextBlock.blockHash)) - .getOrElse(Set.empty) - }.toMap, - blocks.zipWithIndex.foldLeft(SortedMap.empty[Long, Set[BlockHash]]) { - case (acc, (b, i)) => acc + (i.toLong -> 
Set(b.blockHash)) - }, - DagMessageState[BlockHash, Validator]( - Set(toMessage(b2, vGenesis, 2), toMessage(b3, v1, 0)), - Map( - genesis.blockHash -> toMessage(genesis, vGenesis, 0), - b1.blockHash -> toMessage(b1, vGenesis, 1), - b2.blockHash -> toMessage(b2, vGenesis, 2), - b3.blockHash -> toMessage(b3, v1, 0) - ) - ), - Map( - Set.empty -> FringeData( - Blake2b256Hash.create(Blake2b256.hash("".getBytes)), - Set.empty, - Set.empty, - RuntimeManager.emptyStateHashFixed.toBlake2b256Hash, - Set.empty, - Set.empty, - Set.empty - ) - ) - ) - - val term = "new return in { for (@data <- @\"store\") {return!(data)}}" - val storedData = "data" - implicit val rm = mock[RuntimeManager[Task]] - rm.playExploratoryDeploy(term, *) returnsF List(Par(exprs = List(Expr(GString(storedData))))) - - for { - result <- exploratoryDeploy[Task](term, b2.blockHash) - } yield { - result shouldBe 'right - - val (par, b) = result.value - par match { - case Seq(Par(_, _, _, Seq(expr), _, _, _, _, _, _)) => - expr match { - case Expr(GString(data)) => data shouldBe storedData - case _ => fail("Could not get data from exploratory api") - } - } - b.blockHash shouldBe PrettyPrinter.buildStringNoLimit(b2.blockHash) - - bs.get(Seq(b2.blockHash)) wasCalled once - verifyNoMoreInteractions(bs) - bds.getRepresentation wasCalled once - rm.playExploratoryDeploy(term, *) wasCalled once - } - } - - it should "exploratoryDeploy return error on bonded validator" in { - implicit val blockDagStorage = mock[BlockDagStorage[Task]] - implicit val blockStore = mock[BlockStore[Task]] - implicit val runtimeManager = mock[RuntimeManager[Task]] - implicit val log = mock[Log[Task]] - implicit val sp = mock[Span[Task]] - - for { - result <- exploratoryDeploy[Task]( - "new return in { return!(1) }", - ByteString.EMPTY, - ValidatorIdentity(keys.head._1).some - ) - } yield { - result shouldBe 'left - result.left.value shouldBe "Exploratory deploy can only be executed on read-only RNode." 
- - verifyNoMoreInteractions(blockDagStorage) - verifyNoMoreInteractions(blockStore) - verifyNoMoreInteractions(runtimeManager) - } - } - - private def exploratoryDeploy[F[_]: Concurrent: BlockStore: BlockDagStorage: RuntimeManager: Log: Span]( - term: String, - block: BlockHash, - validatorIdOpt: Option[ValidatorIdentity] = none - ): F[ApiErr[(Seq[Par], LightBlockInfo)]] = - for { - blockApi <- createBlockApi[F](genesis.shardId, 50, validatorIdOpt) - res <- blockApi.exploratoryDeploy(term, blockHash = block.toHexString.some) - } yield res -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class ExploratoryDeployAPITest +// extends AsyncFlatSpec +// with AsyncIOSpec +// with Matchers +// with EitherValues +// with BlockGenerator +// with BlockDagStorageFixture +// with BlockApiFixture +// with IdiomaticMockito +// with IdiomaticMockitoCats +// with ArgumentMatchersSugar { +// import coop.rchain.shared.RChainScheduler._ +// +// private val genesis = getRandomBlock() +// private val b1 = getRandomBlock() +// private val b2 = getRandomBlock() +// private val b3 = getRandomBlock() +// +// private def validator(index: Int) = keys(index)._2.bytes.toByteString +// +// private val keys = randomValidatorKeyPairs.take(4).toList +// private val vGenesis = validator(0) +// private val v1 = validator(1) +// private val v2 = validator(2) +// private val v3 = validator(3) +// +// /* +// * DAG Looks like this: +// * b3 +// * | +// * b2 +// * | +// * b1 +// * | +// * genesis +// */ +// it should "exploratoryDeploy get data from the read only node" in { +// implicit val log = mock[Log[IO]] +// implicit val sp = mock[Span[IO]] +// +// implicit val bs = mock[BlockStore[IO]] +// bs.get(Seq(b2.blockHash)) returnsF Seq(b2.some) +// +// val blocks = List(genesis, b1, b2, b3) +// val validators = List(vGenesis, v1, v2, v3) +// val bondsMap = validators.map(_ -> 10L).toMap +// +// def toMessage(block: BlockMessage, sender: ByteString, 
senderSeq: Long) = +// Message[BlockHash, Validator]( +// block.blockHash, +// blocks.indexOf(block).toLong, +// sender, +// senderSeq, +// bondsMap, +// blocks.get(blocks.indexOf(block) - 1L).map(b => Set(b.blockHash)).getOrElse(Set.empty), +// Set.empty, +// blocks.take(blocks.indexOf(block) + 1).map(_.blockHash).toSet +// ) +// +// implicit val bds = mock[BlockDagStorage[IO]] +// bds.getRepresentation returnsF DagRepresentation( +// blocks +// .map(_.blockHash) +// .toSet, +// blocks.zipWithIndex.map { +// case (b, i) => +// b.blockHash -> blocks +// .get(i + 1L) +// .map(nextBlock => Set(nextBlock.blockHash)) +// .getOrElse(Set.empty) +// }.toMap, +// blocks.zipWithIndex.foldLeft(SortedMap.empty[Long, Set[BlockHash]]) { +// case (acc, (b, i)) => acc + (i.toLong -> Set(b.blockHash)) +// }, +// DagMessageState[BlockHash, Validator]( +// Set(toMessage(b2, vGenesis, 2), toMessage(b3, v1, 0)), +// Map( +// genesis.blockHash -> toMessage(genesis, vGenesis, 0), +// b1.blockHash -> toMessage(b1, vGenesis, 1), +// b2.blockHash -> toMessage(b2, vGenesis, 2), +// b3.blockHash -> toMessage(b3, v1, 0) +// ) +// ), +// Map( +// Set.empty -> FringeData( +// Blake2b256Hash.create(Blake2b256.hash("".getBytes)), +// Set.empty, +// Set.empty, +// RuntimeManager.emptyStateHashFixed.toBlake2b256Hash, +// Set.empty, +// Set.empty, +// Set.empty +// ) +// ) +// ) +// +// val term = "new return in { for (@data <- @\"store\") {return!(data)}}" +// val storedData = "data" +// implicit val rm = mock[RuntimeManager[IO]] +// rm.playExploratoryDeploy(term, *) returnsF List(Par(exprs = List(Expr(GString(storedData))))) +// +// for { +// result <- exploratoryDeploy[IO](term, b2.blockHash) +// } yield { +// result shouldBe 'right +// +// val (par, b) = result.value +// par match { +// case Seq(Par(_, _, _, Seq(expr), _, _, _, _, _, _)) => +// expr match { +// case Expr(GString(data)) => data shouldBe storedData +// case _ => fail("Could not get data from exploratory api") +// } +// } +// 
b.blockHash shouldBe PrettyPrinter.buildStringNoLimit(b2.blockHash) +// +// bs.get(Seq(b2.blockHash)) wasCalled once +// verifyNoMoreInteractions(bs) +// bds.getRepresentation wasCalled once +// rm.playExploratoryDeploy(term, *) wasCalled once +// } +// } +// +// it should "exploratoryDeploy return error on bonded validator" in { +// implicit val blockDagStorage = mock[BlockDagStorage[IO]] +// implicit val blockStore = mock[BlockStore[IO]] +// implicit val runtimeManager = mock[RuntimeManager[IO]] +// implicit val log = mock[Log[IO]] +// implicit val sp = mock[Span[IO]] +// +// for { +// result <- exploratoryDeploy[IO]( +// "new return in { return!(1) }", +// ByteString.EMPTY, +// ValidatorIdentity(keys.head._1).some +// ) +// } yield { +// result shouldBe 'left +// result.left.value shouldBe "Exploratory deploy can only be executed on read-only RNode." +// +// verifyNoMoreInteractions(blockDagStorage) +// verifyNoMoreInteractions(blockStore) +// verifyNoMoreInteractions(runtimeManager) +// } +// } +// +// private def exploratoryDeploy[F[_]: Concurrent: BlockStore: BlockDagStorage: RuntimeManager: Log: Span]( +// term: String, +// block: BlockHash, +// validatorIdOpt: Option[ValidatorIdentity] = none +// ): F[ApiErr[(Seq[Par], LightBlockInfo)]] = +// for { +// blockApi <- createBlockApi[F](genesis.shardId, 50, validatorIdOpt) +// res <- blockApi.exploratoryDeploy(term, blockHash = block.toHexString.some) +// } yield res +//} diff --git a/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala index 55b362a04d1..e448e094db7 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala @@ -1,6 +1,8 @@ package coop.rchain.casper.api import cats.Applicative +import cats.effect.IO +import cats.effect.testing.scalatest.AsyncIOSpec import cats.syntax.all._ import 
coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.{BlockDagStorage, DagMessageState, DagRepresentation, Message} @@ -13,8 +15,6 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.models.Validator.Validator import coop.rchain.models.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.IdiomaticMockito import org.mockito.cats.IdiomaticMockitoCats import org.scalatest.EitherValues @@ -23,112 +23,114 @@ import org.scalatest.matchers.should.Matchers import scala.collection.immutable.SortedMap -class LastFinalizedAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with EitherValues - with BlockGenerator - with BlockDagStorageFixture - with BlockApiFixture - with IdiomaticMockito - with IdiomaticMockitoCats { - - private val knownHash = "abc" - private val unknownHash = "bcd" - private val wrongHash = "xyz" - - private val createValidator = ValidatorIdentity(randomValidatorKeyPairs.take(1).toList.head._1) - private val createSender = createValidator.publicKey.bytes.toByteString - - "isFinalized" should "return true for a block placed in the DAG" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.isFinalized(knownHash) - } yield { - res.value shouldBe true - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - } - } - - "isFinalized" should "return false for a block not placed in the DAG" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.isFinalized(unknownHash) - } yield { - res.value shouldBe false - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - } - } - - "isFinalized" should "not throw exception and return 
false for wrong hash" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - - // No exception is thrown here, because the decoding implementation simply discards non-hex characters - res <- blockApi.isFinalized(wrongHash) - } yield { - res.value shouldBe false - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - } - } - - "isFinalized" should "return true for hash which becomes known after removing wrong characters" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.isFinalized(wrongHash + knownHash) - } yield { - res.value shouldBe true - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - } - } - - private def createMocks[F[_]: Applicative] - : (Log[F], Span[F], RuntimeManager[F], BlockStore[F], BlockDagStorage[F]) = { - val log = mock[Log[F]] - val sp = mock[Span[F]] - val rm = mock[RuntimeManager[F]] - val bs = mock[BlockStore[F]] - - val bds = mock[BlockDagStorage[F]] - - val knownHashBS = knownHash.unsafeHexToByteString - val msg = new Message[BlockHash, Validator]( - knownHashBS, - 0, - createSender, - 0, - Map.empty, - Set.empty, - // DAG contains only one message, which is finalized and sees itself - Set(knownHashBS), - Set(knownHashBS) - ) - - bds.getRepresentation returnsF DagRepresentation( - Set.empty, - Map.empty, - SortedMap.empty, - new DagMessageState(Set(msg), Map(msg.id -> msg)), - Map.empty - ) - - (log, sp, rm, bs, bds) - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class LastFinalizedAPITest +// extends AsyncFlatSpec +// with Matchers +// with AsyncIOSpec +// with EitherValues +// with BlockGenerator +// with BlockDagStorageFixture +// with BlockApiFixture +// with IdiomaticMockito +// 
with IdiomaticMockitoCats { +// +// private val knownHash = "abc" +// private val unknownHash = "bcd" +// private val wrongHash = "xyz" +// +// private val createValidator = ValidatorIdentity(randomValidatorKeyPairs.take(1).toList.head._1) +// private val createSender = createValidator.publicKey.bytes.toByteString +// import coop.rchain.shared.RChainScheduler._ +// +// "isFinalized" should "return true for a block placed in the DAG" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// res <- blockApi.isFinalized(knownHash) +// } yield { +// res.value shouldBe true +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// } +// } +// +// "isFinalized" should "return false for a block not placed in the DAG" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// res <- blockApi.isFinalized(unknownHash) +// } yield { +// res.value shouldBe false +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// } +// } +// +// "isFinalized" should "not throw exception and return false for wrong hash" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// +// // No exception is thrown here, because the decoding implementation simply discards non-hex characters +// res <- blockApi.isFinalized(wrongHash) +// } yield { +// res.value shouldBe false +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// } +// } +// +// "isFinalized" should "return true for hash which becomes known after removing wrong characters" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, 
createValidator.some) +// res <- blockApi.isFinalized(wrongHash + knownHash) +// } yield { +// res.value shouldBe true +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// } +// } +// +// private def createMocks[F[_]: Applicative] +// : (Log[F], Span[F], RuntimeManager[F], BlockStore[F], BlockDagStorage[F]) = { +// val log = mock[Log[F]] +// val sp = mock[Span[F]] +// val rm = mock[RuntimeManager[F]] +// val bs = mock[BlockStore[F]] +// +// val bds = mock[BlockDagStorage[F]] +// +// val knownHashBS = knownHash.unsafeHexToByteString +// val msg = new Message[BlockHash, Validator]( +// knownHashBS, +// 0, +// createSender, +// 0, +// Map.empty, +// Set.empty, +// // DAG contains only one message, which is finalized and sees itself +// Set(knownHashBS), +// Set(knownHashBS) +// ) +// +// bds.getRepresentation returnsF DagRepresentation( +// Set.empty, +// Map.empty, +// SortedMap.empty, +// new DagMessageState(Set(msg), Map(msg.id -> msg)), +// Map.empty +// ) +// +// (log, sp, rm, bs, bds) +// } +//} diff --git a/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala index 43a0b37633d..51af60e0b79 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.api import cats.Applicative +import cats.effect.IO import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.{BlockDagStorage, DagMessageState, DagRepresentation, Message} @@ -20,179 +21,180 @@ import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.shared.Log -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import 
org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} import org.scalatest._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers +import cats.effect.testing.scalatest.AsyncIOSpec +import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap -class ListeningNameAPITest - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with Inside - with BlockApiFixture - with EitherValues - with IdiomaticMockito - with IdiomaticMockitoCats - with ArgumentMatchersSugar { - - private val createValidator = ValidatorIdentity(randomValidatorKeyPairs.take(1).toList.head._1) - - private val listeningTerm = "@{ 3 | 2 | 1 }!(0)" - private val listeningName = Par().copy(exprs = Seq(Expr(GInt(2)), Expr(GInt(1)), Expr(GInt(3)))) - private val listeningHash = "b87b08f07e4fadbfde88af2ff54a0d9ba58de47a063f798c5a8ce39f8f6892b6" - - private val resultData = Par().copy(exprs = Seq(Expr(GInt(0)))) - - private val deploy = ProcessedDeploy - .empty(ConstructDeploy.sourceDeployNow(listeningTerm, shardId = "root")) - .copy( - deployLog = List( - ProduceEvent( - Blake2b256Hash.fromHex(listeningHash).toByteString, - Blake2b256.hash("".getBytes).toByteString, - persistent = false, - 0 - ) - ) - ) - - private val b1 = getRandomBlock(setDeploys = Seq(deploy).some) - private val b2 = getRandomBlock() - private val b3 = getRandomBlock() - - "getListeningNameDataResponse" should "return error if depth more than max depth limit" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 2, createValidator.some) - res <- blockApi.getListeningNameDataResponse(3, listeningName) - } yield { - res shouldBe 'left - res.left.value shouldBe "Your request on getListeningName depth 3 exceed the max limit 2" - - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - verifyNoMoreInteractions(rm) - } - } 
- - it should "return empty result if listening name deeper than expected" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.getListeningNameDataResponse(1, listeningName) - } yield { - res shouldBe 'right - res.value shouldBe (Seq(), 0) - - bs.get(*) wasCalled twice - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - verifyNoMoreInteractions(rm) - } - } - - it should "return expected result if block falls within the specified depth" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.getListeningNameDataResponse(2, listeningName) - } yield { - res shouldBe 'right - val (blocks, length) = res.value - length shouldBe 1 - val (par, block) = (blocks.head.postBlockData.head, blocks.head.block) - - par shouldBe resultData - block shouldBe BlockApi.getLightBlockInfo(b1) - - rm.getData(*)(*) wasCalled once - bs.get(*) wasCalled 3.times - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - verifyNoMoreInteractions(rm) - } - } - - it should "return expected result even if depth is greater than possible" in { - implicit val (log, sp, rm, bs, bds) = createMocks[Task] - for { - blockApi <- createBlockApi[Task]("root", 50, createValidator.some) - res <- blockApi.getListeningNameDataResponse(10, listeningName) - } yield { - res shouldBe 'right - val (blocks, length) = res.value - length shouldBe 1 - val (par, block) = (blocks.head.postBlockData.head, blocks.head.block) - - par shouldBe resultData - block shouldBe BlockApi.getLightBlockInfo(b1) - - rm.getData(*)(*) wasCalled once - bs.get(*) wasCalled 3.times - bds.getRepresentation wasCalled once - - verifyNoMoreInteractions(bs) - verifyNoMoreInteractions(bds) - verifyNoMoreInteractions(rm) - } - } - - def 
toMessage(m: BlockMessage): Message[BlockHash, Validator] = - Message[BlockHash, Validator]( - m.blockHash, - m.blockNumber, - m.sender, - m.seqNum, - m.bonds, - m.justifications.toSet, - Set.empty, - Set(m.blockHash) - ) - - private def createMocks[F[_]: Applicative] - : (Log[F], Span[F], RuntimeManager[F], BlockStore[F], BlockDagStorage[F]) = { - val log = mock[Log[F]] - val sp = mock[Span[F]] - - val rm = mock[RuntimeManager[F]] - rm.getData(*)(*) returnsF Seq(resultData) - - val bs = mock[BlockStore[F]] - - bs.get(*) answersF { (keys: Seq[BlockHash]) => - Seq((keys.head match { - case b1.blockHash => b1 - case b2.blockHash => b2 - case b3.blockHash => b3 - }).some) - } - - val bds = mock[BlockDagStorage[F]] - - val m1 = toMessage(b1) - val m2 = toMessage(b2) - val m3 = toMessage(b3) - - bds.getRepresentation returnsF DagRepresentation( - Set(m1.id, m2.id, m3.id), - Map(m1.id -> Set(m2.id, m3.id)), - SortedMap(0L -> Set(m1.id), 1L -> Set(m2.id, m3.id)), - new DagMessageState(Set(m2, m3), Map(m1.id -> m1, m2.id -> m2, m3.id -> m3)), - Map.empty - ) - - (log, sp, rm, bs, bds) - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class ListeningNameAPITest +// extends AsyncFlatSpec +// with AsyncIOSpec +// with Matchers +// with Inside +// with BlockApiFixture +// with EitherValues +// with IdiomaticMockito +// with IdiomaticMockitoCats +// with ArgumentMatchersSugar { +// +// private val createValidator = ValidatorIdentity(randomValidatorKeyPairs.take(1).toList.head._1) +// +// private val listeningTerm = "@{ 3 | 2 | 1 }!(0)" +// private val listeningName = Par().copy(exprs = Seq(Expr(GInt(2)), Expr(GInt(1)), Expr(GInt(3)))) +// private val listeningHash = "b87b08f07e4fadbfde88af2ff54a0d9ba58de47a063f798c5a8ce39f8f6892b6" +// +// private val resultData = Par().copy(exprs = Seq(Expr(GInt(0)))) +// +// private val deploy = ProcessedDeploy +// .empty(ConstructDeploy.sourceDeployNow(listeningTerm, shardId = 
"root")) +// .copy( +// deployLog = List( +// ProduceEvent( +// Blake2b256Hash.fromHex(listeningHash).toByteString, +// Blake2b256.hash("".getBytes).toByteString, +// persistent = false, +// 0 +// ) +// ) +// ) +// +// private val b1 = getRandomBlock(setDeploys = Seq(deploy).some) +// private val b2 = getRandomBlock() +// private val b3 = getRandomBlock() +// +// "getListeningNameDataResponse" should "return error if depth more than max depth limit" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 2, createValidator.some) +// res <- blockApi.getListeningNameDataResponse(3, listeningName) +// } yield { +// res shouldBe 'left +// res.left.value shouldBe "Your request on getListeningName depth 3 exceed the max limit 2" +// +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// verifyNoMoreInteractions(rm) +// } +// } +// +// it should "return empty result if listening name deeper than expected" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// res <- blockApi.getListeningNameDataResponse(1, listeningName) +// } yield { +// res shouldBe 'right +// res.value shouldBe (Seq(), 0) +// +// bs.get(*) wasCalled twice +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// verifyNoMoreInteractions(rm) +// } +// } +// +// it should "return expected result if block falls within the specified depth" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// res <- blockApi.getListeningNameDataResponse(2, listeningName) +// } yield { +// res shouldBe 'right +// val (blocks, length) = res.value +// length shouldBe 1 +// val (par, block) = (blocks.head.postBlockData.head, blocks.head.block) +// +// par shouldBe 
resultData +// block shouldBe BlockApi.getLightBlockInfo(b1) +// +// rm.getData(*)(*) wasCalled once +// bs.get(*) wasCalled 3.times +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// verifyNoMoreInteractions(rm) +// } +// } +// +// it should "return expected result even if depth is greater than possible" in { +// implicit val (log, sp, rm, bs, bds) = createMocks[IO] +// for { +// blockApi <- createBlockApi[IO]("root", 50, createValidator.some) +// res <- blockApi.getListeningNameDataResponse(10, listeningName) +// } yield { +// res shouldBe 'right +// val (blocks, length) = res.value +// length shouldBe 1 +// val (par, block) = (blocks.head.postBlockData.head, blocks.head.block) +// +// par shouldBe resultData +// block shouldBe BlockApi.getLightBlockInfo(b1) +// +// rm.getData(*)(*) wasCalled once +// bs.get(*) wasCalled 3.times +// bds.getRepresentation wasCalled once +// +// verifyNoMoreInteractions(bs) +// verifyNoMoreInteractions(bds) +// verifyNoMoreInteractions(rm) +// } +// } +// +// def toMessage(m: BlockMessage): Message[BlockHash, Validator] = +// Message[BlockHash, Validator]( +// m.blockHash, +// m.blockNumber, +// m.sender, +// m.seqNum, +// m.bonds, +// m.justifications.toSet, +// Set.empty, +// Set(m.blockHash) +// ) +// +// private def createMocks[F[_]: Applicative] +// : (Log[F], Span[F], RuntimeManager[F], BlockStore[F], BlockDagStorage[F]) = { +// val log = mock[Log[F]] +// val sp = mock[Span[F]] +// +// val rm = mock[RuntimeManager[F]] +// rm.getData(*)(*) returnsF Seq(resultData) +// +// val bs = mock[BlockStore[F]] +// +// bs.get(*) answersF { (keys: Seq[BlockHash]) => +// Seq((keys.head match { +// case b1.blockHash => b1 +// case b2.blockHash => b2 +// case b3.blockHash => b3 +// }).some) +// } +// +// val bds = mock[BlockDagStorage[F]] +// +// val m1 = toMessage(b1) +// val m2 = toMessage(b2) +// val m3 = toMessage(b3) +// +// bds.getRepresentation returnsF 
DagRepresentation( +// Set(m1.id, m2.id, m3.id), +// Map(m1.id -> Set(m2.id, m3.id)), +// SortedMap(0L -> Set(m1.id), 1L -> Set(m2.id, m3.id)), +// new DagMessageState(Set(m2, m3), Map(m1.id -> m1, m2.id -> m2, m3.id -> m3)), +// Map.empty +// ) +// +// (log, sp, rm, bs, bds) +// } +//} diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala index dc66f65425b..54df05a3c8b 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala @@ -8,7 +8,6 @@ import coop.rchain.casper.util.ConstructDeploy import coop.rchain.crypto.signatures.Signed import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala index 0da1934329e..b59433ea9af 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala @@ -6,7 +6,6 @@ import coop.rchain.casper.helper.{BlockApiFixture, TestNode} import coop.rchain.casper.util.ConstructDeploy import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala index c3030151b4e..c64d428b165 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala @@ -7,7 +7,6 @@ import coop.rchain.casper.protocol.BlockMessage import coop.rchain.casper.util.ConstructDeploy import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala index 40d6f6f3998..7b449b543c0 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala @@ -5,7 +5,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.{ConstructDeploy, RSpaceUtil} import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala index c8258437e39..063e0e49991 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala @@ -12,7 +12,6 @@ import coop.rchain.rspace.ReportingRspace.ReportingComm import 
coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.store.InMemoryStoreManager import coop.rchain.rspace.syntax._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers @@ -20,6 +19,7 @@ import org.scalatest.matchers.should.Matchers class MultiParentCasperReportingSpec extends AnyFlatSpec with Matchers with Inspectors { import coop.rchain.casper.util.GenesisBuilder._ + import coop.rchain.shared.RChainScheduler._ implicit val timeEff: LogicalTime[Effect] = new LogicalTime[Effect] diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala index eba407989ff..1378ff0930e 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala @@ -11,7 +11,6 @@ import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.models.syntax._ import coop.rchain.shared.Base16 import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala index 01232b690e2..b76e7a57710 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala @@ -6,7 +6,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.ConstructDeploy import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ -import 
monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala index 3dc20937896..0fc2c27d62e 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala @@ -2,7 +2,7 @@ package coop.rchain.casper.batch2 import cats.Applicative import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.{BlockDagStorage, DagMessageState, DagRepresentation} @@ -16,210 +16,211 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.models.syntax._ import coop.rchain.shared.Log import fs2.concurrent.Queue -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.mockito.captor.ArgCaptor import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} import org.scalatest.Assertion import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers +import cats.effect.testing.scalatest.AsyncIOSpec +import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap -class BlockReceiverEffectsSpec - extends AsyncFlatSpec - with MonixTaskTest - with Matchers - with Fs2StreamMatchers - with IdiomaticMockito - with IdiomaticMockitoCats - with ArgumentMatchersSugar { - implicit val logEff: Log[Task] = Log.log[Task] - - it should "pass correct block to output stream with calling effectful components" in - withEnv[Task]("root") { - case (incomingQueue, _, outStream, bs, br, bds) => - for { - block <- 
Sync[Task].delay(makeBlock()) - _ <- incomingQueue.enqueue1(block) - outList <- outStream.take(1).compile.toList - } yield { - bs.put(Seq((block.blockHash, block))) wasCalled once - bs.contains(Seq(block.blockHash)) wasCalled once - br.ackReceived(block.blockHash) wasCalled once - dagStorageWasNotModified(bds) - outList shouldBe List(block.blockHash) - } - } - - // Provided to BlockReceiver shard name ("test") is differ from block's shard name ("root" by default) - // So block should be rejected and output stream should never take block - it should "discard block with invalid shard name" in withEnv[Task]("test") { - case (incomingQueue, _, outStream, bs, br, bds) => - for { - block <- Sync[Task].delay(makeBlock()) - _ <- incomingQueue.enqueue1(block) - } yield { - bs.put(*) wasNever called - bs.contains(*) wasNever called - br.ackReceived(*) wasNever called - dagStorageWasNotModified(bds) - outStream should notEmit - } - } - - it should "discard block with invalid block hash" in withEnv[Task]("root") { - case (incomingQueue, _, outStream, bs, br, bds) => - for { - block <- Sync[Task].delay(makeBlock().copy(blockHash = "abc".unsafeHexToByteString)) - _ <- incomingQueue.enqueue1(block) - } yield { - bs.put(*) wasNever called - bs.contains(*) wasNever called - br.ackReceived(*) wasNever called - dagStorageWasNotModified(bds) - outStream should notEmit - } - } - - it should "discard block with invalid signature" in withEnv[Task]("root") { - case (incomingQueue, _, outStream, bs, br, bds) => - for { - block <- Sync[Task].delay(makeBlock().copy(sig = "abc".unsafeHexToByteString)) - _ <- incomingQueue.enqueue1(block) - } yield { - bs.put(*) wasNever called - bs.contains(*) wasNever called - br.ackReceived(*) wasNever called - dagStorageWasNotModified(bds) - outStream should notEmit - } - } - - it should "pass to output blocks with resolved dependencies" in withEnv[Task]("root") { - case (incomingQueue, validatedQueue, outStream, bs, br, bds) => - for { - // Received a 
parent with an empty list of justifications and its child - a1 <- Sync[Task].delay(makeBlock()) - a2 = makeBlock(List(a1.blockHash)) - - // Put the parent and child in the input queue - _ <- incomingQueue.enqueue1(a2) - _ <- incomingQueue.enqueue1(a1) - - // Dependencies of the child (its parent) have not yet been resolved, - // so only the parent goes to the output queue, since it has no dependencies - a1InOutQueue <- outStream.take(1).compile.lastOrError - - // A1 is now validated (e.g. in BlockProcessor) - _ <- validatedQueue.enqueue1(a1) - - // All dependencies of child A2 are resolved, so it also goes to the output queue - a2InOutQueue <- outStream.take(1).compile.lastOrError - } yield { - bs.put(Seq((a1.blockHash, a1))) wasCalled once - bs.put(Seq((a2.blockHash, a2))) wasCalled once - - val bsContainsCaptor = ArgCaptor[Seq[BlockHash]] - bs.contains(bsContainsCaptor) wasCalled 4.times - bsContainsCaptor.values should contain allOf (Seq(a1.blockHash), Seq(a2.blockHash)) - - br.ackReceived(a1.blockHash) wasCalled once - br.ackReceived(a2.blockHash) wasCalled once - - dagStorageWasNotModified(bds) - a1InOutQueue shouldBe a1.blockHash - a2InOutQueue shouldBe a2.blockHash - } - } - - private def blockDagStorageMock[F[_]: Applicative]: BlockDagStorage[F] = { - val emptyDag = DagRepresentation(Set(), Map(), SortedMap(), DagMessageState(), Map()) - mock[BlockDagStorage[F]].getRepresentation returnsF emptyDag - } - - private def blockRetrieverMock[F[_]: Applicative]: BlockRetriever[F] = { - val brMock = mock[BlockRetriever[F]] - brMock.ackReceived(*) returns ().pure[F] - brMock.admitHash(*, *, *) returnsF AdmitHashResult( - Ignore, - broadcastRequest = false, - requestBlock = false - ) - brMock - } - - private def blockStoreMock[F[_]: Sync]: BlockStore[F] = { - val state = Ref.unsafe[F, Map[BlockHash, BlockMessage]](Map()) - val bsMock = mock[BlockStore[F]] - bsMock.contains(*) answers { keys: Seq[BlockHash] => - state.get.map(s => Seq(s.contains(keys.head))) - } - 
bsMock.put(*) answers { kvPairs: Seq[(BlockHash, BlockMessage)] => - state.update(s => kvPairs.foldLeft(s) { case (acc, item) => acc + item }) - } - bsMock - } - - import fs2._ - - private def withEnv[F[_]: Concurrent: Log](shardId: String)( - f: ( - Queue[F, BlockMessage], - Queue[F, BlockMessage], - Stream[F, BlockHash], - BlockStore[F], - BlockRetriever[F], - BlockDagStorage[F] - ) => F[Assertion] - ): F[Assertion] = - for { - state <- Ref[F].of(BlockReceiverState[BlockHash]) - incomingBlockQueue <- Queue.unbounded[F, BlockMessage] - incomingBlockStream = incomingBlockQueue.dequeue - validatedBlocksQueue <- Queue.unbounded[F, BlockMessage] - validatedBlocksStream = validatedBlocksQueue.dequeue - - // Create mock separately for each test - bs = blockStoreMock[F] - br = blockRetrieverMock[F] - bds = blockDagStorageMock[F] - - blockReceiver <- { - implicit val (bsImp, brImp, bdsImp) = (bs, br, bds) - BlockReceiver( - state, - incomingBlockStream, - validatedBlocksStream, - shardId, - incomingBlockQueue.enqueue1 - ) - } - res <- f(incomingBlockQueue, validatedBlocksQueue, blockReceiver, bs, br, bds) - } yield res - - private def makeDefaultBlock = - BlockMessage - .from( - BlockMessageProto( - shardId = "root", - postStateHash = "abc".unsafeHexToByteString, - sigAlgorithm = Secp256k1.name - ) - ) - .right - .get - - private def makeBlock(justifications: List[BlockHash] = List()): BlockMessage = { - val (privateKey, pubKey) = Secp256k1.newKeyPair - val block = - makeDefaultBlock.copy(sender = pubKey.bytes.toByteString, justifications = justifications) - ValidatorIdentity(privateKey).signBlock(block) - } - - private def dagStorageWasNotModified[F[_]](bds: BlockDagStorage[F]) = { - bds.insert(*, *) wasNever called - bds.addDeploy(*) wasNever called - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class BlockReceiverEffectsSpec +// extends AsyncFlatSpec +// with AsyncIOSpec +// with Matchers +// with 
Fs2StreamMatchers +// with IdiomaticMockito +// with IdiomaticMockitoCats +// with ArgumentMatchersSugar { +// implicit val logEff: Log[IO] = Log.log[IO] +// +// it should "pass correct block to output stream with calling effectful components" in +// withEnv[IO]("root") { +// case (incomingQueue, _, outStream, bs, br, bds) => +// for { +// block <- IO.delay(makeBlock()) +// _ <- incomingQueue.enqueue1(block) +// outList <- outStream.take(1).compile.toList +// } yield { +// bs.put(Seq((block.blockHash, block))) wasCalled once +// bs.contains(Seq(block.blockHash)) wasCalled once +// br.ackReceived(block.blockHash) wasCalled once +// dagStorageWasNotModified(bds) +// outList shouldBe List(block.blockHash) +// } +// } +// +// // Provided to BlockReceiver shard name ("test") is differ from block's shard name ("root" by default) +// // So block should be rejected and output stream should never take block +// it should "discard block with invalid shard name" in withEnv[IO]("test") { +// case (incomingQueue, _, outStream, bs, br, bds) => +// for { +// block <- IO.delay(makeBlock()) +// _ <- incomingQueue.enqueue1(block) +// } yield { +// bs.put(*) wasNever called +// bs.contains(*) wasNever called +// br.ackReceived(*) wasNever called +// dagStorageWasNotModified(bds) +// outStream should notEmit +// } +// } +// +// it should "discard block with invalid block hash" in withEnv[IO]("root") { +// case (incomingQueue, _, outStream, bs, br, bds) => +// for { +// block <- IO.delay(makeBlock().copy(blockHash = "abc".unsafeHexToByteString)) +// _ <- incomingQueue.enqueue1(block) +// } yield { +// bs.put(*) wasNever called +// bs.contains(*) wasNever called +// br.ackReceived(*) wasNever called +// dagStorageWasNotModified(bds) +// outStream should notEmit +// } +// } +// +// it should "discard block with invalid signature" in withEnv[IO]("root") { +// case (incomingQueue, _, outStream, bs, br, bds) => +// for { +// block <- IO.delay(makeBlock().copy(sig = 
"abc".unsafeHexToByteString)) +// _ <- incomingQueue.enqueue1(block) +// } yield { +// bs.put(*) wasNever called +// bs.contains(*) wasNever called +// br.ackReceived(*) wasNever called +// dagStorageWasNotModified(bds) +// outStream should notEmit +// } +// } +// +// it should "pass to output blocks with resolved dependencies" in withEnv[IO]("root") { +// case (incomingQueue, validatedQueue, outStream, bs, br, bds) => +// for { +// // Received a parent with an empty list of justifications and its child +// a1 <- IO.delay(makeBlock()) +// a2 = makeBlock(List(a1.blockHash)) +// +// // Put the parent and child in the input queue +// _ <- incomingQueue.enqueue1(a2) +// _ <- incomingQueue.enqueue1(a1) +// +// // Dependencies of the child (its parent) have not yet been resolved, +// // so only the parent goes to the output queue, since it has no dependencies +// a1InOutQueue <- outStream.take(1).compile.lastOrError +// +// // A1 is now validated (e.g. in BlockProcessor) +// _ <- validatedQueue.enqueue1(a1) +// +// // All dependencies of child A2 are resolved, so it also goes to the output queue +// a2InOutQueue <- outStream.take(1).compile.lastOrError +// } yield { +// bs.put(Seq((a1.blockHash, a1))) wasCalled once +// bs.put(Seq((a2.blockHash, a2))) wasCalled once +// +// val bsContainsCaptor = ArgCaptor[Seq[BlockHash]] +// bs.contains(bsContainsCaptor) wasCalled 4.times +// bsContainsCaptor.values should contain allOf (Seq(a1.blockHash), Seq(a2.blockHash)) +// +// br.ackReceived(a1.blockHash) wasCalled once +// br.ackReceived(a2.blockHash) wasCalled once +// +// dagStorageWasNotModified(bds) +// a1InOutQueue shouldBe a1.blockHash +// a2InOutQueue shouldBe a2.blockHash +// } +// } +// +// private def blockDagStorageMock[F[_]: Applicative]: BlockDagStorage[F] = { +// val emptyDag = DagRepresentation(Set(), Map(), SortedMap(), DagMessageState(), Map()) +// mock[BlockDagStorage[F]].getRepresentation returnsF emptyDag +// } +// +// private def blockRetrieverMock[F[_]: 
Applicative]: BlockRetriever[F] = { +// val brMock = mock[BlockRetriever[F]] +// brMock.ackReceived(*) returns ().pure[F] +// brMock.admitHash(*, *, *) returnsF AdmitHashResult( +// Ignore, +// broadcastRequest = false, +// requestBlock = false +// ) +// brMock +// } +// +// private def blockStoreMock[F[_]: Sync]: BlockStore[F] = { +// val state = Ref.unsafe[F, Map[BlockHash, BlockMessage]](Map()) +// val bsMock = mock[BlockStore[F]] +// bsMock.contains(*) answers { keys: Seq[BlockHash] => +// state.get.map(s => Seq(s.contains(keys.head))) +// } +// bsMock.put(*) answers { kvPairs: Seq[(BlockHash, BlockMessage)] => +// state.update(s => kvPairs.foldLeft(s) { case (acc, item) => acc + item }) +// } +// bsMock +// } +// +// import fs2._ +// +// private def withEnv[F[_]: Concurrent: Log](shardId: String)( +// f: ( +// Queue[F, BlockMessage], +// Queue[F, BlockMessage], +// Stream[F, BlockHash], +// BlockStore[F], +// BlockRetriever[F], +// BlockDagStorage[F] +// ) => F[Assertion] +// ): F[Assertion] = +// for { +// state <- Ref[F].of(BlockReceiverState[BlockHash]) +// incomingBlockQueue <- Queue.unbounded[F, BlockMessage] +// incomingBlockStream = incomingBlockQueue.dequeue +// validatedBlocksQueue <- Queue.unbounded[F, BlockMessage] +// validatedBlocksStream = validatedBlocksQueue.dequeue +// +// // Create mock separately for each test +// bs = blockStoreMock[F] +// br = blockRetrieverMock[F] +// bds = blockDagStorageMock[F] +// +// blockReceiver <- { +// implicit val (bsImp, brImp, bdsImp) = (bs, br, bds) +// BlockReceiver( +// state, +// incomingBlockStream, +// validatedBlocksStream, +// shardId, +// incomingBlockQueue.enqueue1 +// ) +// } +// res <- f(incomingBlockQueue, validatedBlocksQueue, blockReceiver, bs, br, bds) +// } yield res +// +// private def makeDefaultBlock = +// BlockMessage +// .from( +// BlockMessageProto( +// shardId = "root", +// postStateHash = "abc".unsafeHexToByteString, +// sigAlgorithm = Secp256k1.name +// ) +// ) +// .right +// .get +// 
+// private def makeBlock(justifications: List[BlockHash] = List()): BlockMessage = { +// val (privateKey, pubKey) = Secp256k1.newKeyPair +// val block = +// makeDefaultBlock.copy(sender = pubKey.bytes.toByteString, justifications = justifications) +// ValidatorIdentity(privateKey).signBlock(block) +// } +// +// private def dagStorageWasNotModified[F[_]](bds: BlockDagStorage[F]) = { +// bds.insert(*, *) wasNever called +// bds.addDeploy(*) wasNever called +// } +//} diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala index 9371a0e0ad4..f04097274d7 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala @@ -1,19 +1,19 @@ package coop.rchain.casper.batch2 +import cats.effect.IO import cats.instances.list._ import cats.syntax.traverse._ import coop.rchain.casper.helper.TestNode import coop.rchain.casper.util.ConstructDeploy.basicDeployData import coop.rchain.casper.util.GenesisBuilder.buildGenesis import coop.rchain.p2p.EffectsTestInstances.LogicalTime -import monix.eval.Task import monix.execution.Scheduler import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class LimitedParentDepthSpec extends AnyFlatSpec with Matchers { implicit val scheduler = Scheduler.fixedPool("limited-parent-depth-scheduler", 2) - implicit val timeEff = new LogicalTime[Task] + implicit val timeEff = new LogicalTime[IO] val genesisContext = buildGenesis() @@ -21,7 +21,7 @@ class LimitedParentDepthSpec extends AnyFlatSpec with Matchers { TestNode.networkEff(genesisContext, networkSize = 2, maxParentDepth = None).use { case nodes @ n1 +: n2 +: Seq() => for { - produceDeploys <- (0 until 6).toList.traverse(i => basicDeployData[Task](i)) + produceDeploys <- (0 until 6).toList.traverse(i => basicDeployData[IO](i)) b1 <- 
n1.propagateBlock(produceDeploys(0))() b2 <- n2.propagateBlock(produceDeploys(1))(nodes: _*) diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala index e46bd65dc9d..e764b43a551 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala @@ -1,12 +1,10 @@ package coop.rchain.casper.batch2 import java.nio.file.Files - -import cats.effect.Concurrent +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import coop.rchain.shared.Log import coop.rchain.store.{KeyValueStoreSut, LmdbStoreManager} -import monix.eval.Task import org.scalacheck.{Arbitrary, Gen} import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AnyFlatSpec @@ -46,35 +44,36 @@ class LmdbKeyValueStoreSpec Gen.listOfN(2000, arbKV).map(_.toMap) } - implicit val log: Log[Task] = new Log.NOPLog[Task]() + implicit val log: Log[IO] = new Log.NOPLog[IO]() + import coop.rchain.shared.RChainScheduler._ it should "put and get data from the store" in { forAll(genData) { expected => - val test = withSut[Task] { sut => + val test = withSut[IO] { sut => for { result <- sut.testPutGet(expected) } yield result shouldBe expected } - test.runSyncUnsafe() + test.unsafeRunSync } } it should "put and get all data from the store" in { forAll(genData) { expected => - val test = withSut[Task] { sut => + val test = withSut[IO] { sut => for { result <- sut.testPutIterate(expected) } yield result shouldBe expected } - test.runSyncUnsafe() + test.unsafeRunSync } } it should "not have deleted keys in the store" in { forAll(genData) { input => - val test = withSut[Task] { sut => + val test = withSut[IO] { sut => val allKeys = input.keysIterator.toVector // Take some keys for deletion val (getKeys, deleteKeys) = allKeys.splitAt(allKeys.size / 2) @@ -87,7 +86,7 @@ class LmdbKeyValueStoreSpec } 
yield result shouldBe expected } - test.runSyncUnsafe() + test.unsafeRunSync } } diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala index 720eb6bd3b7..07f52a93bce 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala @@ -9,7 +9,6 @@ import coop.rchain.casper.util.GenesisBuilder.buildGenesis import coop.rchain.crypto.signatures.Signed import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib.effectTest -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala index f3e937780e8..c207f3b8b13 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.batch2 -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -27,8 +27,6 @@ import coop.rchain.p2p.EffectsTestInstances.LogStub import coop.rchain.rspace.syntax._ import coop.rchain.shared.Time import coop.rchain.shared.scalatestcontrib._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -47,11 +45,13 @@ class ValidateTest import InvalidBlock._ import ValidBlock._ - implicit val log: LogStub[Task] = new LogStub[Task] - private val SHARD_ID = "root-shard" - implicit val span: Span[Task] = 
NoopSpan[Task]() - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP[Task]() - implicit val s = Sync[Task] + implicit val log: LogStub[IO] = new LogStub + private val SHARD_ID = "root-shard" + implicit val span: Span[IO] = NoopSpan[IO]() + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val s = Sync[IO] + + import coop.rchain.shared.RChainScheduler._ override def beforeEach(): Unit = { log.reset() @@ -103,7 +103,7 @@ class ValidateTest def signedBlock( chain: Vector[BlockMessage], i: Int - )(implicit sk: PrivateKey, blockDagStorage: BlockDagStorage[Task]): Task[BlockMessage] = { + )(implicit sk: PrivateKey, blockDagStorage: BlockDagStorage[IO]): IO[BlockMessage] = { val pk = Secp256k1.toPublic(sk) val block = chain(i) for { @@ -117,17 +117,17 @@ class ValidateTest "Block signature validation" should "return false on unknown algorithms" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](2) + chain <- createChain[IO](2) unknownAlgorithm = "unknownAlgorithm" rsa = "RSA" block0 = chain(0).copy(sigAlgorithm = unknownAlgorithm) block1 = chain(1).copy(sigAlgorithm = rsa) - _ <- Validate.blockSignature[Task](block0) shouldBeF false + _ <- Validate.blockSignature[IO](block0) shouldBeF false _ = log.warns.last .contains(s"signature algorithm $unknownAlgorithm is unsupported") should be( true ) - _ <- Validate.blockSignature[Task](block1) shouldBeF false + _ <- Validate.blockSignature[IO](block1) shouldBeF false result = log.warns.last.contains(s"signature algorithm $rsa is unsupported") should be(true) } yield result } @@ -136,7 +136,7 @@ class ValidateTest implicit blockStore => implicit blockDagStorage => implicit val (sk, _) = Secp256k1.newKeyPair for { - chain <- createChain[Task](6) + chain <- createChain[IO](6) (_, wrongPk) = Secp256k1.newKeyPair empty = ByteString.EMPTY invalidKey = "abcdef1234567890".unsafeHexToByteString @@ -147,7 +147,7 @@ class ValidateTest block4 <- 
signedBlock(chain, 4).map(_.copy(sig = invalidKey)) block5 <- signedBlock(chain, 5).map(_.copy(sig = block0.sig)) //wrong sig blocks = Vector(block0, block1, block2, block3, block4, block5) - _ <- blocks.existsM[Task](Validate.blockSignature[Task]) shouldBeF false + _ <- blocks.existsM[IO](Validate.blockSignature[IO]) shouldBeF false _ = log.warns.size should be(blocks.length) result = log.warns.forall(_.contains("signature is invalid")) should be(true) } yield result @@ -158,12 +158,12 @@ class ValidateTest val n = 6 implicit val (sk, pk) = Secp256k1.newKeyPair for { - chain <- createChain[Task](n) - condition <- (0 until n).toList.forallM[Task] { i => + chain <- createChain[IO](n) + condition <- (0 until n).toList.forallM[IO] { i => val chainWithSender = chain.map(_.copy(sender = pk.bytes.toByteString)) for { block <- signedBlock(chainWithSender, i) - result <- Validate.blockSignature[Task](block) + result <- Validate.blockSignature[IO](block) } yield result } _ = condition should be(true) @@ -174,13 +174,13 @@ class ValidateTest "Block number validation" should "only accept 0 as the number for a block with no parents" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](1) + chain <- createChain[IO](1) block = chain(0) _ <- Validate - .blockNumber[Task](block.copy(blockNumber = 1)) shouldBeF Left( + .blockNumber[IO](block.copy(blockNumber = 1)) shouldBeF Left( InvalidBlockNumber ) - _ <- Validate.blockNumber[Task](block) shouldBeF Right(Valid) + _ <- Validate.blockNumber[IO](block) shouldBeF Right(Valid) _ = log.warns.size should be(1) result = log.warns.head.contains("not zero, but block has no parents") should be(true) } yield result @@ -189,13 +189,13 @@ class ValidateTest it should "return false for non-sequential numbering" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](2) + chain <- createChain[IO](2) block = chain(1) _ <- Validate - 
.blockNumber[Task](block.copy(blockNumber = 17)) shouldBeF Left( + .blockNumber[IO](block.copy(blockNumber = 17)) shouldBeF Left( InvalidBlockNumber ) - _ <- Validate.blockNumber[Task](block) shouldBeF Right(Valid) + _ <- Validate.blockNumber[IO](block) shouldBeF Right(Valid) _ = log.warns.size should be(1) result = log.warns.head.contains("is not one more than maximum parent number") should be( true @@ -207,9 +207,9 @@ class ValidateTest implicit blockStore => implicit blockDagStorage => val n = 6 for { - chain <- createChain[Task](n) - _ <- chain.forallM[Task] { b => - Validate.blockNumber[Task](b).map(_ == Right(Valid)) + chain <- createChain[IO](n) + _ <- chain.forallM[IO] { b => + Validate.blockNumber[IO](b).map(_ == Right(Valid)) } shouldBeF true result = log.warns should be(Nil) } yield result @@ -220,7 +220,7 @@ class ValidateTest def createBlockWithNumber( validator: Validator, parents: Seq[BlockMessage] = Nil - ): Task[BlockMessage] = + ): IO[BlockMessage] = for { block <- createValidatorBlock(parents, validator, Map(), shardId = "") } yield block @@ -228,15 +228,15 @@ class ValidateTest def genSender(id: Int) = List.fill(65)(id.toByte).toArray.toByteString for { - genesis <- createChain[Task](8) // Note we need to create a useless chain to satisfy the assert in TopoSort + genesis <- createChain[IO](8) // Note we need to create a useless chain to satisfy the assert in TopoSort v1 = genSender(1) v2 = genSender(2) b1 <- createBlockWithNumber(v1) b2 <- createBlockWithNumber(v2) b3 <- createBlockWithNumber(v2, Seq(b1, b2)) - s1 <- Validate.blockNumber[Task](b3) + s1 <- Validate.blockNumber[IO](b3) _ = s1 shouldBe Right(Valid) - s2 <- Validate.blockNumber[Task](b3.copy(blockNumber = 4)) + s2 <- Validate.blockNumber[IO](b3.copy(blockNumber = 4)) _ = s2 shouldBe Left(InvalidBlockNumber) } yield () } @@ -244,17 +244,17 @@ class ValidateTest "Future deploy validation" should "work" in withStorage { implicit blockStore => implicit blockDagStorage => for { - deploy 
<- ConstructDeploy.basicProcessedDeploy[Task](0) + deploy <- ConstructDeploy.basicProcessedDeploy[IO](0) deployData = deploy.deploy.data updatedDeployData = Signed( deployData.copy(validAfterBlockNumber = -1), Secp256k1, ConstructDeploy.defaultSec ) - block <- createGenesis[Task]( + block <- createGenesis[IO]( deploys = Seq(deploy.copy(deploy = updatedDeployData)) ) - status <- Validate.futureTransaction[Task](block) + status <- Validate.futureTransaction[IO](block) _ = status should be(Right(Valid)) } yield () } @@ -262,17 +262,17 @@ class ValidateTest "Future deploy validation" should "not accept blocks with a deploy for a future block number" in withStorage { implicit blockStore => implicit blockDagStorage => for { - deploy <- ConstructDeploy.basicProcessedDeploy[Task](0) + deploy <- ConstructDeploy.basicProcessedDeploy[IO](0) deployData = deploy.deploy.data updatedDeployData = Signed( deployData.copy(validAfterBlockNumber = Long.MaxValue), Secp256k1, ConstructDeploy.defaultSec ) - blockWithFutureDeploy <- createGenesis[Task]( + blockWithFutureDeploy <- createGenesis[IO]( deploys = Seq(deploy.copy(deploy = updatedDeployData)) ) - status <- Validate.futureTransaction[Task](blockWithFutureDeploy) + status <- Validate.futureTransaction[IO](blockWithFutureDeploy) _ = status should be(Left(ContainsFutureDeploy)) } yield () } @@ -280,11 +280,11 @@ class ValidateTest "Deploy expiration validation" should "work" in withStorage { implicit blockStore => implicit blockDagStorage => for { - deploy <- ConstructDeploy.basicProcessedDeploy[Task](0) - block <- createGenesis[Task]( + deploy <- ConstructDeploy.basicProcessedDeploy[IO](0) + block <- createGenesis[IO]( deploys = Seq(deploy) ) - status <- Validate.transactionExpiration[Task](block, expirationThreshold = 10) + status <- Validate.transactionExpiration[IO](block, expirationThreshold = 10) _ = status should be(Right(Valid)) } yield () } @@ -292,18 +292,18 @@ class ValidateTest "Deploy expiration validation" should "not 
accept blocks with a deploy that is expired" in withStorage { implicit blockStore => implicit blockDagStorage => for { - deploy <- ConstructDeploy.basicProcessedDeploy[Task](0) + deploy <- ConstructDeploy.basicProcessedDeploy[IO](0) deployData = deploy.deploy.data updatedDeployData = Signed( deployData.copy(validAfterBlockNumber = Long.MinValue), Secp256k1, ConstructDeploy.defaultSec ) - blockWithExpiredDeploy <- createGenesis[Task]( + blockWithExpiredDeploy <- createGenesis[IO]( deploys = Seq(deploy.copy(deploy = updatedDeployData)) ) status <- Validate - .transactionExpiration[Task](blockWithExpiredDeploy, expirationThreshold = 10) + .transactionExpiration[IO](blockWithExpiredDeploy, expirationThreshold = 10) _ = status should be(Left(ContainsExpiredDeploy)) } yield () } @@ -311,10 +311,10 @@ class ValidateTest it should "return false for non-sequential numbering" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](2) + chain <- createChain[IO](2) block = chain(1) _ <- Validate - .sequenceNumber[Task](block.copy(seqNum = 1)) shouldBeF Left( + .sequenceNumber[IO](block.copy(seqNum = 1)) shouldBeF Left( InvalidSequenceNumber ) result = log.warns.size should be(1) @@ -326,11 +326,11 @@ class ValidateTest val n = 20 val validatorCount = 3 for { - chain <- createChainWithRoundRobinValidators[Task](n, validatorCount) - _ <- chain.forallM[Task]( + chain <- createChainWithRoundRobinValidators[IO](n, validatorCount) + _ <- chain.forallM[IO]( block => for { - result <- Validate.sequenceNumber[Task](block) + result <- Validate.sequenceNumber[IO](block) } yield result == Right(Valid) ) shouldBeF true result = log.warns should be(Nil) @@ -340,21 +340,21 @@ class ValidateTest "Repeat deploy validation" should "return valid for empty blocks" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](2) + chain <- createChain[IO](2) block = chain(0) block2 = chain(1) - _ <- 
Validate.repeatDeploy[Task](block, 50) shouldBeF Right(Valid) - _ <- Validate.repeatDeploy[Task](block2, 50) shouldBeF Right(Valid) + _ <- Validate.repeatDeploy[IO](block, 50) shouldBeF Right(Valid) + _ <- Validate.repeatDeploy[IO](block2, 50) shouldBeF Right(Valid) } yield () } it should "not accept blocks with a repeated deploy" in withStorage { implicit blockStore => implicit blockDagStorage => for { - deploy <- ConstructDeploy.basicProcessedDeploy[Task](0) - genesis <- createGenesis[Task](deploys = Seq(deploy)) - block1 <- createBlock[Task](justifications = Seq(genesis.blockHash), deploys = Seq(deploy)) - _ <- Validate.repeatDeploy[Task](block1, 50) shouldBeF Left( + deploy <- ConstructDeploy.basicProcessedDeploy[IO](0) + genesis <- createGenesis[IO](deploys = Seq(deploy)) + block1 <- createBlock[IO](justifications = Seq(genesis.blockHash), deploys = Seq(deploy)) + _ <- Validate.repeatDeploy[IO](block1, 50) shouldBeF Left( InvalidRepeatDeploy ) } yield () @@ -367,14 +367,14 @@ class ValidateTest "Block summary validation" should "short circuit after first invalidity" in withStorage { implicit blockStore => implicit blockDagStorage => for { - chain <- createChain[Task](2) + chain <- createChain[IO](2) block = chain(1) (sk, pk) = Secp256k1.newKeyPair signedBlock = ValidatorIdentity(sk).signBlock( block.copy(blockNumber = 17).copy(seqNum = 1) ) - _ <- Validate.blockSummary[Task]( + _ <- Validate.blockSummary[IO]( signedBlock, "root", Int.MaxValue @@ -392,15 +392,15 @@ class ValidateTest }.toMap for { - b0 <- createGenesis[Task](bonds = bonds) - b1 <- createValidatorBlock[Task](Seq(b0), v0, bonds, shardId = SHARD_ID) - b2 <- createValidatorBlock[Task](Seq(b1, b0), v0, bonds, shardId = SHARD_ID) - b3 <- createValidatorBlock[Task](Seq(b2, b0), v1, bonds, shardId = SHARD_ID) - b4 <- createValidatorBlock[Task](Seq(b2, b3), v1, bonds, shardId = SHARD_ID) - _ <- List(b0, b1, b2, b3, b4).forallM[Task]( + b0 <- createGenesis[IO](bonds = bonds) + b1 <- 
createValidatorBlock[IO](Seq(b0), v0, bonds, shardId = SHARD_ID) + b2 <- createValidatorBlock[IO](Seq(b1, b0), v0, bonds, shardId = SHARD_ID) + b3 <- createValidatorBlock[IO](Seq(b2, b0), v1, bonds, shardId = SHARD_ID) + b4 <- createValidatorBlock[IO](Seq(b2, b3), v1, bonds, shardId = SHARD_ID) + _ <- List(b0, b1, b2, b3, b4).forallM[IO]( block => for { - result <- Validate.justificationRegressions[Task](block) + result <- Validate.justificationRegressions[IO](block) } yield result == Right(Valid) ) shouldBeF true // The justification block for validator 0 should point to b2 or above. @@ -410,7 +410,7 @@ class ValidateTest setJustifications = justificationsWithRegression.some, hashF = (ProtoUtil.hashBlock _).some ) - result <- Validate.justificationRegressions[Task](blockWithJustificationRegression) shouldBeF + result <- Validate.justificationRegressions[IO](blockWithJustificationRegression) shouldBeF Left(JustificationRegression) } yield result } @@ -424,12 +424,12 @@ class ValidateTest }.toMap for { - b0 <- createGenesis[Task](bonds = bonds) - b1 <- createValidatorBlock[Task](Seq(b0), v0, bonds, 1, shardId = SHARD_ID) - b2 <- createValidatorBlock[Task](Seq(b1, b0), v1, bonds, 1, shardId = SHARD_ID) - b3 <- createValidatorBlock[Task](Seq(b1, b2), v0, bonds, 2, shardId = SHARD_ID) - b4 <- createValidatorBlock[Task](Seq(b3, b2), v1, bonds, 2, shardId = SHARD_ID) - b5 <- createValidatorBlock[Task]( + b0 <- createGenesis[IO](bonds = bonds) + b1 <- createValidatorBlock[IO](Seq(b0), v0, bonds, 1, shardId = SHARD_ID) + b2 <- createValidatorBlock[IO](Seq(b1, b0), v1, bonds, 1, shardId = SHARD_ID) + b3 <- createValidatorBlock[IO](Seq(b1, b2), v0, bonds, 2, shardId = SHARD_ID) + b4 <- createValidatorBlock[IO](Seq(b3, b2), v1, bonds, 2, shardId = SHARD_ID) + b5 <- createValidatorBlock[IO]( Seq(b3, b4), v0, bonds, @@ -443,7 +443,7 @@ class ValidateTest setValidator = v1.some, setJustifications = justificationsWithInvalidBlock.some ) - _ <- 
Validate.justificationRegressions[Task](blockWithInvalidJustification) shouldBeF + _ <- Validate.justificationRegressions[IO](blockWithInvalidJustification) shouldBeF Right(Valid) } yield () } @@ -455,25 +455,26 @@ class ValidateTest val storageDirectory = Files.createTempDirectory(s"hash-set-casper-test-genesis-") for { - kvm <- mkTestRNodeStoreManager[Task](storageDirectory) + kvm <- mkTestRNodeStoreManager[IO](storageDirectory) rStore <- kvm.rSpaceStores mStore <- RuntimeManager.mergeableStore(kvm) - runtimeManager <- RuntimeManager[Task]( + runtimeManager <- RuntimeManager[IO]( rStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName( genesis.shardId ), - RuntimeManager.noOpExecutionTracker[Task] + RuntimeManager.noOpExecutionTracker[IO], + rholangEC ) result <- { implicit val rm = runtimeManager for { - _ <- InterpreterUtil.validateBlockCheckpointLegacy[Task](genesis) - _ <- Validate.bondsCache[Task](genesis) shouldBeF Right(Valid) + _ <- InterpreterUtil.validateBlockCheckpointLegacy[IO](genesis) + _ <- Validate.bondsCache[IO](genesis) shouldBeF Right(Valid) modifiedBonds = Map.empty[Validator, Long] modifiedGenesis = genesis.copy(bonds = modifiedBonds) - result <- Validate.bondsCache[Task](modifiedGenesis) shouldBeF Left(InvalidBondsCache) + result <- Validate.bondsCache[IO](modifiedGenesis) shouldBeF Left(InvalidBondsCache) } yield result } } yield result @@ -489,12 +490,12 @@ class ValidateTest seqNum = getLatestSeqNum(sender, dag) + 1L genesis = ValidatorIdentity(sk) .signBlock(context.genesisBlock.copy(seqNum = seqNum)) - _ <- Validate.formatOfFields[Task](genesis) shouldBeF true - _ <- Validate.formatOfFields[Task](genesis.copy(blockHash = ByteString.EMPTY)) shouldBeF false - _ <- Validate.formatOfFields[Task](genesis.copy(sig = ByteString.EMPTY)) shouldBeF false - _ <- Validate.formatOfFields[Task](genesis.copy(sigAlgorithm = "")) shouldBeF false - _ <- Validate.formatOfFields[Task](genesis.copy(shardId = "")) shouldBeF false - _ <- 
Validate.formatOfFields[Task]( + _ <- Validate.formatOfFields[IO](genesis) shouldBeF true + _ <- Validate.formatOfFields[IO](genesis.copy(blockHash = ByteString.EMPTY)) shouldBeF false + _ <- Validate.formatOfFields[IO](genesis.copy(sig = ByteString.EMPTY)) shouldBeF false + _ <- Validate.formatOfFields[IO](genesis.copy(sigAlgorithm = "")) shouldBeF false + _ <- Validate.formatOfFields[IO](genesis.copy(shardId = "")) shouldBeF false + _ <- Validate.formatOfFields[IO]( genesis.copy(postStateHash = ByteString.EMPTY) ) shouldBeF false } yield () @@ -508,14 +509,14 @@ class ValidateTest val blockValidHash = block.copy(blockHash = hash) // Test valid block hash - val hashValid = Validate.blockHash[Task](blockValidHash).runSyncUnsafe() + val hashValid = Validate.blockHash[IO](blockValidHash).unsafeRunSync hashValid shouldBe true val blockInValidHash = block.copy(blockHash = ByteString.copyFromUtf8("123")) // Test invalid block hash - val hashInValid = Validate.blockHash[Task](blockInValidHash).runSyncUnsafe() + val hashInValid = Validate.blockHash[IO](blockInValidHash).unsafeRunSync hashInValid shouldBe false } @@ -530,7 +531,7 @@ class ValidateTest // Expected one of hard-coded block versions supported by this version of RNode software val expectedValid = BlockVersion.Supported.contains(version) // Actual validation - val actualValid = Validate.version[Task](blockWithVersion).runSyncUnsafe() + val actualValid = Validate.version[IO](blockWithVersion).unsafeRunSync actualValid shouldBe expectedValid } diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala index 6f80f561cd7..c6d1079af17 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.engine import 
cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Timer} +import cats.effect.{Concurrent, IO, Timer} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsBlockRequester.ST @@ -12,7 +12,6 @@ import coop.rchain.models.blockImplicits import coop.rchain.shared.Log import fs2.Stream import fs2.concurrent.Queue -import monix.eval.Task import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -20,6 +19,8 @@ import scala.concurrent.duration._ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2StreamMatchers { + import coop.rchain.shared.RChainScheduler._ + def mkHash(s: String) = ByteString.copyFromUtf8(s) def getBlock(hash: BlockHash, number: Long, latestMessages: Seq[BlockHash]) = { @@ -135,9 +136,7 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str } yield () } - implicit val logEff: Log[Task] = Log.log[Task] - - import monix.execution.Scheduler.Implicits.global + implicit val logEff: Log[IO] = Log.log[IO] /** * Test runner @@ -152,11 +151,11 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str startBlock: BlockMessage, runProcessingStream: Boolean = true, requestTimeout: FiniteDuration = 10.days - )(test: Mock[Task] => Task[Unit]): Unit = - createMock[Task](startBlock, requestTimeout) { mock => + )(test: Mock[IO] => IO[Unit]): Unit = + createMock[IO](startBlock, requestTimeout) { mock => if (!runProcessingStream) test(mock) else (Stream.eval(test(mock)) concurrently mock.stream).compile.drain - }.runSyncUnsafe(timeout = 10.seconds) + }.unsafeRunTimed(10.seconds) def asMap(bs: BlockMessage*): Map[BlockHash, BlockMessage] = bs.map(b => (b.blockHash, b)).toMap @@ -373,8 +372,8 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * NOTE: We don't have any abstraction to test time in execution (with monix Task or cats IO). 
* We have LogicalTime and DiscreteTime which are just wrappers to get different "milliseconds" but are totally - * disconnected from Task/IO execution notion of time (e.g. Task.sleep). - * Other testing instances of Time are the same as in normal node execution (using Task.timer). + * disconnected from Task/IO execution notion of time (e.g. IO.sleep). + * Other testing instances of Time are the same as in normal node execution (using IO.timer). * https://github.com/rchain/rchain/issues/3001 */ it should "re-send request after timeout" in dagFromBlock( @@ -385,7 +384,7 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str import mock._ for { // Wait for timeout to expire - _ <- stream.compile.drain.timeout(300.millis).onErrorHandle(_ => ()) + _ <- stream.compile.drain.timeout(300.millis).attempt // Wait for two requests reqs <- sentRequests.take(2).compile.toList diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala index e5b9c04b338..e5252d33839 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, Timer} +import cats.effect.{Concurrent, IO, Timer} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsTupleSpaceRequester.{ST, StatePartPath} @@ -12,7 +12,6 @@ import coop.rchain.rspace.state.{RSpaceImporter, StateValidationError} import coop.rchain.shared.Log import fs2.Stream import fs2.concurrent.Queue -import monix.eval.Task import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scodec.bits.ByteVector @@ -159,9 +158,9 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str } yield () } - 
implicit val logEff: Log[Task] = Log.log[Task] + implicit val logEff: Log[IO] = Log.log[IO] - import monix.execution.Scheduler.Implicits.global + import coop.rchain.shared.RChainScheduler._ /** * Test runner @@ -173,12 +172,12 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * @param test test specification */ def createBootstrapTest(runProcessingStream: Boolean, requestTimeout: FiniteDuration = 10.days)( - test: Mock[Task] => Task[Unit] + test: Mock[IO] => IO[Unit] ): Unit = - createMock[Task](requestTimeout) { mock => + createMock[IO](requestTimeout) { mock => if (!runProcessingStream) test(mock) else (Stream.eval(test(mock)) concurrently mock.stream).compile.drain - }.runSyncUnsafe(timeout = 10.seconds) + }.unsafeRunSync val bootstrapTest = createBootstrapTest(runProcessingStream = true) _ @@ -306,8 +305,8 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * NOTE: We don't have any abstraction to test time in execution (with monix Task or cats IO). * We have LogicalTime and DiscreteTime which are just wrappers to get different "milliseconds" but are totally - * disconnected from Task/IO execution notion of time (e.g. Task.sleep). - * Other testing instances of Time are the same as in normal node execution (using Task.timer). + * disconnected from Task/IO execution notion of time (e.g. IO.sleep). + * Other testing instances of Time are the same as in normal node execution (using IO.timer). 
* https://github.com/rchain/rchain/issues/3001 */ it should "re-send request after timeout" in createBootstrapTest( @@ -317,7 +316,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str import mock._ for { // Wait for timeout to expire - _ <- stream.compile.drain.timeout(300.millis).onErrorHandle(_ => ()) + _ <- stream.compile.drain.timeout(300.millis).attempt // Wait for two requests reqs <- sentRequests.take(2).compile.toList diff --git a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala index e5a46453904..fef6943a2b2 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.engine +import cats.effect.IO import cats.effect.concurrent.Ref import cats.syntax.all._ import com.google.protobuf.ByteString @@ -21,27 +22,27 @@ import coop.rchain.p2p.EffectsTestInstances.{ LogicalTime, TransportLayerStub } -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import coop.rchain.shared.Log import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { + import coop.rchain.shared.RChainScheduler._ val local: PeerNode = peerNode("src", 40400) - implicit val log = new LogStub[Task] - implicit val metrics = new Metrics.MetricsNOP[Task] - implicit val currentRequests: RequestedBlocks[Task] = - Ref.unsafe[Task, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) - implicit val connectionsCell: ConnectionsCell[Task] = - Ref.unsafe[Task, Connections](List(local)) - implicit val transportLayer = new TransportLayerStub[Task] - implicit val rpConf = createRPConfAsk[Task](local) - implicit val time = new 
LogicalTime[Task] - implicit val commUtil = CommUtil.of[Task] - implicit val blockRetriever = BlockRetriever.of[Task] + implicit val log: Log[IO] = new LogStub + implicit val metrics = new Metrics.MetricsNOP[IO] + implicit val currentRequests: RequestedBlocks[IO] = + Ref.unsafe[IO, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) + implicit val connectionsCell: ConnectionsCell[IO] = + Ref.unsafe[IO, Connections](List(local)) + implicit val transportLayer = new TransportLayerStub[IO] + implicit val rpConf = createRPConfAsk[IO](local) + implicit val time = new LogicalTime[IO] + implicit val commUtil = CommUtil.of[IO] + implicit val blockRetriever = BlockRetriever.of[IO] val hash = ByteString.copyFrom("hash", "UTF-8") val hb = HasBlock(hash) @@ -54,7 +55,7 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with PeerNode(NodeIdentifier(name.getBytes), endpoint(port)) private def alwaysSuccess: PeerNode => Protocol => CommErr[Unit] = kp(kp(Right(()))) - private def alwaysDoNotIgnoreF: BlockHash => Task[Boolean] = _ => false.pure[Task] + private def alwaysDoNotIgnoreF: BlockHash => IO[Boolean] = _ => false.pure[IO] override def beforeEach(): Unit = { transportLayer.reset() transportLayer.setResponses(alwaysSuccess) @@ -70,13 +71,13 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with Map( hash -> RequestState(timestamp = System.currentTimeMillis, waitingList = List(otherPeer)) ) - currentRequests.set(requestStateBefore).runSyncUnsafe() + currentRequests.set(requestStateBefore).unsafeRunSync // when - NodeRunning.handleHasBlockMessage[Task](sender, hb.hash)(alwaysDoNotIgnoreF).runSyncUnsafe() + NodeRunning.handleHasBlockMessage[IO](sender, hb.hash)(alwaysDoNotIgnoreF).unsafeRunSync // then transportLayer.requests shouldBe empty - val requestStateAfter = currentRequests.get.runSyncUnsafe().get(hash).get + val requestStateAfter = currentRequests.get.unsafeRunSync.get(hash).get 
requestStateAfter.waitingList.size should be(2) } @@ -91,9 +92,9 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with waitingList = List.empty ) ) - currentRequests.set(requestStateBefore).runSyncUnsafe() + currentRequests.set(requestStateBefore).unsafeRunSync // when - NodeRunning.handleHasBlockMessage[Task](sender, hb.hash)(alwaysDoNotIgnoreF).runSyncUnsafe() + NodeRunning.handleHasBlockMessage[IO](sender, hb.hash)(alwaysDoNotIgnoreF).unsafeRunSync // then val (recipient, msg) = transportLayer.getRequest(0) // assert RequestState @@ -104,7 +105,7 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with recipient shouldBe sender transportLayer.requests.size shouldBe 1 // assert RequestState information stored - val requestStateAfter = currentRequests.get.runSyncUnsafe().get(hash).get + val requestStateAfter = currentRequests.get.unsafeRunSync.get(hash).get requestStateAfter.waitingList should be(List(sender)) } } @@ -114,9 +115,9 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with // given val sender = peerNode("somePeer", 40400) val requestStateBefore = Map.empty[BlockHash, RequestState] - currentRequests.set(requestStateBefore).runSyncUnsafe() + currentRequests.set(requestStateBefore).unsafeRunSync // when - NodeRunning.handleHasBlockMessage[Task](sender, hb.hash)(alwaysDoNotIgnoreF).runSyncUnsafe() + NodeRunning.handleHasBlockMessage[IO](sender, hb.hash)(alwaysDoNotIgnoreF).unsafeRunSync // then val (recipient, msg) = transportLayer.getRequest(0) // assert RequestState @@ -127,7 +128,7 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with recipient shouldBe sender transportLayer.requests.size shouldBe 1 // assert RequestState informaton stored - val requestStateAfter = currentRequests.get.runSyncUnsafe().get(hash).get + val requestStateAfter = currentRequests.get.unsafeRunSync.get(hash).get requestStateAfter.waitingList should be(List(sender)) } 
@@ -135,10 +136,10 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with describe("if there is already an entry in the RequestState blocks") { it("should ignore if peer on the RequestState peers list") { // given - val sender = peerNode("somePeer", 40400) - val casperContains: BlockHash => Task[Boolean] = _ => true.pure[Task] + val sender = peerNode("somePeer", 40400) + val casperContains: BlockHash => IO[Boolean] = _ => true.pure[IO] // when - NodeRunning.handleHasBlockMessage[Task](sender, hb.hash)(casperContains).runSyncUnsafe() + NodeRunning.handleHasBlockMessage[IO](sender, hb.hash)(casperContains).unsafeRunSync // then transportLayer.requests shouldBe empty } @@ -150,9 +151,9 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with describe("handleHasBlock") { it("should not call send hash to BlockReceiver if it is ignorable hash") { // given - val casperContains: BlockHash => Task[Boolean] = _ => true.pure[Task] + val casperContains: BlockHash => IO[Boolean] = _ => true.pure[IO] // when - NodeRunning.handleHasBlockMessage[Task](null, hb.hash)(casperContains).runSyncUnsafe() + NodeRunning.handleHasBlockMessage[IO](null, hb.hash)(casperContains).unsafeRunSync // then transportLayer.requests shouldBe empty } diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala index 35f5e61b795..19260da1bcc 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala @@ -12,7 +12,6 @@ import coop.rchain.models.syntax._ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.Inspectors import org.scalatest.flatspec.AnyFlatSpec import 
org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala index 1c17c46cb4a..0f6c0fe7858 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.genesis import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, ContextShift, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.syntax._ @@ -20,11 +20,10 @@ import coop.rchain.p2p.EffectsTestInstances.LogStub import coop.rchain.rspace.syntax._ import coop.rchain.shared.PathOps.RichPath import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ import java.io.PrintWriter import java.nio.file.{Files, Path} @@ -32,8 +31,8 @@ import java.nio.file.{Files, Path} class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with BlockDagStorageFixture { import GenesisTest._ - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val span: Span[Task] = NoopSpan[Task]() + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val span: Span[IO] = NoopSpan[IO]() val validators = Seq( "299670c52849f1aa82e8dfe5be872c16b600bf09cc8983e04b903411358f2de6", @@ -70,17 +69,17 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block } "Genesis.fromInputFiles" should "generate random validators when no bonds file is given" in taskTest( - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + runtimeManager: 
RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => for { _ <- fromInputFiles()( genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ) _ = log.warns.count( @@ -93,18 +92,18 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block ) it should "tell when bonds file does not exist" in taskTest( - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + runtimeManager: RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => val nonExistingPath = storageLocation.resolve("not/a/real/file").toString for { genesisAttempt <- fromInputFiles(maybeBondsPath = Some(nonExistingPath))( genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ).attempt } yield log.warns.exists(_.contains("BONDS FILE NOT FOUND")) @@ -112,11 +111,11 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block ) it should "fail with error when bonds file cannot be parsed" in taskTest( - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + runtimeManager: RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => val badBondsFile = genesisPath.resolve("misformatted.txt").toString @@ -128,7 +127,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesisAttempt <- fromInputFiles(maybeBondsPath = Some(badBondsFile))( genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ).attempt } yield genesisAttempt.left.value.getMessage should include( @@ -138,11 +137,11 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block ) it should "create a genesis block with the right bonds when a proper bonds file is given" in taskTest( - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + 
runtimeManager: RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => val bondsFile = genesisPath.resolve("givenBonds.txt").toString printBonds(bondsFile) @@ -151,7 +150,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles(maybeBondsPath = Some(bondsFile))( genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ) bonds = genesis.bonds.toList @@ -166,11 +165,11 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block it should "create a valid genesis block" in withStorage { implicit blockStore => implicit blockDagStorage => - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + runtimeManager: RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => implicit val rm = runtimeManager implicit val logEff = log @@ -178,22 +177,22 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles()( genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ) - _ <- BlockStore[Task].put(genesis.blockHash, genesis) + _ <- BlockStore[IO].put(genesis.blockHash, genesis) _ <- blockDagStorage.insertGenesis(genesis) - postState <- InterpreterUtil.validateBlockCheckpointLegacy[Task](genesis) + postState <- InterpreterUtil.validateBlockCheckpointLegacy[IO](genesis) } yield postState.value shouldBe true } } it should "detect an existing bonds file in the default location" in taskTest( - withGenResources[Task] { + withGenResources[IO] { ( - runtimeManager: RuntimeManager[Task], + runtimeManager: RuntimeManager[IO], genesisPath: Path, - log: LogStub[Task] + log: LogStub[IO] ) => val bondsFile = genesisPath.resolve("bonds.txt").toString printBonds(bondsFile) @@ -202,7 +201,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles()( 
genesisPath, runtimeManager, - implicitly[Concurrent[Task]], + implicitly[Concurrent[IO]], log ) bonds = genesis.bonds.toList @@ -238,15 +237,15 @@ object GenesisTest { blockNumber: Long = 0 )( implicit genesisPath: Path, - runtimeManager: RuntimeManager[Task], - c: Concurrent[Task], - log: LogStub[Task] - ): Task[BlockMessage] = + runtimeManager: RuntimeManager[IO], + c: Concurrent[IO], + log: LogStub[IO] + ): IO[BlockMessage] = for { - vaults <- VaultParser.parse[Task]( + vaults <- VaultParser.parse[IO]( maybeVaultsPath.getOrElse(genesisPath + "/wallets.txt") ) - bonds <- BondsParser.parse[Task]( + bonds <- BondsParser.parse[IO]( maybeBondsPath.getOrElse(genesisPath + "/bonds.txt"), autogenShardSize ) @@ -294,7 +293,8 @@ object GenesisTest { rStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(rchainShardId), - t + t, + rholangEC ) result <- body(runtimeManager, genesisPath, log) _ <- Sync[F].delay { storePath.recursivelyDelete() } @@ -302,6 +302,6 @@ object GenesisTest { } yield result } - def taskTest[R](f: Task[R]): R = - f.runSyncUnsafe() + def taskTest[R](f: IO[R]): R = + f.unsafeRunSync } diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala index f388b9ddfe7..569df117604 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala @@ -12,7 +12,6 @@ import coop.rchain.shared.scalatestcontrib._ import coop.rchain.models.syntax._ import coop.rchain.rholang.build.CompiledRholangTemplate import coop.rchain.rholang.interpreter.util.RevAddress -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala index 355585a6e8c..1889352eec3 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala @@ -12,7 +12,6 @@ import coop.rchain.models.syntax._ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.shared.scalatestcontrib._ -import monix.execution.Scheduler.Implicits.global import org.scalatest.Inside.inside import org.scalatest.Inspectors import org.scalatest.flatspec.AnyFlatSpec diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala index a4da8d59273..c411a9ee227 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala @@ -10,7 +10,6 @@ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import coop.rchain.models.syntax._ import coop.rchain.rholang.interpreter.util.RevAddress -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/contracts/TimeoutResultCollectorSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/contracts/TimeoutResultCollectorSpec.scala index e883925b8fc..4ba5ff18c62 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/contracts/TimeoutResultCollectorSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/contracts/TimeoutResultCollectorSpec.scala @@ -7,7 +7,6 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ -import monix.execution.Scheduler.Implicits.global class 
TimeoutResultCollectorSpec extends AnyFlatSpec with AppendedClues with Matchers { it should "testFinished should be false if execution hasn't finished within timeout" in { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala index 2e2528c068b..591645eb831 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.{Concurrent, Resource} +import cats.effect.{Concurrent, IO, Resource, Timer} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -11,9 +11,7 @@ import coop.rchain.casper.util.GenesisBuilder.GenesisContext import coop.rchain.metrics.Metrics import coop.rchain.metrics.Metrics.MetricsNOP import coop.rchain.rholang -import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import coop.rchain.shared.{Log, Time} import org.scalatest.{BeforeAndAfter, Suite} import java.nio.file.Path @@ -23,34 +21,36 @@ trait BlockDagStorageFixture extends BeforeAndAfter { self: Suite => def withGenesis[R]( context: GenesisContext - )(f: BlockStore[Task] => BlockDagStorage[Task] => RuntimeManager[Task] => Task[R]): R = { - implicit val metrics = new MetricsNOP[Task]() - implicit val log = Log.log[Task] + )(f: BlockStore[IO] => BlockDagStorage[IO] => RuntimeManager[IO] => IO[R]): R = { + implicit val metrics = new MetricsNOP[IO]() + implicit val log = Log.log[IO] + import coop.rchain.shared.RChainScheduler._ def create(dir: Path) = for { - kvm <- Resources.mkTestRNodeStoreManager[Task](dir) - blocks <- BlockStore[Task](kvm) - dag <- BlockDagKeyValueStorage.create[Task](kvm) - indexedDag = BlockDagStorage[Task](dag) - runtime <- Resources.mkRuntimeManagerAt[Task]( + kvm 
<- Resources.mkTestRNodeStoreManager[IO](dir) + blocks <- BlockStore[IO](kvm) + dag <- BlockDagKeyValueStorage.create[IO](kvm) + indexedDag = BlockDagStorage[IO](dag) + runtime <- Resources.mkRuntimeManagerAt[IO]( kvm, BlockRandomSeed.nonNegativeMergeableTagName(context.genesisBlock.shardId) ) } yield (blocks, indexedDag, runtime) Resources - .copyStorage[Task](context.storageDirectory) + .copyStorage[IO](context.storageDirectory) .evalMap(create) .use(Function.uncurried(f).tupled) - .runSyncUnsafe() + .unsafeRunSync } - def withStorage[R](f: BlockStore[Task] => BlockDagStorage[Task] => Task[R]): R = { - implicit val metrics = new MetricsNOP[Task]() - implicit val log = Log.log[Task] + def withStorage[R](f: BlockStore[IO] => BlockDagStorage[IO] => IO[R]): R = { + implicit val metrics = new MetricsNOP[IO]() + implicit val log = Log.log[IO] + import coop.rchain.shared.RChainScheduler._ - BlockDagStorageTestFixture.withStorageF[Task].use(Function.uncurried(f).tupled).runSyncUnsafe() + BlockDagStorageTestFixture.withStorageF[IO].use(Function.uncurried(f).tupled).unsafeRunSync } } diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala index c48bfc55db7..aa1b7ff95b7 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.helper import cats.Applicative -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -26,13 +26,12 @@ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, LogSource, Time} -import monix.eval.Task object BlockGenerator { private[this] val 
GenerateBlockMetricsSource = Metrics.Source(CasperMetricsSource, "generate-block") - implicit val timeEff = new LogicalTime[Task] + implicit val timeEff = new LogicalTime[IO] implicit val logSource: LogSource = LogSource(this.getClass) // Dummy empty Casper snapshot diff --git a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala index c2649b23b01..942272808f1 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.casper.genesis.Genesis import coop.rchain.casper.genesis.contracts.TestUtil @@ -20,13 +20,12 @@ import coop.rchain.rholang.build.CompiledRholangSource import coop.rchain.rholang.interpreter.{PrettyPrinter, RhoRuntime, SystemProcesses} import coop.rchain.rspace.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.AppendedClues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{Duration, FiniteDuration} +import coop.rchain.shared.RChainScheduler._ class RhoSpec( testObject: CompiledRholangSource[_], @@ -37,9 +36,9 @@ class RhoSpec( with AppendedClues with Matchers { - implicit val logger: Log[Task] = Log.log[Task] - implicit val metricsEff: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + implicit val logger: Log[IO] = Log.log[IO] + implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() private val printer = PrettyPrinter() private val SHARD_ID = "root-shard" @@ -134,17 +133,18 @@ class RhoSpec( otherLibs: Seq[Signed[DeployData]], timeout: FiniteDuration, 
shardId: String - ): Task[TestResult] = - TestResultCollector[Task].flatMap { testResultCollector => + ): IO[TestResult] = + TestResultCollector[IO].flatMap { testResultCollector => val genesis = GenesisBuilder.buildGenesis(genesisParameters) - val runtimeResource = copyStorage[Task](genesis.storageDirectory) - .evalMap(mkTestRNodeStoreManager[Task]) + val runtimeResource = copyStorage[IO](genesis.storageDirectory) + .evalMap(mkTestRNodeStoreManager[IO]) .evalMap(_.rSpaceStores) .evalMap( RhoRuntime.createRuntime( _, BlockRandomSeed.nonNegativeMergeableTagName(shardId), + rholangEC, additionalSystemProcesses = testFrameworkContracts(testResultCollector) ) ) @@ -152,7 +152,7 @@ class RhoSpec( runtimeResource.use { runtime => for { _ <- logger.info("Starting tests from " + testObject.path) - _ <- setupRuntime[Task]( + _ <- setupRuntime[IO]( runtime, otherLibs ) @@ -207,8 +207,7 @@ class RhoSpec( } val result = - getResults(testObject, extraNonGenesisDeploys, executionTimeout, genesisParameters._3.shardId) - .runSyncUnsafe(Duration.Inf) + getResults(testObject, extraNonGenesisDeploys, executionTimeout, genesisParameters._3.shardId).unsafeRunSync it should "finish execution within timeout" in { if (!result.hasFinished) fail(s"Timeout of $executionTimeout expired") diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index c4172060a6c..6e9f0aa3b0f 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -2,7 +2,7 @@ package coop.rchain.casper.helper import cats.Parallel import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ContextShift, Resource, Sync, Timer} +import cats.effect.{Concurrent, ContextShift, IO, Resource, Sync, Timer} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -36,7 +36,6 @@ 
import coop.rchain.rholang.interpreter.RhoRuntime.RhoHistoryRepository import coop.rchain.rspace.syntax._ import coop.rchain.shared._ import fs2.concurrent.Queue -import monix.eval.Task import monix.execution.Scheduler import java.nio.file.Path @@ -282,11 +281,13 @@ case class TestNode[F[_]: Concurrent: Timer]( } object TestNode { - type Effect[A] = Task[A] + type Effect[A] = IO[A] - def standaloneEff(genesis: GenesisContext)( - implicit scheduler: Scheduler - ): Resource[Effect, TestNode[Effect]] = + import scala.concurrent.ExecutionContext.Implicits.global + implicit val cs: ContextShift[IO] = IO.contextShift(global) + implicit val t: Timer[IO] = IO.timer(global) + + def standaloneEff(genesis: GenesisContext): Resource[Effect, TestNode[Effect]] = networkEff( genesis, networkSize = 1 @@ -299,7 +300,7 @@ object TestNode { maxNumberOfParents: Int = Int.MaxValue, maxParentDepth: Option[Int] = None, withReadOnlySize: Int = 0 - )(implicit scheduler: Scheduler): Resource[Effect, IndexedSeq[TestNode[Effect]]] = { + ): Resource[Effect, IndexedSeq[TestNode[Effect]]] = { implicit val c = Concurrent[Effect] implicit val n = TestNetwork.empty[Effect] @@ -322,7 +323,7 @@ object TestNode { maxNumberOfParents: Int, maxParentDepth: Option[Int], withReadOnlySize: Int - )(implicit s: Scheduler): Resource[F, IndexedSeq[TestNode[F]]] = { + ): Resource[F, IndexedSeq[TestNode[F]]] = { val n = sks.length val names = (1 to n).map(i => if (i <= (n - withReadOnlySize)) s"node-$i" else s"readOnly-$i") val isReadOnly = (1 to n).map(i => if (i <= (n - withReadOnlySize)) false else true) @@ -383,12 +384,13 @@ object TestNode { maxNumberOfParents: Int, maxParentDepth: Option[Int], isReadOnly: Boolean - )(implicit s: Scheduler): Resource[F, TestNode[F]] = { + ): Resource[F, TestNode[F]] = { val tle = new TransportLayerTestImpl[F]() val tls = new TransportLayerServerTestImpl[F](currentPeerNode) implicit val log = Log.log[F] implicit val metricEff = new Metrics.MetricsNOP[F] implicit val spanEff = 
new NoopSpan[F] + import RChainScheduler._ for { newStorageDir <- Resources.copyStorage[F](storageDir) kvm <- Resource.eval(Resources.mkTestRNodeStoreManager(newStorageDir)) @@ -402,7 +404,8 @@ object TestNode { rSpaceStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(genesis.shardId), - RuntimeManager.noOpExecutionTracker[F] + RuntimeManager.noOpExecutionTracker[F], + rholangEC ) ) diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala index 0884fdc07b6..b22f50b3327 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala @@ -6,7 +6,6 @@ import coop.rchain.rholang.Resources.mkRuntimes import coop.rchain.rholang.interpreter.RhoRuntime.RhoHistoryRepository import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime} import coop.rchain.shared.Log -import monix.execution.Scheduler.Implicits.global object TestRhoRuntime { def rhoRuntimeEff[F[_]: Log: Metrics: Span: Concurrent: Parallel: ContextShift]( diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index d42e1ca12aa..8e0bf4b2951 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.merging import cats.Parallel -import cats.effect.{Concurrent, ContextShift} +import cats.effect.{Concurrent, ContextShift, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.rholang.Resources @@ -26,8 +26,6 @@ import coop.rchain.sdk.dag.merging.ConflictResolutionLogic import coop.rchain.sdk.dag.merging.ConflictResolutionLogic._ import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ 
-import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import scodec.bits.ByteVector @@ -303,12 +301,14 @@ class MergeNumberChannelSpec extends AnyFlatSpec { } yield () } } - implicit val timeEff = new LogicalTime[Task] - implicit val logEff = Log.log[Task] - implicit val spanEff = Span.noop[Task] + implicit val timeEff = new LogicalTime[IO] + implicit val logEff = Log.log[IO] + implicit val spanEff = Span.noop[IO] + + import coop.rchain.shared.RChainScheduler._ "multiple branches" should "reject deploy when mergeable number channels got negative number" in effectTest { - testCase[Task]( + testCase[IO]( baseTerms = Seq(rhoST, rhoChange(10)), leftTerms = Seq( DeployTestInfo(rhoChange(-5), 10L, "0x11") // -5 @@ -322,7 +322,7 @@ class MergeNumberChannelSpec extends AnyFlatSpec { } "multiple branches" should "reject deploy when mergeable number channels got overflow" in effectTest { - testCase[Task]( + testCase[IO]( baseTerms = Seq(rhoST, rhoChange(10)), leftTerms = Seq( DeployTestInfo(rhoChange(-5), 10L, "0x11") // -5 @@ -336,7 +336,7 @@ class MergeNumberChannelSpec extends AnyFlatSpec { } "multiple branches with normal rejection" should "choose from normal reject options" in effectTest { - testCase[Task]( + testCase[IO]( baseTerms = Seq(rhoST, rhoChange(100)), leftTerms = Seq( DeployTestInfo(parRho(rhoChange(-20), "@\"X\"!(1)"), 10L, "0x11"), @@ -352,7 +352,7 @@ class MergeNumberChannelSpec extends AnyFlatSpec { } "multiple branches" should "merge number channels" in effectTest { - testCase[Task]( + testCase[IO]( baseTerms = Seq(rhoST), leftTerms = Seq( DeployTestInfo(rhoChange(10), 10L, "0x10"), diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala index 8c0748a1fb3..eaa0fa12234 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala +++ 
b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.merging -import cats.effect.Resource +import cats.effect.{IO, Resource} import cats.syntax.all._ import coop.rchain.casper.genesis.Genesis import coop.rchain.casper.rholang.sysdeploys.CloseBlockDeploy @@ -14,25 +14,24 @@ import coop.rchain.rspace.merger.{EventLogIndex, EventLogMergingLogic} import coop.rchain.sdk.dag.merging.ConflictResolutionLogic import coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.shared.{Log, Time} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class MergingCases extends AnyFlatSpec with Matchers { - val genesisContext = GenesisBuilder.buildGenesis(validatorsNum = 5) - val genesis = genesisContext.genesisBlock - implicit val logEff = Log.log[Task] - implicit val timeF: Time[Task] = new LogicalTime[Task] + val genesisContext = GenesisBuilder.buildGenesis(validatorsNum = 5) + val genesis = genesisContext.genesisBlock + implicit val logEff = Log.log[IO] + implicit val timeF: Time[IO] = new LogicalTime[IO] + import coop.rchain.shared.RChainScheduler._ - val runtimeManagerResource: Resource[Task, RuntimeManager[Task]] = for { - dir <- Resources.copyStorage[Task](genesisContext.storageDirectory) - kvm <- Resource.eval(Resources.mkTestRNodeStoreManager[Task](dir)) + val runtimeManagerResource: Resource[IO, RuntimeManager[IO]] = for { + dir <- Resources.copyStorage[IO](genesisContext.storageDirectory) + kvm <- Resource.eval(Resources.mkTestRNodeStoreManager[IO](dir)) mergeableTag = BlockRandomSeed.nonNegativeMergeableTagName( genesis.shardId ) - rm <- Resource.eval(Resources.mkRuntimeManagerAt[Task](kvm, mergeableTag)) + rm <- Resource.eval(Resources.mkRuntimeManagerAt[IO](kvm, mergeableTag)) } yield rm /** diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala 
b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala index f520c23716c..ba0fb818cfd 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.Resource +import cats.effect.{IO, Resource} import cats.syntax.all._ import cats.implicits.catsSyntaxApplicativeId import coop.rchain.casper.genesis.Genesis @@ -17,19 +17,16 @@ import coop.rchain.models.rholang.implicits._ import coop.rchain.models.{GDeployId, Par} import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.concurrent.duration._ - class DeployIdTest extends AnyFlatSpec with Matchers { - implicit val log: Log[Task] = new Log.NOPLog[Task]() + implicit val log: Log[IO] = new Log.NOPLog[IO]() private val dummyMergeableName = BlockRandomSeed.nonNegativeMergeableTagName("dummy") + import coop.rchain.shared.RChainScheduler._ - private val runtimeManager: Resource[Task, RuntimeManager[Task]] = - mkRuntimeManager[Task]("deploy-id-runtime-manager-test", dummyMergeableName) + private val runtimeManager: Resource[IO, RuntimeManager[IO]] = + mkRuntimeManager[IO]("deploy-id-runtime-manager-test", dummyMergeableName) private val sk = ConstructDeploy.defaultSec @@ -56,11 +53,11 @@ class DeployIdTest extends AnyFlatSpec with Matchers { .use( mgr => for { - hash <- RuntimeManager.emptyStateHashFixed.pure[Task] + hash <- RuntimeManager.emptyStateHashFixed.pure[IO] res <- mgr.spawnRuntime >>= { _.captureResults(hash, d) } } yield res ) - .runSyncUnsafe(10.seconds) + .unsafeRunSync result.size should be(1) result.head should be(GDeployId(d.sig): Par) diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala 
b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala index 73b6aa70fd2..c388db9fcb6 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.Resource +import cats.effect.{IO, Resource} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.genesis.Genesis @@ -17,18 +17,17 @@ import coop.rchain.models.{GDeployerId, Par} import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.shared.{Base16, Log} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class DeployerIdTest extends AnyFlatSpec with Matchers { - implicit val time = new LogicalTime[Task] - implicit val log: Log[Task] = new Log.NOPLog[Task]() + implicit val time = new LogicalTime[IO] + implicit val log: Log[IO] = new Log.NOPLog[IO]() private val dummyMergeableName = BlockRandomSeed.nonNegativeMergeableTagName("dummy") + import coop.rchain.shared.RChainScheduler._ - val runtimeManager: Resource[Task, RuntimeManager[Task]] = - mkRuntimeManager[Task]("deployer-id-runtime-manager-test", dummyMergeableName) + val runtimeManager: Resource[IO, RuntimeManager[IO]] = + mkRuntimeManager[IO]("deployer-id-runtime-manager-test", dummyMergeableName) "Deployer id" should "be equal to the deployer's public key" in effectTest { val sk = PrivateKey( @@ -63,7 +62,7 @@ class DeployerIdTest extends AnyFlatSpec with Matchers { deployer: PrivateKey, contractUser: PrivateKey, isAccessGranted: Boolean - ): Task[Unit] = { + ): IO[Unit] = { val checkDeployerDefinition = s""" |contract @"checkAuth"(input, ret) = { diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala 
b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala index bd036940b2d..db4c1ac0e2e 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.Concurrent +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -25,8 +25,6 @@ import coop.rchain.p2p.EffectsTestInstances.LogStub import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.scalatestcontrib._ import coop.rchain.shared.{Log, LogSource, Time} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -39,11 +37,12 @@ class InterpreterUtilTest with EitherValues { import BlockGenerator.step - implicit val logEff = new LogStub[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val span: Span[Task] = new NoopSpan[Task] - implicit val logSource: LogSource = LogSource(this.getClass) - implicit private val timeEff = Time.fromTimer[Task] + implicit val logEff: Log[IO] = new LogStub + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val span: Span[IO] = new NoopSpan[IO] + implicit val logSource: LogSource = LogSource(this.getClass) + import coop.rchain.shared.RChainScheduler._ + implicit private val timeEff = Time.fromTimer[IO] val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock @@ -130,14 +129,14 @@ class InterpreterUtilTest b1 <- node.addBlock(b1Deploys: _*) b2 <- node.addBlock(b2Deploys: _*) b3 <- node.addBlock(b3Deploys: _*) - _ <- getDataAtPublicChannel[Task](b0, 2) shouldBeF Seq("2") - _ <- 
getDataAtPublicChannel[Task](b0, 123) shouldBeF Seq("5") - _ <- getDataAtPublicChannel[Task](b1, 1) shouldBeF Seq("1") - _ <- getDataAtPublicChannel[Task](b1, 123) shouldBeF Seq("5") - _ <- getDataAtPublicChannel[Task](b1, 456) shouldBeF Seq("10") - _ <- getDataAtPublicChannel[Task](b3, 1) + _ <- getDataAtPublicChannel[IO](b0, 2) shouldBeF Seq("2") + _ <- getDataAtPublicChannel[IO](b0, 123) shouldBeF Seq("5") + _ <- getDataAtPublicChannel[IO](b1, 1) shouldBeF Seq("1") + _ <- getDataAtPublicChannel[IO](b1, 123) shouldBeF Seq("5") + _ <- getDataAtPublicChannel[IO](b1, 456) shouldBeF Seq("10") + _ <- getDataAtPublicChannel[IO](b3, 1) .map(_ should contain theSameElementsAs Seq("1", "15")) - _ <- getDataAtPublicChannel[Task](b3, 7) shouldBeF Seq("7") + _ <- getDataAtPublicChannel[IO](b3, 7) shouldBeF Seq("7") } yield () } } @@ -179,8 +178,8 @@ class InterpreterUtilTest b3 <- node1.addBlock(b3Deploys: _*) _ = b3.justifications.toSet shouldBe Set(b1, b2).map(_.blockHash) - _ <- getDataAtPublicChannel[Task](b3, 5) shouldBeF Seq("5") - _ <- getDataAtPublicChannel[Task](b3, 1) shouldBeF Seq("15") + _ <- getDataAtPublicChannel[IO](b3, 5) shouldBeF Seq("5") + _ <- getDataAtPublicChannel[IO](b3, 1) shouldBeF Seq("15") } yield () } } @@ -237,15 +236,15 @@ class InterpreterUtilTest * genesis */ for { - b1 <- buildBlock[Task](justifications = Seq(genesis.blockHash), deploys = b1DeploysWithCost) - b2 <- buildBlock[Task](justifications = Seq(genesis.blockHash), deploys = b2DeploysWithCost) - b3 <- buildBlock[Task]( + b1 <- buildBlock[IO](justifications = Seq(genesis.blockHash), deploys = b1DeploysWithCost) + b2 <- buildBlock[IO](justifications = Seq(genesis.blockHash), deploys = b2DeploysWithCost) + b3 <- buildBlock[IO]( justifications = Seq(b1.blockHash, b2.blockHash), deploys = b3DeploysWithCost ) - _ <- step[Task](b1) - _ <- step[Task](b2) - postState <- validateBlockCheckpointLegacy[Task](b3) + _ <- step[IO](b1) + _ <- step[IO](b2) + postState <- 
validateBlockCheckpointLegacy[IO](b3) } yield postState.value shouldBe false } @@ -277,30 +276,30 @@ class InterpreterUtilTest * genesis */ for { - b1 <- buildBlock[Task](justifications = Seq(genesis.blockHash), deploys = b1DeploysWithCost) - b2 <- buildBlock[Task](justifications = Seq(b1.blockHash), deploys = b2DeploysWithCost) - b3 <- buildBlock[Task](justifications = Seq(b1.blockHash), deploys = b3DeploysWithCost) - b4 <- buildBlock[Task](justifications = Seq(b3.blockHash), deploys = b4DeploysWithCost) - b5 <- buildBlock[Task]( + b1 <- buildBlock[IO](justifications = Seq(genesis.blockHash), deploys = b1DeploysWithCost) + b2 <- buildBlock[IO](justifications = Seq(b1.blockHash), deploys = b2DeploysWithCost) + b3 <- buildBlock[IO](justifications = Seq(b1.blockHash), deploys = b3DeploysWithCost) + b4 <- buildBlock[IO](justifications = Seq(b3.blockHash), deploys = b4DeploysWithCost) + b5 <- buildBlock[IO]( justifications = Seq(b2.blockHash, b4.blockHash), deploys = b5DeploysWithCost ) - _ <- step[Task](b1) - _ <- step[Task](b2) - _ <- step[Task](b3) - _ <- step[Task](b4) + _ <- step[IO](b1) + _ <- step[IO](b2) + _ <- step[IO](b3) + _ <- step[IO](b4) - postState <- validateBlockCheckpointLegacy[Task](b5) + postState <- validateBlockCheckpointLegacy[IO](b5) } yield postState.value shouldBe false } def computeDeployCosts(deploy: Signed[DeployData]*)( - implicit runtimeManager: RuntimeManager[Task], - bds: BlockDagStorage[Task], - blockStore: BlockStore[Task] - ): Task[Seq[PCost]] = + implicit runtimeManager: RuntimeManager[IO], + bds: BlockDagStorage[IO], + blockStore: BlockStore[IO] + ): IO[Seq[PCost]] = for { - computeResult <- computeDeploysCheckpoint[Task](Seq(genesis.blockHash), deploy) + computeResult <- computeDeploysCheckpoint[IO](Seq(genesis.blockHash), deploy) Right((_, _, processedDeploys, _, _)) = computeResult } yield processedDeploys.map(_.cost) @@ -350,13 +349,13 @@ class InterpreterUtilTest val processedDeploys = deploys.map(d => ProcessedDeploy(d, 
PCost(1L), List.empty, false)) val invalidHash = ByteString.EMPTY - mkRuntimeManager[Task]( + mkRuntimeManager[IO]( "interpreter-util-test", BlockRandomSeed.nonNegativeMergeableTagName(genesis.shardId) ).use { implicit runtimeManager => for { - block <- createGenesis[Task](deploys = processedDeploys, tsHash = invalidHash) - validateResult <- validateBlockCheckpointLegacy[Task](block) + block <- createGenesis[IO](deploys = processedDeploys, tsHash = invalidHash) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe false } } @@ -376,13 +375,13 @@ class InterpreterUtilTest "for (@x <- @2) { @3!(x) }" ).map(ConstructDeploy.sourceDeployNow(_)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, blockNumber = 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, @@ -390,7 +389,7 @@ class InterpreterUtilTest justifications = Seq(genesis.blockHash) ) - validateResult <- validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } @@ -422,20 +421,20 @@ class InterpreterUtilTest """.stripMargin ).map(ConstructDeploy.sourceDeployNow(_)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, preStateHash = preStateHash, justifications = Seq(genesis.blockHash) ) - validateResult <- 
validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } @@ -472,20 +471,20 @@ class InterpreterUtilTest .map(ConstructDeploy.sourceDeployNow(_)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, preStateHash = preStateHash, justifications = Seq(genesis.blockHash) ) - validateResult <- validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } @@ -519,20 +518,20 @@ class InterpreterUtilTest ).map(ConstructDeploy.sourceDeployNow(_)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, preStateHash = preStateHash, justifications = Seq(genesis.blockHash) ) - validateResult <- validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } @@ -558,14 +557,14 @@ class InterpreterUtilTest ).map(ConstructDeploy.sourceDeployNow(_)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L, i + 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + 
block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, @@ -574,7 +573,7 @@ class InterpreterUtilTest justifications = Seq(genesis.blockHash) ) - validateResult <- validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } } @@ -585,7 +584,7 @@ class InterpreterUtilTest (0 until 1).map(i => ConstructDeploy.sourceDeployNow(s"for(_ <- @$i){ Nil } | @$i!($i)")) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L @@ -595,14 +594,14 @@ class InterpreterUtilTest badProcessedDeploy = processedDeploys.head.copy( deployLog = processedDeploys.head.deployLog ++ processedDeploys.last.deployLog.take(5) ) - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = Seq(badProcessedDeploy, processedDeploys.last), postStateHash = computedTsHash, preStateHash = preStateHash, justifications = Seq(genesis.blockHash) ) - validateResult <- validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe false } @@ -630,14 +629,14 @@ class InterpreterUtilTest ).map(s => ConstructDeploy.sourceDeployNow(s)) for { - deploysCheckpoint <- computeDeploysCheckpoint[Task]( + deploysCheckpoint <- computeDeploysCheckpoint[IO]( Seq(genesis.blockHash), deploys, 1L, i + 1L ) Right((preStateHash, computedTsHash, processedDeploys, _, _)) = deploysCheckpoint - block <- createBlock[Task]( + block <- createBlock[IO]( ByteString.copyFrom(genesisContext.validatorPks.head.bytes), deploys = processedDeploys, postStateHash = computedTsHash, @@ -645,7 +644,7 @@ class InterpreterUtilTest seqNum = i + 1L, justifications = Seq(genesis.blockHash) ) - validateResult <- 
validateBlockCheckpointLegacy[Task](block) + validateResult <- validateBlockCheckpointLegacy[IO](block) } yield validateResult.value shouldBe true } } diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala index e78854ff9c1..36e6b895d4a 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala @@ -39,7 +39,7 @@ object Resources { def mkRuntimeManager[F[_]: Concurrent: Parallel: ContextShift: Log]( prefix: String, mergeableTagName: Par - )(implicit scheduler: Scheduler): Resource[F, RuntimeManager[F]] = + ): Resource[F, RuntimeManager[F]] = mkTempDir[F](prefix) .evalMap(mkTestRNodeStoreManager[F]) .evalMap(mkRuntimeManagerAt[F](_, mergeableTagName)) @@ -49,12 +49,11 @@ object Resources { def mkRuntimeManagerAt[F[_]: Concurrent: Parallel: ContextShift]( kvm: KeyValueStoreManager[F], mergeableTagName: Par - )( - implicit scheduler: Scheduler ): F[RuntimeManager[F]] = { implicit val log = Log.log[F] implicit val metricsEff = new metrics.Metrics.MetricsNOP[F] implicit val noopSpan: Span[F] = NoopSpan[F]() + import coop.rchain.shared.RChainScheduler._ for { rStore <- kvm.rSpaceStores @@ -63,7 +62,8 @@ object Resources { rStore, mStore, mergeableTagName, - RuntimeManager.noOpExecutionTracker[F] + RuntimeManager.noOpExecutionTracker[F], + rholangEC ) } yield runtimeManager } diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala index 6f40ed98c6a..df13be060f2 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.rholang import cats.data.EitherT -import cats.effect.{Resource, Sync} +import cats.effect.{IO, Resource, Sync} import 
cats.syntax.all._ import cats.{Applicative, Functor, Id} import com.google.protobuf.ByteString @@ -30,8 +30,6 @@ import coop.rchain.rholang.interpreter.{accounting, ParBuilderUtil, ReplayRhoRun import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.shared.{Base16, Log, Time} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -55,23 +53,24 @@ object SystemDeployReplayResult { class RuntimeManagerTest extends AnyFlatSpec with Matchers { - implicit val timeF: Time[Task] = new LogicalTime[Task] - implicit val log: Log[Task] = Log.log[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + implicit val timeF: Time[IO] = new LogicalTime[IO] + implicit val log: Log[IO] = Log.log[IO] + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + import coop.rchain.shared.RChainScheduler._ val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock val genesisBlockNum = genesis.blockNumber val genesisSeqNum = genesis.seqNum - val runtimeManagerResource: Resource[Task, RuntimeManager[Task]] = + val runtimeManagerResource: Resource[IO, RuntimeManager[IO]] = Resources - .copyStorage[Task](genesisContext.storageDirectory) - .evalMap(Resources.mkTestRNodeStoreManager[Task]) + .copyStorage[IO](genesisContext.storageDirectory) + .evalMap(Resources.mkTestRNodeStoreManager[IO]) .evalMap( Resources - .mkRuntimeManagerAt[Task](_, BlockRandomSeed.nonNegativeMergeableTagName(genesis.shardId)) + .mkRuntimeManagerAt[IO](_, BlockRandomSeed.nonNegativeMergeableTagName(genesis.shardId)) ) private def computeState[F[_]: Functor]( @@ -148,11 +147,11 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { } private def 
compareSuccessfulSystemDeploys[S <: SystemDeploy]( - runtimeManager: RuntimeManager[Task] + runtimeManager: RuntimeManager[IO] )(startState: StateHash)( playSystemDeploy: S, replaySystemDeploy: S - )(resultAssertion: S#Result => Boolean): Task[StateHash] = + )(resultAssertion: S#Result => Boolean): IO[StateHash] = for { runtime <- runtimeManager.spawnRuntime blockData = BlockData.fromBlock(genesis) @@ -421,7 +420,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { runtime <- runtimeManager.spawnRuntime _ <- runtime.cost.set(initialPhlo) - term <- Compiler[Task].sourceToADT(deploy.data.term) + term <- Compiler[IO].sourceToADT(deploy.data.term) _ <- runtime.inj(term) phlosLeft <- runtime.cost.get reductionCost = initialPhlo - phlosLeft @@ -475,21 +474,21 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { .use( mgr => for { - hash <- RuntimeManager.emptyStateHashFixed.pure[Task] + hash <- RuntimeManager.emptyStateHashFixed.pure[IO] res <- mgr.spawnRuntime >>= { _.captureResults(hash, deploy) } } yield res ) - .runSyncUnsafe(10.seconds) + .unsafeRunSync val noResults = runtimeManagerResource .use( mgr => for { - hash <- RuntimeManager.emptyStateHashFixed.pure[Task] + hash <- RuntimeManager.emptyStateHashFixed.pure[IO] res <- mgr.spawnRuntime >>= { _.captureResults(hash, deployNoRes) } } yield res ) - .runSyncUnsafe(10.seconds) + .unsafeRunSync noResults.isEmpty should be(true) @@ -500,19 +499,22 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { } "captureResult" should "throw error if execution fails" in { - val term = s""" new return in { return.undefined() } """ - val deploy = ConstructDeploy.sourceDeploy(term, timestamp = 0) + val buggyTerm = s""" new @return in { return.undefined() } """ + val deploy = ConstructDeploy.sourceDeploy(buggyTerm, timestamp = 0) val task = runtimeManagerResource .use( mgr => for { - hash <- RuntimeManager.emptyStateHashFixed.pure[Task] + hash <- RuntimeManager.emptyStateHashFixed.pure[IO] res <- 
mgr.spawnRuntime >>= { _.captureResults(hash, deploy) } } yield res ) - Await.result(task.failed.runToFuture, 1.seconds) shouldBe a[BugFoundError] + task.handleErrorWith { + case _: BugFoundError => true.pure[IO] + case _ => false.pure[IO] + }.unsafeRunSync shouldBe true } "emptyStateHash" should "not remember previous hot store state" in effectTest { @@ -520,11 +522,11 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { val term = ConstructDeploy.basicDeployData[Id](0) - def run: Task[StateHash] = + def run: IO[StateHash] = runtimeManagerResource .use { m => for { - hash <- RuntimeManager.emptyStateHashFixed.pure[Task] + hash <- RuntimeManager.emptyStateHashFixed.pure[IO] afterHash <- computeState(m, term, genesis.postStateHash) .map(_ => hash) } yield afterHash @@ -754,7 +756,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { } } - private def invalidReplay(source: String): Task[Either[ReplayFailure, StateHash]] = + private def invalidReplay(source: String): IO[Either[ReplayFailure, StateHash]] = runtimeManagerResource.use { runtimeManager => for { deploy <- ConstructDeploy.sourceDeployNowF(source, phloLimit = 10000) diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala index 12cf34e6b47..43dd4e1581b 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala @@ -1,5 +1,7 @@ package coop.rchain.casper.rholang +import cats.effect.IO +import cats.effect.testing.scalatest.AsyncIOSpec import coop.rchain.casper.genesis.Genesis import coop.rchain.casper.syntax._ import coop.rchain.models.syntax._ @@ -11,90 +13,92 @@ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import 
monix.testing.scalatest.MonixTaskTest import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ -class RuntimeSpec extends AsyncFlatSpec with MonixTaskTest with Matchers { - - "emptyStateHash" should "be the same as hard-coded cached value" in { - implicit val log: Log[Task] = new Log.NOPLog[Task] - implicit val span: Span[Task] = new NoopSpan[Task] - implicit val metrics: Metrics[Task] = new MetricsNOP[Task] - - val kvm = InMemoryStoreManager[Task]() - - val dummyShardId = "dummy" - for { - store <- kvm.rSpaceStores - runtime <- RhoRuntime.createRuntime( - store, - BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId) - ) - - /** - * Root hashes compatible with RChain main net network - */ - // Par() - without bootstrap AST - // 03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314 - - // Par(sends = Seq(Send())) - // 1325a42070be0ac7c44c5c345c5f7512379618d5db57ad76a871d4f34051e05c - - // Par(receives = Seq(Receive(binds = Seq(ReceiveBind())))) - // 2a5adf05eb519bd0858414e5a4b31a8e22fd64e5203fae4e1ec8f9b1b5113ff0 - - hardCodedHash = RuntimeManager.emptyStateHashFixed - emptyRootHash <- runtime.emptyStateHash - - emptyHashHardCoded = hardCodedHash.toBlake2b256Hash - emptyHash = emptyRootHash.toBlake2b256Hash - } yield emptyHashHardCoded shouldEqual emptyHash - } - - "stateHash after fixed rholang term execution " should "be hash fixed without hard fork" in { - implicit val metricsEff: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val logger: Log[Task] = Log.log[Task] - val kvm = InMemoryStoreManager[Task]() - val dummyShardId = "dummy" - - // fixed term , if the term changed, it is possible that the stateHash also changed. 
- val contract = - """ - | new a in { - | @"2"!(10)| - | @2!("test")| - | @"3"!!(3)| - | @42!!("1")| - | for (@t <- a){Nil}| - | for (@num <- @"3"&@num2 <- @1){10}| - | for (@_ <= @"4"){"3"}| - | for (@_ <= @"5"& @num3 <= @5){Nil}| - | for (@3 <- @44){new g in {Nil}}| - | for (@_ <- @"55"& @num3 <- @55){Nil} - | } - |""".stripMargin - - // random seed should be always to the same to make sure everything is the same - val random = - Tools.rng(Blake2b256Hash.create(Array[Byte](1)).toByteString.toByteArray) - - for { - store <- kvm.rSpaceStores - runtime <- RhoRuntime.createRuntime( - store, - BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId) - ) - r <- runtime.evaluate(contract, Cost.UNSAFE_MAX, Map.empty, random) - _ = r.errors should be(Vector.empty) - checkpoint <- runtime.createCheckpoint - expectedHash = Blake2b256Hash.fromHex( - "10cce029738696f1e120a6bad4bdf3f18adca25ccf36133bd4916f607a6a50c0" - ) - stateHash = checkpoint.root - } yield expectedHash shouldEqual stateHash - } - -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class RuntimeSpec extends AsyncFlatSpec with AsyncIOSpec with Matchers { +// +// "emptyStateHash" should "be the same as hard-coded cached value" in { +// implicit val log: Log[IO] = new Log.NOPLog[IO] +// implicit val span: Span[IO] = new NoopSpan[IO] +// implicit val metrics: Metrics[IO] = new MetricsNOP[IO] +// +// val kvm = InMemoryStoreManager[IO]() +// +// val dummyShardId = "dummy" +// for { +// store <- kvm.rSpaceStores +// runtime <- RhoRuntime.createRuntime( +// store, +// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId), +// rholangEC +// ) +// +// /** +// * Root hashes compatible with RChain main net network +// */ +// // Par() - without bootstrap AST +// // 03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314 +// +// // Par(sends = Seq(Send())) +// // 1325a42070be0ac7c44c5c345c5f7512379618d5db57ad76a871d4f34051e05c +// +// // Par(receives = 
Seq(Receive(binds = Seq(ReceiveBind())))) +// // 2a5adf05eb519bd0858414e5a4b31a8e22fd64e5203fae4e1ec8f9b1b5113ff0 +// +// hardCodedHash = RuntimeManager.emptyStateHashFixed +// emptyRootHash <- runtime.emptyStateHash +// +// emptyHashHardCoded = hardCodedHash.toBlake2b256Hash +// emptyHash = emptyRootHash.toBlake2b256Hash +// } yield emptyHashHardCoded shouldEqual emptyHash +// } +// +// "stateHash after fixed rholang term execution " should "be hash fixed without hard fork" in { +// implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] +// implicit val noopSpan: Span[IO] = NoopSpan[IO]() +// implicit val logger: Log[IO] = Log.log[IO] +// val kvm = InMemoryStoreManager[IO]() +// val dummyShardId = "dummy" +// +// // fixed term , if the term changed, it is possible that the stateHash also changed. +// val contract = +// """ +// | new a in { +// | @"2"!(10)| +// | @2!("test")| +// | @"3"!!(3)| +// | @42!!("1")| +// | for (@t <- a){Nil}| +// | for (@num <- @"3"&@num2 <- @1){10}| +// | for (@_ <= @"4"){"3"}| +// | for (@_ <= @"5"& @num3 <= @5){Nil}| +// | for (@3 <- @44){new g in {Nil}}| +// | for (@_ <- @"55"& @num3 <- @55){Nil} +// | } +// |""".stripMargin +// +// // random seed should be always to the same to make sure everything is the same +// val random = +// Tools.rng(Blake2b256Hash.create(Array[Byte](1)).toByteString.toByteArray) +// +// for { +// store <- kvm.rSpaceStores +// runtime <- RhoRuntime.createRuntime( +// store, +// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId), +// rholangEC +// ) +// r <- runtime.evaluate(contract, Cost.UNSAFE_MAX, Map.empty, random) +// _ = r.errors should be(Vector.empty) +// checkpoint <- runtime.createCheckpoint +// expectedHash = Blake2b256Hash.fromHex( +// "10cce029738696f1e120a6bad4bdf3f18adca25ccf36133bd4916f607a6a50c0" +// ) +// stateHash = checkpoint.root +// } yield expectedHash shouldEqual stateHash +// } +// +//} diff --git 
a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala index ee92c761fdb..39041a6b89d 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.sync +import cats.effect.IO import cats.effect.concurrent.Ref import com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever @@ -15,12 +16,12 @@ import coop.rchain.comm.{Endpoint, NodeIdentifier, PeerNode} import coop.rchain.metrics.Metrics import coop.rchain.models.BlockHash.BlockHash import coop.rchain.p2p.EffectsTestInstances.{createRPConfAsk, LogStub, TransportLayerStub} -import monix.eval.Task -import monix.execution.schedulers.TestScheduler +import coop.rchain.shared.Log import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers +import java.util.concurrent.TimeUnit import scala.concurrent.duration._ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { @@ -30,17 +31,18 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi val hash = ByteString.copyFrom("newHash", "utf-8") val timeout: FiniteDuration = 240.seconds val local: PeerNode = peerNode("src", 40400) + import coop.rchain.shared.RChainScheduler._ - implicit val log = new LogStub[Task] - implicit val metrics = new Metrics.MetricsNOP[Task] - implicit val currentRequests: RequestedBlocks[Task] = - Ref.unsafe[Task, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) - implicit val connectionsCell: ConnectionsCell[Task] = - Ref.unsafe[Task, Connections](List(local)) - implicit val transportLayer = new TransportLayerStub[Task] - implicit val rpConf = createRPConfAsk[Task](local) - implicit val commUtil = CommUtil.of[Task] - implicit val 
blockRetriever = BlockRetriever.of[Task] + implicit val log: Log[IO] = new LogStub + implicit val metrics = new Metrics.MetricsNOP[IO] + implicit val currentRequests: RequestedBlocks[IO] = + Ref.unsafe[IO, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) + implicit val connectionsCell: ConnectionsCell[IO] = + Ref.unsafe[IO, Connections](List(local)) + implicit val transportLayer = new TransportLayerStub[IO] + implicit val rpConf = createRPConfAsk[IO](local) + implicit val commUtil = CommUtil.of[IO] + implicit val blockRetriever = BlockRetriever.of[IO] val networkId = "nid" val conf = RPConf(local, networkId, null, null, 0, null) @@ -58,29 +60,26 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi private def alwaysSuccess: PeerNode => Protocol => CommErr[Unit] = kp(kp(Right(()))) - // Instance of testing ExecutionContext (Scheduler) - implicit val ec = TestScheduler() - - private def timedOut: Long = -2 * timeout.toMillis - private def notTimedOut: Long = -1 + private def timedOut: Long = -2 * timeout.toMillis +// private def notTimedOut: Long = -1 override def beforeEach(): Unit = { transportLayer.reset() transportLayer.setResponses(alwaysSuccess) - log.reset() - currentRequests.set(Map.empty).runSyncUnsafe() + currentRequests.set(Map.empty).unsafeRunSync } describe("Running") { describe("maintainRequestedBlocks, for every block that was requested") { describe("if block request is still within a timeout") { it("should keep the request not touch") { - val requested = RequestState(timestamp = notTimedOut) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + val requested = + RequestState(timestamp = timer.clock.realTime(TimeUnit.MILLISECONDS).unsafeRunSync) + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then - val requestedBlocksMapAfter = currentRequests.get.runSyncUnsafe() + val 
requestedBlocksMapAfter = currentRequests.get.unsafeRunSync requestedBlocksMapAfter.size should be(1) } } @@ -94,9 +93,9 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi peers = Set(peerNode("peer")), waitingList = waitingList ) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then val (recipient, msg) = transportLayer.getRequest(0) toBlockRequest(msg).hash should be(hash) @@ -111,27 +110,30 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi peers = Set(peerNode("peer")), waitingList = waitingList ) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then - val Some(requestedAfter) = currentRequests.get.runSyncUnsafe().get(hash) + val Some(requestedAfter) = currentRequests.get.unsafeRunSync.get(hash) requestedAfter.waitingList shouldBe List(peerNode("waiting2")) requestedAfter.peers shouldBe Set(peerNode("peer"), peerNode("waiting1")) } it("timestamp is reset") { val waitingList = List(peerNode("waiting1"), peerNode("waiting2")) + val initTime = timer.clock + .realTime(TimeUnit.MILLISECONDS) + .unsafeRunSync val requested = RequestState( - timestamp = timedOut, + timestamp = initTime - timeout.toMillis - 1, peers = Set(peerNode("peer")), waitingList = waitingList ) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then - val Some(requestedAfter) = currentRequests.get.runSyncUnsafe().get(hash) - requestedAfter.timestamp shouldBe 0 + val 
Some(requestedAfter) = currentRequests.get.unsafeRunSync.get(hash) + requestedAfter.timestamp != initTime shouldBe true } } describe("if waiting list has no peers left") { @@ -143,9 +145,9 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi peers = Set(peerNode("peer")), waitingList = waitingList ) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then val (_, msg) = transportLayer.getRequest(0) toHasBlockRequest(msg).hash should be(hash) @@ -162,11 +164,11 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi peers = Set(peerNode("peer")), waitingList = waitingList ) - currentRequests.set(Map(hash -> requested)).runSyncUnsafe() + currentRequests.set(Map(hash -> requested)).unsafeRunSync // when - blockRetriever.requestAll(timeout).runSyncUnsafe() + blockRetriever.requestAll(timeout).unsafeRunSync // then - val requestedBlocksMapAfter = currentRequests.get.runSyncUnsafe() + val requestedBlocksMapAfter = currentRequests.get.unsafeRunSync requestedBlocksMapAfter.size should be(0) } } diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala index fe383f8be7c..f56f6af8ce5 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.sync +import cats.effect.IO import cats.effect.concurrent.Ref import com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever @@ -11,9 +12,7 @@ import coop.rchain.comm.{Endpoint, NodeIdentifier, PeerNode} import coop.rchain.metrics.Metrics import coop.rchain.models.BlockHash.BlockHash import 
coop.rchain.p2p.EffectsTestInstances.{createRPConfAsk, LogStub, TransportLayerStub} -import coop.rchain.shared.Time -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import coop.rchain.shared.{Log, Time} import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers @@ -31,36 +30,37 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher val peer: PeerNode = peerNode("peer", 40400) val secondPeer: PeerNode = peerNode("secondPeer", 40400) - implicit val log = new LogStub[Task] - implicit val metrics = new Metrics.MetricsNOP[Task] - implicit val currentRequests: RequestedBlocks[Task] = - Ref.unsafe[Task, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) - implicit val connectionsCell: ConnectionsCell[Task] = - Ref.unsafe[Task, Connections](List(local)) - implicit val transportLayer = new TransportLayerStub[Task] - implicit val rpConf = createRPConfAsk[Task](local) - implicit val time = Time.fromTimer[Task] - implicit val commUtil = CommUtil.of[Task] - implicit val blockRetriever = BlockRetriever.of[Task] + implicit val log: Log[IO] = new LogStub + implicit val metrics = new Metrics.MetricsNOP[IO] + implicit val currentRequests: RequestedBlocks[IO] = + Ref.unsafe[IO, Map[BlockHash, RequestState]](Map.empty[BlockHash, RequestState]) + implicit val connectionsCell: ConnectionsCell[IO] = + Ref.unsafe[IO, Connections](List(local)) + implicit val transportLayer = new TransportLayerStub[IO] + implicit val rpConf = createRPConfAsk[IO](local) + import coop.rchain.shared.RChainScheduler._ + implicit val time = Time.fromTimer[IO] + implicit val commUtil = CommUtil.of[IO] + implicit val blockRetriever = BlockRetriever.of[IO] override def beforeEach(): Unit = { transportLayer.reset() transportLayer.setResponses(_ => p => Right(())) - currentRequests.set(Map.empty).runSyncUnsafe() + currentRequests.set(Map.empty).unsafeRunSync } 
describe("BlockRetriever admitting hash") { describe("when hash is unknown") { it("should add record for hash") { - blockRetriever.admitHash(hash, peer = None, admitHashReason = testReason).runSyncUnsafe() - val requests = currentRequests.get.runSyncUnsafe() + blockRetriever.admitHash(hash, peer = None, admitHashReason = testReason).unsafeRunSync + val requests = currentRequests.get.unsafeRunSync requests.contains(hash) should be(true) } describe("when source peer is unknown") { it("should broadcast HasBlockRequest and only HasBlockRequest") { - blockRetriever.admitHash(hash, peer = None, admitHashReason = testReason).runSyncUnsafe() + blockRetriever.admitHash(hash, peer = None, admitHashReason = testReason).unsafeRunSync val (_, msg) = transportLayer.getRequest(0) val hbr = HasBlockRequest.from( convert[PacketTypeTag.HasBlockRequest.type](toPacket(msg).right.get).get @@ -72,7 +72,7 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher describe("when source peer is known") { it("should send BlockRequest and only BlockRequest") { - blockRetriever.admitHash(hash, Some(peer), admitHashReason = testReason).runSyncUnsafe() + blockRetriever.admitHash(hash, Some(peer), admitHashReason = testReason).unsafeRunSync val (recipient, msg) = transportLayer.getRequest(0) val br = BlockRequest.from( convert[PacketTypeTag.BlockRequest.type](toPacket(msg).right.get).get @@ -91,10 +91,10 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher it("should ignore hash") { blockRetriever .admitHash(hash, peer = None, admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync val status = blockRetriever .admitHash(hash, peer = None, admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync status.status equals BlockRetriever.Ignore should be(true) } } @@ -103,7 +103,7 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher it("should request block from peer if sources list was empty") { 
blockRetriever .admitHash(hash, peer = Some(peer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync val (recipient, msg) = transportLayer.getRequest(0) val br = BlockRequest.from( convert[PacketTypeTag.BlockRequest.type](toPacket(msg).right.get).get @@ -116,21 +116,21 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher it("should ignore hash if peer is already in sources list") { blockRetriever .admitHash(hash, peer = Some(peer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync val status = blockRetriever .admitHash(hash, peer = Some(peer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync status.status equals BlockRetriever.Ignore should be(true) } it("should add peer to sources list if it is absent") { blockRetriever .admitHash(hash, peer = Some(peer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync blockRetriever .admitHash(hash, peer = Some(secondPeer), admitHashReason = testReason) - .runSyncUnsafe() - val requests = currentRequests.get.runSyncUnsafe() + .unsafeRunSync + val requests = currentRequests.get.unsafeRunSync val peerSize = requests(hash).waitingList.size peerSize should be(2) } @@ -138,10 +138,10 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher it("should NOT request for block from peer if sources list was not empty") { blockRetriever .admitHash(hash, peer = Some(peer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync blockRetriever .admitHash(hash, peer = Some(secondPeer), admitHashReason = testReason) - .runSyncUnsafe() + .unsafeRunSync transportLayer.requests.size should be(1) } } diff --git a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala index f0c7bea85eb..47c324b29d8 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala +++ 
b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.util +import cats.effect.IO import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.syntax._ @@ -19,7 +20,6 @@ import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.rspace.syntax._ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ -import monix.eval.Task import java.nio.file.{Files, Path} import scala.collection.compat.immutable.LazyList @@ -163,36 +163,36 @@ object GenesisBuilder { val (validavalidatorKeyPairs, genesisVaults, genesisParameters) = parameters val storageDirectory = Files.createTempDirectory(s"hash-set-casper-test-genesis-") - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val spanEff = NoopSpan[Task]() + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO] + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val spanEff = NoopSpan[IO]() - implicit val scheduler = monix.execution.Scheduler.Implicits.global + import coop.rchain.shared.RChainScheduler._ (for { - kvsManager <- mkTestRNodeStoreManager[Task](storageDirectory) + kvsManager <- mkTestRNodeStoreManager[IO](storageDirectory) rStore <- kvsManager.rSpaceStores mStore <- RuntimeManager.mergeableStore(kvsManager) - t = RuntimeManager.noOpExecutionTracker[Task] + t = RuntimeManager.noOpExecutionTracker[IO] runtimeManager <- RuntimeManager( rStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(parameters._3.shardId), - t + t, + rholangEC ) // First bonded validator is the creator creator = ValidatorIdentity(parameters._1.head._1) genesis <- { implicit val rm = runtimeManager - Genesis.createGenesisBlock[Task](creator, genesisParameters) + Genesis.createGenesisBlock[IO](creator, genesisParameters) } - blockStore <- BlockStore[Task](kvsManager) + blockStore <- 
BlockStore[IO](kvsManager) _ <- blockStore.put(genesis.blockHash, genesis) - blockDagStorage <- BlockDagKeyValueStorage.create[Task](kvsManager) + blockDagStorage <- BlockDagKeyValueStorage.create[IO](kvsManager) // Add genesis block to DAG _ <- blockDagStorage.insertGenesis(genesis) - } yield GenesisContext(genesis, validavalidatorKeyPairs, genesisVaults, storageDirectory)) - .runSyncUnsafe() + } yield GenesisContext(genesis, validavalidatorKeyPairs, genesisVaults, storageDirectory)).unsafeRunSync } case class GenesisContext( diff --git a/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala b/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala index e08f0b9d30c..45036f351cd 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala @@ -14,8 +14,6 @@ import coop.rchain.metrics.Metrics.MetricsNOP import coop.rchain.models.BlockHash.BlockHash import coop.rchain.p2p.EffectsTestInstances.{LogStub, LogicalTime, TransportLayerStub} import coop.rchain.shared._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers @@ -44,9 +42,9 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { ) implicit val requestedBlocks = initRequestedBlocks() implicit val connectionsCell = initConnectionsCell(connections = peers) - implicit val commUtil = CommUtil.of[Task] + implicit val commUtil = CommUtil.of[IO] // when - CommUtil[Task].sendBlockRequest(hash).runSyncUnsafe() + CommUtil[IO].sendBlockRequest(hash).unsafeRunSync // then val requested = transport.requests .map(_.msg) @@ -66,9 +64,9 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { ) implicit val requestedBlocks = initRequestedBlocks() implicit val connectionsCell = initConnectionsCell(connections = 
peers) - implicit val commUtil = CommUtil.of[Task] + implicit val commUtil = CommUtil.of[IO] // when - CommUtil[Task].sendBlockRequest(hash).runSyncUnsafe() + CommUtil[IO].sendBlockRequest(hash).unsafeRunSync // then log.infos contains (s"Requested missing block ${PrettyPrinter.buildString(hash)} from peers") } @@ -76,11 +74,11 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { // given implicit val requestedBlocks = initRequestedBlocks() implicit val connectionsCell = initConnectionsCell() - implicit val commUtil = CommUtil.of[Task] + implicit val commUtil = CommUtil.of[IO] // when - CommUtil[Task].sendBlockRequest(hash).runSyncUnsafe() + CommUtil[IO].sendBlockRequest(hash).unsafeRunSync // then - requestedBlocks.read.runSyncUnsafe().contains(hash) should be(true) + requestedBlocks.read.unsafeRunSync.contains(hash) should be(true) } } describe("if given block was already requested") { @@ -97,9 +95,9 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { ) implicit val requestedBlocks = initRequestedBlocks(init = requestedBefore) implicit val connectionsCell = initConnectionsCell(connections = peers) - implicit val commUtil = CommUtil.of[Task] + implicit val commUtil = CommUtil.of[IO] // when - CommUtil[Task].sendBlockRequest(hash).runSyncUnsafe() + CommUtil[IO].sendBlockRequest(hash).unsafeRunSync // then transport.requests.size shouldBe 0 log.infos.size shouldBe 0 @@ -113,18 +111,18 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { val maxNoConnections = 10 val conf = RPConf(local, networkId, null, null, maxNoConnections, null) - implicit val transport = new TransportLayerStub[Task] - implicit val askConf = new ConstApplicativeAsk[Task, RPConf](conf) - implicit val log = new LogStub[Task] - implicit val time = new LogicalTime[Task] - implicit val metrics = new MetricsNOP[Task] + implicit val transport = new TransportLayerStub[IO] + implicit val askConf = new 
ConstApplicativeAsk[IO, RPConf](conf) + implicit val log = new LogStub[IO] + implicit val time = new LogicalTime[IO] + implicit val metrics = new MetricsNOP[IO] private def initRequestedBlocks( init: Map[BlockHash, Requested] = Map.empty - ): RequestedBlocks[Task] = - Cell.unsafe[Task, Map[BlockHash, Running.Requested]](init) + ): RequestedBlocks[IO] = + Cell.unsafe[IO, Map[BlockHash, Running.Requested]](init) private def initConnectionsCell(connections: Connections = List.empty) = - Cell.unsafe[Task, Connections](connections) + Cell.unsafe[IO, Connections](connections) private def endpoint(port: Int): Endpoint = Endpoint("host", port, port) private def peerNode(name: String, port: Int): PeerNode = PeerNode(NodeIdentifier(name.getBytes), endpoint(port)) diff --git a/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala b/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala index 41d3ad46427..482fbf97b60 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala @@ -10,6 +10,7 @@ import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.rp.ProtocolHelper.protocol import coop.rchain.comm.transport._ import coop.rchain.comm.{CommError, PeerNode} +import io.grpc.Server import scala.collection.immutable.Queue import scala.concurrent.duration.FiniteDuration diff --git a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala index 997c7e01ff4..e022c3f9075 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.util.scalatest +import cats.effect.{ContextShift, IO} import fs2.Stream -import monix.eval.Task import 
org.scalatest.matchers.{MatchResult, Matcher} import java.util.concurrent.TimeoutException @@ -14,11 +14,11 @@ trait Fs2StreamMatchers { * * @param timeout duration to wait for new elements */ - class EmptyMatcher[A](timeout: FiniteDuration) extends Matcher[Stream[Task, A]] { - import monix.execution.Scheduler.Implicits.global + class EmptyMatcher[A](timeout: FiniteDuration) extends Matcher[Stream[IO, A]] { + import coop.rchain.shared.RChainScheduler._ - def apply(left: Stream[Task, A]) = { - val res = left.take(1).timeout(timeout).compile.toList.attempt.runSyncUnsafe() + def apply(left: Stream[IO, A]) = { + val res = left.take(1).timeout(timeout).compile.toList.attempt.unsafeRunSync val isEmpty = res.isLeft && res.left.get.isInstanceOf[TimeoutException] diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala index 9d1dd180b46..064189cabc2 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala @@ -8,20 +8,17 @@ import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.metrics.Metrics import coop.rchain.metrics.implicits.MetricsSyntaxConversion -import coop.rchain.monix.Monixable import coop.rchain.shared.syntax._ import io.grpc._ import io.grpc.netty._ -import monix.eval.Task -import monix.execution.Scheduler - +import scala.concurrent.ExecutionContext import scala.concurrent.duration._ class GrpcKademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( networkId: String, - timeout: FiniteDuration -)(implicit scheduler: Scheduler) - extends KademliaRPC[F] { + timeout: FiniteDuration, + grpcEC: ExecutionContext +) extends KademliaRPC[F] { implicit private val metricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "discovery.kademlia.grpc") @@ -70,7 +67,7 @@ class GrpcKademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: 
Metrics]( NettyChannelBuilder .forAddress(peer.endpoint.host, peer.endpoint.udpPort) .idleTimeout(timeout.toMillis, MILLISECONDS) - .executor(scheduler) + .executor(grpcEC.execute) .usePlaintext() .build() } diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala index bc3dba4d0cf..ee2ceaf0fe3 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPCServer.scala @@ -3,10 +3,9 @@ package coop.rchain.comm.discovery import cats.effect.Sync import cats.syntax.all._ import coop.rchain.comm.PeerNode -import coop.rchain.monix.Monixable import io.grpc.Metadata -class GrpcKademliaRPCServer[F[_]: Monixable: Sync]( +class GrpcKademliaRPCServer[F[_]: Sync]( networkId: String, pingHandler: PeerNode => F[Unit], lookupHandler: (PeerNode, Array[Byte]) => F[Seq[PeerNode]] diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala index 99a7615fc6a..9ed230e31cd 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala @@ -3,26 +3,25 @@ package coop.rchain.comm import cats.effect.{ConcurrentEffect, Resource, Sync} import com.google.protobuf.ByteString import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.sdk.syntax.all._ import io.grpc import io.grpc.netty.NettyServerBuilder -import monix.execution.Scheduler +import scala.concurrent.ExecutionContext package object discovery { val DiscoveryMetricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "discovery.kademlia") - def acquireKademliaRPCServer[F[_]: Monixable: Sync: ConcurrentEffect]( + def acquireKademliaRPCServer[F[_]: Sync: ConcurrentEffect]( networkId: String, port: Int, pingHandler: PeerNode => F[Unit], lookupHandler: 
(PeerNode, Array[Byte]) => F[Seq[PeerNode]], - grpcScheduler: Scheduler + grpcEC: ExecutionContext ): Resource[F, grpc.Server] = { val server = NettyServerBuilder .forPort(port) - .executor(grpcScheduler) + .executor(grpcEC.execute) .addService( KademliaRPCServiceFs2Grpc .bindService(new GrpcKademliaRPCServer(networkId, pingHandler, lookupHandler)) diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala index 4915e198498..7ce4a84e25b 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala @@ -7,7 +7,6 @@ import coop.rchain.comm._ import coop.rchain.comm.protocol.routing._ import coop.rchain.metrics.Metrics import coop.rchain.metrics.implicits._ -import coop.rchain.monix.Monixable import io.grpc.{Metadata, Status, StatusRuntimeException} import fs2.Stream @@ -83,7 +82,7 @@ object GrpcTransport { case e => protocolException(e) } - def send[F[_]: Monixable: Sync]( + def send[F[_]: Sync]( transport: TransportLayerFs2Grpc[F, Metadata], peer: PeerNode, msg: Protocol @@ -99,7 +98,7 @@ object GrpcTransport { .map(processResponse(peer, _)) } yield result - def stream[F[_]: Monixable: Sync]( + def stream[F[_]: Sync]( transport: TransportLayerFs2Grpc[F, Metadata], peer: PeerNode, networkId: String, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 254aabf5d10..70473951bd7 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -9,22 +9,18 @@ import coop.rchain.comm.CommError.{protocolException, CommErr} import coop.rchain.comm._ import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.transport.StreamObservable.StreamObservable -import 
coop.rchain.grpc.implicits._ import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream -import fs2.concurrent.{Signal, SignallingRef} -import io.grpc.{CallOptions, ManagedChannel, Metadata} +import fs2.concurrent.SignallingRef import io.grpc.netty._ +import io.grpc.{CallOptions, ManagedChannel, Metadata} import io.netty.handler.ssl.SslContext -import monix.eval.Task -import monix.execution.Ack.Continue -import monix.execution.{Cancelable, CancelableFuture, Scheduler} import java.io.ByteArrayInputStream import scala.collection.concurrent.TrieMap +import scala.concurrent.ExecutionContext import scala.concurrent.duration.{FiniteDuration, _} import scala.util._ @@ -39,19 +35,21 @@ final case class BufferedGrpcStreamChannel[F[_]]( buferSubscriber: Stream[F, Unit] ) -class GrpcTransportClient[F[_]: Monixable: Concurrent: ConcurrentEffect: Log: Metrics]( +class GrpcTransportClient[F[_]: Concurrent: ConcurrentEffect: Log: Metrics]( networkId: String, cert: String, key: String, maxMessageSize: Int, packetChunkSize: Int, clientQueueSize: Int, - channelsMap: Ref[F, Map[PeerNode, Deferred[F, BufferedGrpcStreamChannel[F]]]], - ioScheduler: Scheduler + channelsMap: Ref[F, Map[PeerNode, Deferred[F, BufferedGrpcStreamChannel[F]]]] ) extends TransportLayer[F] { val DefaultSendTimeout: FiniteDuration = 5.seconds + import coop.rchain.shared.RChainScheduler.ioScheduler + val ioEC = ExecutionContext.fromExecutorService(ioScheduler) + implicit val metricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "rp.transport") @@ -81,7 +79,7 @@ class GrpcTransportClient[F[_]: Monixable: Concurrent: ConcurrentEffect: Log: Me clientSslContext <- clientSslContextTask grpcChannel = NettyChannelBuilder .forAddress(peer.endpoint.host, peer.endpoint.tcpPort) - .executor(ioScheduler) + .executor(ioEC) .maxInboundMessageSize(maxMessageSize) .negotiationType(NegotiationType.TLS) 
.sslContext(clientSslContext) diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index 7e65962618e..6a5fefa0c5e 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.transport import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, Sync, ConcurrentEffect, Timer} +import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync, Timer} import cats.syntax.all._ import cats.effect.syntax.all._ import coop.rchain.comm.protocol.routing._ @@ -9,17 +9,17 @@ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.comm.rp.ProtocolHelper import coop.rchain.comm.{CommMetricsSource, PeerNode} import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream -import io.grpc.Metadata +import io.grpc.{Metadata, Server} import fs2.concurrent.Queue import io.grpc.netty.NettyServerBuilder import io.netty.handler.ssl.SslContext -import monix.execution.{Cancelable, Scheduler} +import io.netty.internal.tcnative.AsyncTask import scala.collection.concurrent.TrieMap +import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt object GrpcTransportReceiver { @@ -30,7 +30,7 @@ object GrpcTransportReceiver { type MessageBuffers[F[_]] = (Send => F[Boolean], StreamMessage => F[Boolean], Stream[F, Unit]) type MessageHandlers[F[_]] = (Send => F[Unit], StreamMessage => F[Unit]) - def create[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( + def create[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, serverSslContext: SslContext, @@ -40,7 +40,7 @@ object GrpcTransportReceiver { messageHandlers: 
MessageHandlers[F], parallelism: Int, cache: TrieMap[String, Array[Byte]] - )(implicit mainScheduler: Scheduler): F[Cancelable] = { + ): Resource[F, Unit] = { val service = new TransportLayerFs2Grpc[F, Metadata] { @@ -161,16 +161,18 @@ object GrpcTransportReceiver { ) } + import coop.rchain.shared.RChainScheduler.mainEC val server = NettyServerBuilder .forPort(port) - .executor(mainScheduler) + .executor(mainEC.execute) .maxInboundMessageSize(maxMessageSize) .sslContext(serverSslContext) .addService(TransportLayerFs2Grpc.bindService(service)) .intercept(new SslSessionServerInterceptor(networkId)) .build - .start - Cancelable(() => server.shutdown().awaitTermination()).pure[F] + val startF = Sync[F].delay(server.start()) + val stopF = Sync[F].delay(server.shutdown().awaitTermination()) + Resource.make(startF)(_ => stopF).map(_ => ()) } } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala index 90ce55d966a..21d041da894 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.transport import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync, Timer} +import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync, Timer} import cats.syntax.all._ import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.comm.protocol.routing.Protocol @@ -9,11 +9,10 @@ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.comm.transport.GrpcTransportReceiver.MessageBuffers import coop.rchain.comm.{CommMetricsSource, PeerNode} import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.shared._ +import io.grpc.Server import io.grpc.netty.GrpcSslContexts import io.netty.handler.ssl._ -import monix.execution.Scheduler 
import java.io.ByteArrayInputStream import java.nio.file.Path @@ -39,7 +38,7 @@ object TransportLayerServer { } } -class GrpcTransportServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( +class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, cert: String, @@ -47,8 +46,7 @@ class GrpcTransportServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfA maxMessageSize: Int, maxStreamMessageSize: Long, parallelism: Int -)(implicit mainScheduler: Scheduler) - extends TransportLayerServer[F] { +) extends TransportLayerServer[F] { private def certInputStream = new ByteArrayInputStream(cert.getBytes()) private def keyInputStream = new ByteArrayInputStream(key.getBytes()) @@ -87,29 +85,30 @@ class GrpcTransportServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfA handleStreamed(blob) }) >> Metrics[F].incrementCounter("dispatched.packets") - val cancelable = for { - serverSslContext <- serverSslContextTask - messageBuffers <- Ref.of[F, Map[PeerNode, Deferred[F, MessageBuffers[F]]]](Map.empty) - receiver <- GrpcTransportReceiver.create( - networkId: String, - port, - serverSslContext, - maxMessageSize, - maxStreamMessageSize, - messageBuffers, - (dispatchSend, dispatchBlob), - parallelism = parallelism, - cache - ) - } yield receiver - - Resource.make(cancelable)(c => Sync[F].delay(c.cancel())).as(()) + Resource + .eval( + (serverSslContextTask, Ref.of[F, Map[PeerNode, Deferred[F, MessageBuffers[F]]]](Map.empty)).tupled + ) + .flatMap { + case (serverSslContext, messageBuffers) => + GrpcTransportReceiver.create( + networkId: String, + port, + serverSslContext, + maxMessageSize, + maxStreamMessageSize, + messageBuffers, + (dispatchSend, dispatchBlob), + parallelism = parallelism, + cache + ) + } } } object GrpcTransportServer { - def acquireServer[F[_]: Monixable: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( + def acquireServer[F[_]: Concurrent: 
ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( networkId: String, port: Int, certPath: Path, @@ -117,12 +116,10 @@ object GrpcTransportServer { maxMessageSize: Int, maxStreamMessageSize: Long, parallelism: Int - )( - implicit mainScheduler: Scheduler ): TransportLayerServer[F] = { val cert = Using.resource(Source.fromFile(certPath.toFile))(_.mkString) val key = Using.resource(Source.fromFile(keyPath.toFile))(_.mkString) - val server = new GrpcTransportServer[F]( + new GrpcTransportServer[F]( networkId, port, cert, @@ -131,7 +128,5 @@ object GrpcTransportServer { maxStreamMessageSize, parallelism ) - TransportLayerServer(server) } - } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala index 74daa53eb81..d6a89d5925c 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala @@ -8,11 +8,9 @@ import coop.rchain.comm.PeerNode import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.rp.ProtocolHelper import coop.rchain.comm.transport.PacketOps._ -import coop.rchain.monix.Monixable import coop.rchain.shared.Compression._ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ -import monix.reactive.Observable import fs2.Stream import scala.collection.concurrent.TrieMap diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala index c3baa93439b..4273509d1f5 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala @@ -7,7 +7,6 @@ import coop.rchain.comm.transport.PacketOps._ import coop.rchain.shared.Log import fs2.Stream import fs2.concurrent.Queue -import monix.execution.Scheduler import scala.collection.concurrent.TrieMap diff --git 
a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala index 5607315d3e7..1681a9c1f70 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala @@ -1,29 +1,27 @@ package coop.rchain.comm.discovery import cats.Applicative -import cats.effect.Resource +import cats.effect.{ContextShift, IO, Resource, Sync, Timer} import cats.mtl.DefaultApplicativeAsk import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.comm.rp.RPConf import coop.rchain.metrics.Metrics -import coop.rchain.shared.Log +import coop.rchain.shared.{Log, RChainScheduler} import io.grpc -import monix.eval.Task -import monix.execution.Scheduler +import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ import scala.util.Random -class GrpcKademliaRPCSpec extends KademliaRPCSpec[Task, GrpcEnvironment] { +class GrpcKademliaRPCSpec extends KademliaRPCSpec[IO, GrpcEnvironment] { - implicit val log: Log[Task] = new Log.NOPLog[Task] - implicit val scheduler: Scheduler = Scheduler.Implicits.global - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP - private val networkId = "test" + implicit val log: Log[IO] = new Log.NOPLog[IO] + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP + private val networkId = "test" - def createEnvironment(port: Int): Task[GrpcEnvironment] = - Task.delay { + def createEnvironment(port: Int): IO[GrpcEnvironment] = + IO.delay { val host = "127.0.0.1" val bytes = Array.ofDim[Byte](40) Random.nextBytes(bytes) @@ -31,25 +29,31 @@ class GrpcKademliaRPCSpec extends KademliaRPCSpec[Task, GrpcEnvironment] { GrpcEnvironment(host, port, peer) } - def createKademliaRPC(env: GrpcEnvironment): Task[KademliaRPC[Task]] = { - implicit val ask: RPConfAsk[Task] = - new DefaultApplicativeAsk[Task, RPConf] { - val applicative: 
Applicative[Task] = Applicative[Task] - def ask: Task[RPConf] = Task.pure( + def createKademliaRPC(env: GrpcEnvironment): IO[KademliaRPC[IO]] = { + implicit val ask: RPConfAsk[IO] = + new DefaultApplicativeAsk[IO, RPConf] { + val applicative: Applicative[IO] = Applicative[IO] + def ask: IO[RPConf] = IO.pure( RPConf(local = env.peer, null, null, null, 0, null) ) } - Task.delay(new GrpcKademliaRPC(networkId, 500.millis)) + IO.delay(new GrpcKademliaRPC(networkId, 500.millis, RChainScheduler.mainEC)) } - def extract[A](fa: Task[A]): A = fa.runSyncUnsafe(Duration.Inf) + def extract[A](fa: IO[A]): A = fa.unsafeRunSync def createKademliaRPCServer( env: GrpcEnvironment, - pingHandler: PeerNode => Task[Unit], - lookupHandler: (PeerNode, Array[Byte]) => Task[Seq[PeerNode]] - ): Resource[Task, grpc.Server] = - acquireKademliaRPCServer(networkId, env.port, pingHandler, lookupHandler, scheduler) + pingHandler: PeerNode => IO[Unit], + lookupHandler: (PeerNode, Array[Byte]) => IO[Seq[PeerNode]] + ): Resource[IO, grpc.Server] = + acquireKademliaRPCServer( + networkId, + env.port, + pingHandler, + lookupHandler, + RChainScheduler.mainEC + ) } case class GrpcEnvironment( diff --git a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala index 889a7f84581..401c340b22f 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.rp -import cats.effect.Concurrent +import cats.effect.{Concurrent, IO} import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.comm._ @@ -10,15 +10,14 @@ import coop.rchain.p2p.EffectsTestInstances._ import coop.rchain.shared._ import coop.rchain.shared.scalatestcontrib.convertToAnyShouldWrapper import fs2.concurrent.Queue -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global 
import org.scalatest.flatspec.AnyFlatSpec import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +import RChainScheduler._ class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChecks { - implicit private val logEffTest = new Log.NOPLog[Task] - implicit private val metricEffEff = new Metrics.MetricsNOP[Task] + implicit private val logEffTest = new Log.NOPLog[IO] + implicit private val metricEffEff = new Metrics.MetricsNOP[IO] val validConnections = Table( ("src", "remote"), @@ -67,12 +66,12 @@ class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChe val src = peerNode(srcHost) val remote = peerNode(remoteHost) val run = for { - conn <- tryToHandshake[Task](src, remote) + conn <- tryToHandshake[IO](src, remote) _ = conn.size shouldBe 1 } yield () - run.runSyncUnsafe() + run.unsafeRunSync } } @@ -98,12 +97,12 @@ class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChe val src = peerNode(srcHost) val remote = peerNode(remoteHost) val run = for { - conn <- tryToHandshake[Task](src, remote) + conn <- tryToHandshake[IO](src, remote) _ = conn.size shouldBe 0 } yield () - run.runSyncUnsafe() + run.unsafeRunSync } } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala index ceeb1777726..92618c2ddde 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.comm.transport +import cats.effect.{ContextShift, IO} import com.google.protobuf.ByteString import coop.rchain.comm.CommError._ import coop.rchain.comm._ @@ -8,7 +9,6 @@ import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.rp.ProtocolHelper import coop.rchain.metrics.Metrics import io.grpc.{Metadata, Status, StatusRuntimeException} -import monix.eval.Task import monix.execution.Scheduler import 
monix.reactive.Observable import org.scalatest._ @@ -20,12 +20,12 @@ import scala.util.Random class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP - implicit val scheduler: Scheduler = Scheduler.Implicits.global - private val networkId = "test" - private val peerLocal = createPeerNode - private val peerRemote = createPeerNode - private val msg = ProtocolHelper.heartbeat(peerLocal, networkId) + import coop.rchain.shared.RChainScheduler._ + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP + private val networkId = "test" + private val peerLocal = createPeerNode + private val peerRemote = createPeerNode + private val msg = ProtocolHelper.heartbeat(peerLocal, networkId) private def createPeerNode: PeerNode = { val b = Array.ofDim[Byte](4) @@ -52,13 +52,13 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { private val testThrowable: Throwable = new RuntimeException("Test exception") - private class TestTransportLayer(response: Task[TLResponse]) - extends routing.TransportLayerFs2Grpc[Task, Metadata] { - override def send(request: TLRequest, ctx: Metadata): Task[TLResponse] = { + private class TestTransportLayer(response: IO[TLResponse]) + extends routing.TransportLayerFs2Grpc[IO, Metadata] { + override def send(request: TLRequest, ctx: Metadata): IO[TLResponse] = { sendMessages += request response } - override def stream(input: fs2.Stream[Task, Chunk], ctx: Metadata): Task[TLResponse] = + override def stream(input: fs2.Stream[IO, Chunk], ctx: Metadata): IO[TLResponse] = input.compile.toList.map { l => streamMessages += l ack @@ -72,8 +72,8 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "everything is fine" should { "send and receive Unit" in { val response = ack - val stub = new TestTransportLayer(Task.now(response)) - val result = GrpcTransport.send[Task](stub, peerRemote, msg).attempt.runSyncUnsafe() + val stub = 
new TestTransportLayer(IO(response)) + val result = GrpcTransport.send[IO](stub, peerRemote, msg).attempt.unsafeRunSync val unit: Unit = () inside(result) { @@ -88,8 +88,8 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "server replies with InternalCommunicationError" should { "fail with an InternalCommunicationError" in { val response = internalServerError("Test error") - val stub = new TestTransportLayer(Task.now(response)) - val result = GrpcTransport.send[Task](stub, peerRemote, msg).attempt.runSyncUnsafe() + val stub = new TestTransportLayer(IO(response)) + val result = GrpcTransport.send[IO](stub, peerRemote, msg).attempt.unsafeRunSync inside(result) { case Right(Left(p)) => @@ -103,8 +103,8 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "server is unavailable" should { "fail with a PeerUnavailable" in { - val stub = new TestTransportLayer(Task.raiseError(unavailableThrowable)) - val result = GrpcTransport.send[Task](stub, peerRemote, msg).attempt.runSyncUnsafe() + val stub = new TestTransportLayer(IO.raiseError(unavailableThrowable)) + val result = GrpcTransport.send[IO](stub, peerRemote, msg).attempt.unsafeRunSync inside(result) { case Right(Left(p)) => @@ -118,8 +118,8 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "timeout" should { "fail with a TimeOut" in { - val stub = new TestTransportLayer(Task.raiseError(timeoutThrowable)) - val result = GrpcTransport.send[Task](stub, peerRemote, msg).attempt.runSyncUnsafe() + val stub = new TestTransportLayer(IO.raiseError(timeoutThrowable)) + val result = GrpcTransport.send[IO](stub, peerRemote, msg).attempt.unsafeRunSync inside(result) { case Right(Left(p)) => @@ -133,8 +133,8 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "any other exception" should { "fail with a ProtocolException" in { - val stub = new TestTransportLayer(Task.raiseError(testThrowable)) - val result = 
GrpcTransport.send[Task](stub, peerRemote, msg).attempt.runSyncUnsafe() + val stub = new TestTransportLayer(IO.raiseError(testThrowable)) + val result = GrpcTransport.send[IO](stub, peerRemote, msg).attempt.unsafeRunSync inside(result) { case Right(Left(p)) => @@ -158,14 +158,14 @@ class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { "streaming successful" should { "deliver a list of Chuncks" in { - val stub = new TestTransportLayer(Task.raiseError(testThrowable)) + val stub = new TestTransportLayer(IO.raiseError(testThrowable)) val blob = Blob(peerLocal, Packet("N/A", bigContent)) - val chunks = Chunker.chunkIt[Task](networkId, blob, messageSize).runSyncUnsafe().toList + val chunks = Chunker.chunkIt[IO](networkId, blob, messageSize).unsafeRunSync.toList val result = GrpcTransport - .stream[Task](stub, peerRemote, networkId, blob, messageSize) + .stream[IO](stub, peerRemote, networkId, blob, messageSize) .attempt - .runSyncUnsafe() + .unsafeRunSync result shouldBe Right(Right(())) stub.streamMessages.length shouldBe 1 diff --git a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala index 5b3fa7b37f5..42870770c72 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala @@ -1,9 +1,8 @@ package coop.rchain.comm.transport +import cats.effect.{ContextShift, IO} import com.google.protobuf.ByteString import coop.rchain.comm.protocol.routing._ -import monix.eval.Task -import monix.execution.Scheduler import org.scalacheck.Gen import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers @@ -16,7 +15,8 @@ class PacketStoreRestoreSpec extends AnyFunSpec with Matchers with ScalaCheckDri import PacketOps._ - implicit val scheduler: Scheduler = Scheduler.Implicits.global + import 
scala.concurrent.ExecutionContext.Implicits.global + implicit val cs: ContextShift[IO] = IO.contextShift(global) describe("Packet store & restore") { it("should store and restore to the original Packet") { @@ -25,8 +25,8 @@ class PacketStoreRestoreSpec extends AnyFunSpec with Matchers with ScalaCheckDri val cache = TrieMap[String, Array[Byte]]() val packet = Packet("Test", ByteString.copyFrom(content)) // when - val storedIn = packet.store[Task](cache).runSyncUnsafe().right.get - val restored = PacketOps.restore[Task](storedIn, cache).runSyncUnsafe().right.get + val storedIn = packet.store[IO](cache).unsafeRunSync.right.get + val restored = PacketOps.restore[IO](storedIn, cache).unsafeRunSync.right.get // then packet shouldBe restored } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala index e4839f3a2e4..d1f73d0c3e6 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala @@ -1,13 +1,12 @@ package coop.rchain.comm.transport +import cats.effect.IO import com.google.protobuf.ByteString import coop.rchain.catscontrib.ski._ import coop.rchain.comm._ import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.transport.StreamHandler.CircuitBreaker import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import monix.reactive.Observable import org.scalatest.Inside import org.scalatest.funspec.AnyFunSpec @@ -19,7 +18,7 @@ import scala.util.Random class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task]() + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() val networkId = "test" @@ -66,12 +65,12 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { it("should stop processing a stream if stream is missing header") 
{ // given val cache = TrieMap[String, Array[Byte]]() - val streamWithoutHeader: Stream[Task, Chunk] = { - val it: Task[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { + val streamWithoutHeader: Stream[IO, Chunk] = { + val it: IO[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { case _ :: data => data.toIterator case _ => throw new RuntimeException("") } - Stream.eval(it).flatMap(Stream.fromIterator[Task](_, 1)) + Stream.eval(it).flatMap(Stream.fromIterator[IO](_, 1)) } // when val err: StreamHandler.StreamError = handleStreamErr(streamWithoutHeader) @@ -85,12 +84,12 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { it("should stop processing a stream if stream brought incomplete data") { // given val cache = TrieMap[String, Array[Byte]]() - val incompleteStream: Stream[Task, Chunk] = { - val it: Task[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { + val incompleteStream: Stream[IO, Chunk] = { + val it: IO[Iterator[Chunk]] = createStreamIterator().map(_.toList).map { case header :: _ :: data2 => (header :: data2).toIterator case _ => throw new RuntimeException("") } - Stream.eval(it).flatMap(Stream.fromIterator[Task](_, 1)) + Stream.eval(it).flatMap(Stream.fromIterator[IO](_, 1)) } // when val err: StreamHandler.StreamError = handleStreamErr(incompleteStream, cache = cache) @@ -103,23 +102,23 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { } private def handleStream( - stream: fs2.Stream[Task, Chunk], + stream: fs2.Stream[IO, Chunk], cache: TrieMap[String, Array[Byte]] = TrieMap[String, Array[Byte]]() ): StreamMessage = StreamHandler .handleStream(stream, circuitBreaker = neverBreak, cache) - .runSyncUnsafe() + .unsafeRunSync .right .get private def handleStreamErr( - stream: fs2.Stream[Task, Chunk], + stream: fs2.Stream[IO, Chunk], circuitBreaker: StreamHandler.CircuitBreaker = neverBreak, cache: TrieMap[String, Array[Byte]] = TrieMap[String, Array[Byte]]() ): 
StreamHandler.StreamError = StreamHandler .handleStream(stream, circuitBreaker = circuitBreaker, cache) - .runSyncUnsafe() + .unsafeRunSync .left .get @@ -128,23 +127,23 @@ class StreamHandlerSpec extends AnyFunSpec with Matchers with Inside { contentLength: Int = 30 * 1024, sender: String = "sender", typeId: String = "BlockMessageTest" - ): Stream[Task, Chunk] = + ): Stream[IO, Chunk] = Stream .eval(createStreamIterator(messageSize, contentLength, sender, typeId)) - .flatMap(Stream.fromIterator[Task](_, 1)) + .flatMap(Stream.fromIterator[IO](_, 1)) private def createStreamIterator( messageSize: Int = 10 * 1024, contentLength: Int = 30 * 1024, sender: String = "sender", typeId: String = "BlockMessageTest" - ): Task[Iterator[Chunk]] = { + ): IO[Iterator[Chunk]] = { val content = Array.fill(contentLength)((Random.nextInt(256) - 128).toByte) val packet = Packet(typeId, ByteString.copyFrom(content)) val peer = peerNode(sender) val blob = Blob(peer, packet) - Chunker.chunkIt[Task](networkId, blob, messageSize) + Chunker.chunkIt[IO](networkId, blob, messageSize) } private def peerNode(name: String): PeerNode = diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala index 0a39609a261..dbd9669332e 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala @@ -1,25 +1,22 @@ package coop.rchain.comm.transport +import cats.effect.{IO, Sync, Timer} import cats.effect.concurrent.{Deferred, MVar, Ref} import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.crypto.util.{CertificateHelper, CertificatePrinter} import coop.rchain.metrics.Metrics import coop.rchain.p2p.EffectsTestInstances._ +import coop.rchain.shared.RChainScheduler._ import coop.rchain.shared.{Base16, Log} -import monix.eval.Task -import monix.execution.Scheduler 
-import scala.concurrent.duration.Duration +class TcpTransportLayerSpec extends TransportLayerSpec[IO, TcpTlsEnvironment] { -class TcpTransportLayerSpec extends TransportLayerSpec[Task, TcpTlsEnvironment] { + implicit val log: Log[IO] = new Log.NOPLog[IO] + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP - implicit val log: Log[Task] = new Log.NOPLog[Task] - implicit val scheduler: Scheduler = Scheduler.Implicits.global - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP - - def createEnvironment(port: Int): Task[TcpTlsEnvironment] = - Task.delay { + def createEnvironment(port: Int): IO[TcpTlsEnvironment] = + IO.delay { val host = "127.0.0.1" val keyPair = CertificateHelper.generateKeyPair(true) val cert = CertificatePrinter.print(CertificateHelper.generate(keyPair)) @@ -35,8 +32,8 @@ class TcpTransportLayerSpec extends TransportLayerSpec[Task, TcpTlsEnvironment] def createTransportLayer( env: TcpTlsEnvironment - ): Task[TransportLayer[Task]] = - Task.delay( + ): IO[TransportLayer[IO]] = + IO.delay( new GrpcTransportClient( networkId, env.cert, @@ -44,20 +41,19 @@ class TcpTransportLayerSpec extends TransportLayerSpec[Task, TcpTlsEnvironment] maxMessageSize, maxMessageSize, 100, - Ref.unsafe[Task, Map[PeerNode, Deferred[Task, BufferedGrpcStreamChannel[Task]]]](Map.empty), - scheduler + Ref.unsafe[IO, Map[PeerNode, Deferred[IO, BufferedGrpcStreamChannel[IO]]]](Map.empty) ) ) - def extract[A](fa: Task[A]): A = fa.runSyncUnsafe(Duration.Inf) + def extract[A](fa: IO[A]): A = fa.unsafeRunSync - def createDispatcherCallback: Task[DispatcherCallback[Task]] = - MVar.empty[Task, Unit].map(new DispatcherCallback(_)) + def createDispatcherCallback: IO[DispatcherCallback[IO]] = + MVar.empty[IO, Unit].map(new DispatcherCallback(_)) - def createTransportLayerServer(env: TcpTlsEnvironment): Task[TransportLayerServer[Task]] = - Task.delay { - implicit val rPConfAsk: RPConfAsk[Task] = createRPConfAsk[Task](env.peer) - val server = new GrpcTransportServer( + 
def createTransportLayerServer(env: TcpTlsEnvironment): IO[TransportLayerServer[IO]] = + IO.delay { + implicit val rPConfAsk: RPConfAsk[IO] = createRPConfAsk[IO](env.peer) + val server = new GrpcTransportServer[IO]( networkId, env.port, env.cert, diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala index 2fdc0fc38a6..2d8564f73e2 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala @@ -205,7 +205,10 @@ abstract class TransportLayerRuntime[F[_]: Sync: Timer, E <: Environment] { remote: PeerNode ): F[CommErr[Unit]] = { val msg = ProtocolHelper.heartbeat(local, networkId) - transport.send(remote, msg) + transport.send(remote, msg).map { x => + println(x) + x + } } def broadcastHeartbeat( diff --git a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala index fc9482a2a20..d9d7d4eb119 100644 --- a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala +++ b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala @@ -1,8 +1,8 @@ package coop.rchain.graphz +import cats.effect.IO import cats.effect.concurrent.Ref import cats.syntax.all._ -import monix.eval.Task import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers @@ -12,9 +12,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A describe("Graphz") { it("simple graph") { val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", Graph, ser) + g <- Graphz[IO]("G", Graph, ser) _ <- g.close } yield ref graph.show shouldBe @@ -24,9 +24,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("simple digraph") { val 
graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", DiGraph, ser) + g <- Graphz[IO]("G", DiGraph, ser) _ <- g.close } yield ref graph.show shouldBe @@ -36,9 +36,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("simple graph with comment") { val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", Graph, ser, comment = Some("this is comment")) + g <- Graphz[IO]("G", Graph, ser, comment = Some("this is comment")) _ <- g.close } yield ref graph.show shouldBe @@ -50,9 +50,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("graph, two nodes one edge") { // given val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", Graph, ser) + g <- Graphz[IO]("G", Graph, ser) _ <- g.edge("Hello", "World") _ <- g.close } @@ -67,9 +67,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("digraph, two nodes one edge") { // given val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", DiGraph, ser) + g <- Graphz[IO]("G", DiGraph, ser) _ <- g.edge("Hello", "World") _ <- g.close } @@ -84,9 +84,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("digraph, nodes with style") { // given val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", DiGraph, ser) + g <- Graphz[IO]("G", DiGraph, ser) _ <- g.node("Hello", shape = Box) _ <- g.node("World", shape = DoubleCircle) _ <- g.edge("Hello", "World") @@ -103,9 +103,9 @@ class 
GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } it("digraph with simple subgraphs") { - def process1(ser: StringSerializer[Task]): Task[Unit] = + def process1(ser: StringSerializer[IO]): IO[Unit] = for { - g <- Graphz.subgraph[Task]("", DiGraph, ser) + g <- Graphz.subgraph[IO]("", DiGraph, ser) _ <- g.node("A") _ <- g.node("B") _ <- g.node("C") @@ -114,9 +114,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A _ <- g.close } yield () - def process2(ser: StringSerializer[Task]): Task[Unit] = + def process2(ser: StringSerializer[IO]): IO[Unit] = for { - g <- Graphz.subgraph[Task]("", DiGraph, ser) + g <- Graphz.subgraph[IO]("", DiGraph, ser) _ <- g.node("K") _ <- g.node("L") _ <- g.node("M") @@ -126,9 +126,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } yield () val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("Process", DiGraph, ser) + g <- Graphz[IO]("Process", DiGraph, ser) _ <- g.node("0") _ <- process1(ser) _ <- g.edge("0", "A") @@ -165,10 +165,10 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } it("digraph with fancy subgraphs") { - def process1(ser: StringSerializer[Task]): Task[Unit] = + def process1(ser: StringSerializer[IO]): IO[Unit] = for { g <- Graphz - .subgraph[Task]( + .subgraph[IO]( "cluster_p1", DiGraph, ser, @@ -183,10 +183,10 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A _ <- g.close } yield () - def process2(ser: StringSerializer[Task]): Task[Unit] = + def process2(ser: StringSerializer[IO]): IO[Unit] = for { g <- Graphz - .subgraph[Task]( + .subgraph[IO]( "cluster_p2", DiGraph, ser, @@ -202,9 +202,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } yield () val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- 
Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("Process", DiGraph, ser) + g <- Graphz[IO]("Process", DiGraph, ser) _ <- g.node("0") _ <- process1(ser) _ <- g.edge("0", "A") @@ -246,26 +246,26 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("blockchain, simple") { // given - def lvl1(ser: StringSerializer[Task]): Task[Unit] = + def lvl1(ser: StringSerializer[IO]): IO[Unit] = for { - g <- Graphz.subgraph[Task]("", DiGraph, ser, rank = Some(Same)) + g <- Graphz.subgraph[IO]("", DiGraph, ser, rank = Some(Same)) _ <- g.node("1") _ <- g.node("ddeecc", shape = Box) _ <- g.node("ffeeff", shape = Box) _ <- g.close } yield () - def lvl0(ser: StringSerializer[Task]): Task[Unit] = + def lvl0(ser: StringSerializer[IO]): IO[Unit] = for { - g <- Graphz.subgraph[Task]("", DiGraph, ser, rank = Some(Same)) + g <- Graphz.subgraph[IO]("", DiGraph, ser, rank = Some(Same)) _ <- g.node("0") _ <- g.node("000000", shape = Box) _ <- g.close } yield () - def timeline(ser: StringSerializer[Task]): Task[Unit] = + def timeline(ser: StringSerializer[IO]): IO[Unit] = for { - g <- Graphz.subgraph[Task]("timeline", DiGraph, ser) + g <- Graphz.subgraph[IO]("timeline", DiGraph, ser) _ <- g.node("3", shape = PlainText) _ <- g.node("2", shape = PlainText) _ <- g.node("1", shape = PlainText) @@ -277,9 +277,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } yield () val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("Blockchain", DiGraph, ser, rankdir = Some(BT)) + g <- Graphz[IO]("Blockchain", DiGraph, ser, rankdir = Some(BT)) _ <- lvl1(ser) _ <- g.edge("000000" -> "ffeeff") _ <- g.edge("000000" -> "ddeecc") @@ -320,9 +320,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A // from https://github.com/xflr6/graphviz/blob/master/examples/process.py 
it("Process example") { val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - graph <- Graphz[Task]("G", Graph, ser) + graph <- Graphz[IO]("G", Graph, ser) _ <- graph.edge("run", "intr") _ <- graph.edge("intr", "runbl") _ <- graph.edge("runbl", "run") @@ -358,9 +358,9 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A it("Huge graph") { // test for a stack overflow val graph = for { - ref <- Ref[Task].of(new StringBuffer("")) + ref <- Ref[IO].of(new StringBuffer("")) ser = new StringSerializer(ref) - g <- Graphz[Task]("G", DiGraph, ser) + g <- Graphz[IO]("G", DiGraph, ser) _ <- (1 to 1000).toList.traverse(i => g.edge(s"e$i" -> s"e${i + 1}")) _ <- g.close } yield ref @@ -368,9 +368,7 @@ class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with A } } - implicit class RefOps(ref: Task[Ref[Task, StringBuffer]]) { - import monix.execution.Scheduler.Implicits.global - - def show: String = ref.flatMap(_.get).map(_.toString).runSyncUnsafe() + implicit class RefOps(ref: IO[Ref[IO, StringBuffer]]) { + def show: String = ref.flatMap(_.get).map(_.toString).unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/Main.scala b/node/src/main/scala/coop/rchain/node/Main.scala index e3479773167..faf1953bde6 100644 --- a/node/src/main/scala/coop/rchain/node/Main.scala +++ b/node/src/main/scala/coop/rchain/node/Main.scala @@ -1,10 +1,10 @@ package coop.rchain.node +import cats.effect.{ContextShift, IO, Timer} import coop.rchain.node.configuration._ import coop.rchain.node.effects._ import coop.rchain.node.runtime.NodeMain import coop.rchain.shared._ -import monix.eval.Task import monix.execution.Scheduler import org.slf4j.LoggerFactory @@ -21,30 +21,25 @@ object Main { LoggerFactory.getLogger(getClass).error("Unhandled exception in thread " + thread.getName, ex) }) - // Main scheduler for all CPU bounded tasks - // Should always be 
passed as implicit dependency. - // All other schedulers should be explicit. - implicit val scheduler: Scheduler = Scheduler.computation( - Math.max(java.lang.Runtime.getRuntime.availableProcessors, 2), - "node-runner", - reporter = UncaughtExceptionLogger - ) - implicit val console: ConsoleIO[Task] = NodeMain.consoleIO - implicit val log: Log[Task] = effects.log + // Main scheduler for all CPU bounded tasks and ContextShift + import RChainScheduler._ + + implicit val console: ConsoleIO[IO] = NodeMain.consoleIO + implicit val log: Log[IO] = effects.log // Ensure terminal is restored on exit sys.addShutdownHook { - console.close.runSyncUnsafe() + console.close.unsafeRunSync } // Parse CLI options val options = commandline.Options(args) if (options.subcommand.contains(options.run)) // Start the node - NodeMain.startNode[Task](options).runSyncUnsafe() + NodeMain.startNode[IO](options).unsafeRunSync //or else // Execute CLI command - NodeMain.runCLI[Task](options).runSyncUnsafe() + NodeMain.runCLI[IO](options).unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala index 278b974870d..463ab94030b 100644 --- a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala +++ b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala @@ -10,7 +10,6 @@ import coop.rchain.casper.protocol.deploy.v1._ import coop.rchain.catscontrib.TaskContrib.AbstractTaskOps import coop.rchain.models.StacksafeMessage import coop.rchain.models.syntax._ -import coop.rchain.monix.Monixable import coop.rchain.shared.Log import coop.rchain.shared.ThrowableOps.RichThrowable import io.grpc.Metadata @@ -18,7 +17,7 @@ import fs2.Stream object DeployGrpcServiceV1 { - def apply[F[_]: Monixable: Concurrent: Log]( + def apply[F[_]: Concurrent: Log]( blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F] ): DeployServiceFs2Grpc[F, Metadata] = diff --git 
a/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala b/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala index cd32f865d64..6612c77b305 100644 --- a/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala +++ b/node/src/main/scala/coop/rchain/node/api/ProposeGrpcServiceV1.scala @@ -11,14 +11,13 @@ import coop.rchain.casper.protocol.propose.v1.{ import coop.rchain.casper.protocol.{ProposeQuery, ProposeResultQuery, ServiceError} import coop.rchain.catscontrib.TaskContrib.AbstractTaskOps import coop.rchain.models.StacksafeMessage -import coop.rchain.monix.Monixable import coop.rchain.shared.ThrowableOps._ import coop.rchain.shared._ import io.grpc.Metadata object ProposeGrpcServiceV1 { - def apply[F[_]: Monixable: Sync: Log]( + def apply[F[_]: Sync: Log]( blockApi: BlockApi[F] ): ProposeServiceFs2Grpc[F, Metadata] = new ProposeServiceFs2Grpc[F, Metadata] { diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index c82874efd49..91c556fb642 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} +import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.node.model.ReplFs2Grpc @@ -12,6 +12,7 @@ import io.grpc.protobuf.services.ProtoReflectionService import monix.execution.Scheduler import java.net.InetSocketAddress +import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration package object api { @@ -19,7 +20,7 @@ package object api { def acquireInternalServer[F[_]: Sync: ConcurrentEffect]( host: String, port: Int, - grpcExecutor: Scheduler, + grpcEC: ExecutionContext, 
replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], proposeService: ProposeServiceFs2Grpc[F, Metadata], @@ -33,7 +34,7 @@ package object api { ): Resource[F, grpc.Server] = { val server = NettyServerBuilder .forAddress(new InetSocketAddress(host, port)) - .executor(grpcExecutor) + .executor(grpcEC.execute) .maxInboundMessageSize(maxMessageSize) .addService(ReplFs2Grpc.bindService(replService)) .addService(ProposeServiceFs2Grpc.bindService(proposeService)) @@ -54,7 +55,7 @@ package object api { def acquireExternalServer[F[_]: Concurrent: ConcurrentEffect: Log]( host: String, port: Int, - grpcExecutor: Scheduler, + grpcEC: ExecutionContext, deployGrpcService: DeployServiceFs2Grpc[F, Metadata], maxMessageSize: Int, keepAliveTime: FiniteDuration, @@ -66,7 +67,7 @@ package object api { ): Resource[F, grpc.Server] = { val server = NettyServerBuilder .forAddress(new InetSocketAddress(host, port)) - .executor(grpcExecutor) + .executor(grpcEC.execute) .maxInboundMessageSize(maxMessageSize) .addService(DeployServiceFs2Grpc.bindService(deployGrpcService)) .compressorRegistry(null) diff --git a/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala b/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala index 2b4d6c6cab8..ef591b34700 100644 --- a/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala +++ b/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala @@ -2,12 +2,9 @@ package coop.rchain.node.diagnostics import java.io.IOException import java.util.concurrent.atomic.AtomicReference - import scala.concurrent.duration._ import scala.util.Try - import coop.rchain.node.diagnostics.BatchInfluxDBReporter.Settings - import com.typesafe.config.Config import kamon.{Kamon, MetricReporter} import kamon.metric._ @@ -19,6 +16,7 @@ import monix.reactive.subjects._ import okhttp3._ import org.slf4j.LoggerFactory +// TODO get rid of monix 
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) class BatchInfluxDBReporter(config: Config = Kamon.config()) extends MetricReporter { private val logger = LoggerFactory.getLogger(classOf[BatchInfluxDBReporter]) diff --git a/node/src/main/scala/coop/rchain/node/effects/JLineConsoleIO.scala b/node/src/main/scala/coop/rchain/node/effects/JLineConsoleIO.scala index a3cb0b0ee97..2f6048362fd 100644 --- a/node/src/main/scala/coop/rchain/node/effects/JLineConsoleIO.scala +++ b/node/src/main/scala/coop/rchain/node/effects/JLineConsoleIO.scala @@ -14,7 +14,6 @@ import TaskContrib._ import cats.effect.Sync import coop.rchain.shared.StringOps.ColoredString import coop.rchain.shared.TerminalMode -import monix.eval.Task @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) class JLineConsoleIO[F[_]: Sync](console: ConsoleReader) extends ConsoleIO[F] { diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index ad0ed156d86..7a412407fc0 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -1,7 +1,7 @@ package coop.rchain.node import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ConcurrentEffect, Sync} +import cats.effect.{Blocker, Concurrent, ConcurrentEffect, ContextShift, IO, Sync} import cats.mtl._ import cats.syntax.all._ import cats.{Applicative, Monad, Parallel} @@ -11,12 +11,12 @@ import coop.rchain.comm.rp.Connect._ import coop.rchain.comm.rp._ import coop.rchain.comm.transport._ import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.shared._ import monix.eval._ import monix.execution._ import java.nio.file.Path +import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.io.Source import scala.tools.jline.console._ @@ -24,7 +24,7 @@ import scala.util.Using package object effects { - def 
log: Log[Task] = Log.log + def log: Log[IO] = Log.log def kademliaStore[F[_]: Sync: KademliaRPC: Metrics](id: NodeIdentifier): KademliaStore[F] = KademliaStore.table[F](id) @@ -34,16 +34,17 @@ package object effects { def kademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( networkId: String, - timeout: FiniteDuration - )(implicit scheduler: Scheduler): KademliaRPC[F] = new GrpcKademliaRPC(networkId, timeout) + timeout: FiniteDuration, + grpcEC: ExecutionContext + ): KademliaRPC[F] = + new GrpcKademliaRPC(networkId, timeout, grpcEC) - def transportClient[F[_]: Monixable: Concurrent: ConcurrentEffect: Parallel: Log: Metrics]( + def transportClient[F[_]: Concurrent: ContextShift: ConcurrentEffect: Parallel: Log: Metrics]( networkId: String, certPath: Path, keyPath: Path, maxMessageSize: Int, - packetChunkSize: Int, - ioScheduler: Scheduler + packetChunkSize: Int ): F[TransportLayer[F]] = Ref.of[F, Map[PeerNode, Deferred[F, BufferedGrpcStreamChannel[F]]]](Map()) map { channels => val cert = Using.resource(Source.fromFile(certPath.toFile))(_.mkString) @@ -55,8 +56,7 @@ package object effects { maxMessageSize, packetChunkSize, clientQueueSize = 100, - channels, - ioScheduler + channels ): TransportLayer[F] } diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala index a7c5b4ed35d..26631cc7ee3 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala @@ -19,6 +19,7 @@ import coop.rchain.shared.syntax._ import java.nio.file.Path import scala.concurrent.ExecutionContext +import scala.concurrent.ExecutionContext.global object StateBalances { @@ -45,7 +46,7 @@ object StateBalances { blockHash: String, vaultTreeHashMapDepth: Int, dataDir: Path - )(implicit scheduler: ExecutionContext): F[List[(ByteString, Long)]] = { + ): F[List[(ByteString, Long)]] = { import 
coop.rchain.rholang.interpreter.storage._ implicit val span = NoopSpan[F]() implicit val log: Log[F] = Log.log @@ -59,7 +60,8 @@ object StateBalances { store <- rnodeStoreManager.rSpaceStores spaces <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store + store, + global ) (rSpacePlay, rSpaceReplay) = spaces runtimes <- RhoRuntime.createRuntimes[F](rSpacePlay, rSpaceReplay, true, Seq.empty, Par()) diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala index 923450471df..e85bd5c9f4f 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala @@ -6,8 +6,6 @@ import coop.rchain.models.rholang.RhoType.RhoName import coop.rchain.models.syntax._ import coop.rchain.node.revvaultexport.StateBalances import coop.rchain.shared.Base16 -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.rogach.scallop.ScallopConf import java.io.PrintWriter @@ -76,9 +74,10 @@ object StateBalanceMain { } val stateBalancesFile = outputDir.resolve("stateBalances.csv") - implicit val tc = Concurrent[Task] + import coop.rchain.shared.RChainScheduler._ + implicit val tc = Concurrent[IO] - val task: Task[Unit] = for { + val task: IO[Unit] = for { stateBalances <- StateBalances.read( shardId, blockHash, @@ -95,6 +94,6 @@ object StateBalanceMain { } } yield () - task.runSyncUnsafe() + task.unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala index 8151000dd4b..0153d8db530 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala +++ 
b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala @@ -1,6 +1,6 @@ package coop.rchain.node.revvaultexport.mainnet1.reporting -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -14,8 +14,6 @@ import coop.rchain.rspace.{Match, RSpace} import coop.rchain.models.syntax._ import coop.rchain.shared.{Base16, Log} import coop.rchain.shared.syntax._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.rogach.scallop.ScallopConf import java.io.PrintWriter @@ -155,23 +153,25 @@ object MergeBalanceMain { val outputDir = options.outputDir() val mergeFile = outputDir.resolve("mergeBalances.csv") - implicit val log: Log[Task] = Log.log - implicit val span: NoopSpan[Task] = NoopSpan[Task]() - implicit val metrics: Metrics.MetricsNOP[Task] = new Metrics.MetricsNOP[Task]() + implicit val log: Log[IO] = Log.log + implicit val span: NoopSpan[IO] = NoopSpan[IO]() + implicit val metrics: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO]() import coop.rchain.rholang.interpreter.storage._ - implicit val m: Match[Task, BindPattern, ListParWithRandom] = matchListPar[Task] + implicit val m: Match[IO, BindPattern, ListParWithRandom] = matchListPar[IO] + import coop.rchain.shared.RChainScheduler._ - val task: Task[Vector[Account]] = for { - accountMap <- getVaultMap(stateBalanceFile, transactionBalanceFile).pure[Task] - rnodeStoreManager <- RNodeKeyValueStoreManager[Task](dataDir) - blockStore <- BlockStore[Task](rnodeStoreManager) + val task: IO[Vector[Account]] = for { + accountMap <- getVaultMap(stateBalanceFile, transactionBalanceFile).pure[IO] + rnodeStoreManager <- RNodeKeyValueStoreManager[IO](dataDir) + blockStore <- BlockStore[IO](rnodeStoreManager) store <- rnodeStoreManager.rSpaceStores spaces <- RSpace - .createWithReplay[Task, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - 
store + .createWithReplay[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation]( + store, + rholangEC ) (rSpacePlay, rSpaceReplay) = spaces - runtimes <- RhoRuntime.createRuntimes[Task](rSpacePlay, rSpaceReplay, true, Seq.empty, Par()) + runtimes <- RhoRuntime.createRuntimes[IO](rSpacePlay, rSpaceReplay, true, Seq.empty, Par()) (rhoRuntime, _) = runtimes blockOpt <- blockStore.get1(blockHash.unsafeHexToByteString) block = blockOpt.get @@ -179,18 +179,18 @@ object MergeBalanceMain { adjustedAccounts <- accountMap.toList.foldLeftM(Vector.empty[Account]) { case (acc, (_, account)) => if (account.transactionBalance != account.stateBalance) for { - _ <- Log[Task].info(s"account is not correct ${account}") + _ <- Log[IO].info(s"account is not correct ${account}") balance <- if (account.address != "unknown") - getBalanceFromRholang[Task]( + getBalanceFromRholang[IO]( account.address, rhoRuntime, postStateHash ) - else 0L.pure[Task] + else 0L.pure[IO] adjustAccount = account.copy( adjustedStateBalance = balance ) - _ <- Log[Task] + _ <- Log[IO] .info( s"Should Before adjusted after ${adjustAccount}" ) @@ -199,11 +199,11 @@ object MergeBalanceMain { val adjustAccount = account.copy(adjustedStateBalance = account.stateBalance) acc :+ adjustAccount - }.pure[Task] + }.pure[IO] } } yield adjustedAccounts - val accountMap = task.runSyncUnsafe() + val accountMap = task.unsafeRunSync val file = mergeFile.toFile val bw = new PrintWriter(file) diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala index 1b11d7d3b9c..b45b75d01f9 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala @@ -2,8 +2,7 @@ package coop.rchain.node.revvaultexport.mainnet1.reporting 
import cats.effect._ import coop.rchain.node.revvaultexport.reporting.TransactionBalances -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global + import org.rogach.scallop.ScallopConf import java.io.PrintWriter @@ -74,9 +73,10 @@ object TransactionBalanceMain { val transactionBalancesFile = outputDir.resolve("transactionBalances.csv") val historyFile = outputDir.resolve("history.csv") - implicit val tc = Concurrent[Task] + import coop.rchain.shared.RChainScheduler._ + implicit val tc = Concurrent[IO] - val task: Task[Unit] = for { + val task: IO[Unit] = for { result <- TransactionBalances.main( dataDir, walletPath, @@ -110,6 +110,6 @@ object TransactionBalanceMain { } } yield () - task.runSyncUnsafe() + task.unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala index 39e370c0df7..a4c93614f90 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala @@ -32,6 +32,7 @@ import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.rspace.syntax._ import coop.rchain.rspace.{Match, RSpace} import coop.rchain.models.syntax._ +import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.shared.{Base16, Log} import coop.rchain.shared.syntax._ @@ -225,7 +226,7 @@ object TransactionBalances { walletPath: Path, bondPath: Path, targetBlockHash: String - )(implicit scheduler: ExecutionContext): F[(GlobalVaultsInfo, List[TransactionBlockInfo])] = { + ): F[(GlobalVaultsInfo, List[TransactionBlockInfo])] = { implicit val metrics: Metrics.MetricsNOP[F] = new Metrics.MetricsNOP[F]() import coop.rchain.rholang.interpreter.storage._ implicit val span: NoopSpan[F] = NoopSpan[F]() @@ -237,7 +238,8 @@ object TransactionBalances { store <- 
rnodeStoreManager.rSpaceStores spaces <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store + store, + rholangEC ) (rSpacePlay, rSpaceReplay) = spaces runtimes <- RhoRuntime diff --git a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala index fc6c87d8ba1..1d5dce5cf28 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala @@ -4,7 +4,6 @@ import cats.effect.Concurrent import coop.rchain.casper.api.{BlockApi, BlockReportApi} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc -import coop.rchain.monix.Monixable import coop.rchain.node.api.{DeployGrpcServiceV1, ProposeGrpcServiceV1, ReplGrpcService} import coop.rchain.node.model.ReplFs2Grpc import coop.rchain.rholang.interpreter.RhoRuntime @@ -18,7 +17,7 @@ final case class GrpcServices[F[_]]( ) object GrpcServices { - def build[F[_]: Monixable: Concurrent: Log]( + def build[F[_]: Concurrent: Log]( blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F], runtime: RhoRuntime[F] diff --git a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index 33dc7c302bc..4a2b54d8fee 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync, Timer} +import cats.effect.{Async, Concurrent, ConcurrentEffect, ContextShift, IO, Resource, Sync, Timer} import cats.syntax.all._ import com.typesafe.config.Config import coop.rchain.casper.protocol.deploy.v1 @@ -12,7 +12,6 @@ import coop.rchain.comm.rp.HandleMessages import 
coop.rchain.comm.transport.{GrpcTransportServer, TransportLayer} import coop.rchain.comm.{discovery, RoutingMessage} import coop.rchain.metrics.Metrics -import coop.rchain.monix.Monixable import coop.rchain.node.api.{AdminWebApi, WebApi} import coop.rchain.node.configuration.NodeConf import coop.rchain.node.diagnostics.{ @@ -31,9 +30,12 @@ import io.grpc.{Metadata, Server} import kamon.Kamon import kamon.system.SystemMetrics import kamon.zipkin.ZipkinReporter -import monix.eval.Task import monix.execution.Scheduler import org.http4s.server +import coop.rchain.shared.RChainScheduler._ + +import scala.concurrent.ExecutionContext +import scala.util.{Failure, Success} object NetworkServers { @@ -42,7 +44,7 @@ object NetworkServers { */ // format: off def create[F[_] - /* Execution */ : Monixable: ConcurrentEffect: Timer + /* Execution */ : ConcurrentEffect: Timer: ContextShift /* Comm */ : TransportLayer: NodeDiscovery: KademliaStore: RPConfAsk: ConnectionsCell /* Diagnostics */ : Log: Metrics] // format: on ( @@ -53,24 +55,24 @@ object NetworkServers { reportingRoutes: ReportingHttpRoutes[F], nodeConf: NodeConf, kamonConf: Config, - grpcScheduler: Scheduler - )(implicit scheduler: Scheduler): Resource[F, Unit] = { + grpcEC: ExecutionContext + ): Resource[F, Unit] = { val GrpcServices(deploySrv, proposeSrv, replSrv) = grpcServices val host = nodeConf.apiServer.host for { nodeAddress <- Resource.eval(RPConfAsk[F].ask.map(_.local.toAddress)) - intServer <- internalServer(nodeConf, replSrv, deploySrv, proposeSrv, grpcScheduler) + intServer <- internalServer(nodeConf, replSrv, deploySrv, proposeSrv, grpcEC) _ <- Resource.eval(Log[F].info(s"Internal API server started at $host:${intServer.getPort}.")) - extServer <- externalServer(nodeConf, deploySrv, grpcScheduler) + extServer <- externalServer(nodeConf, deploySrv, grpcEC) extServerMsg = s"External API server started at $host:${extServer.getPort}." 
_ <- Resource.eval(Log[F].info(extServerMsg)) _ <- protocolServer(nodeConf, routingMessageQueue) _ <- Resource.eval(Log[F].info(s"Listening for traffic on $nodeAddress.")) - discovery <- discoveryServer(nodeConf, grpcScheduler) + discovery <- discoveryServer(nodeConf, grpcEC) _ <- Resource.eval(Log[F].info(s"Kademlia RPC server started at $host:${discovery.getPort}.")) prometheusRep = new NewPrometheusReporter() @@ -92,12 +94,12 @@ object NetworkServers { replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], proposeService: ProposeServiceFs2Grpc[F, Metadata], - grpcScheduler: Scheduler + grpcEC: ExecutionContext ): Resource[F, Server] = api.acquireInternalServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portGrpcInternal, - grpcScheduler, + grpcEC, replService, deployService, proposeService, @@ -113,12 +115,12 @@ object NetworkServers { def externalServer[F[_]: Concurrent: ConcurrentEffect: Log]( nodeConf: NodeConf, deployService: v1.DeployServiceFs2Grpc[F, Metadata], - grpcScheduler: Scheduler + grpcEC: ExecutionContext ): Resource[F, Server] = api.acquireExternalServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portGrpcExternal, - grpcScheduler, + grpcEC, deployService, nodeConf.apiServer.grpcMaxRecvMessageSize.toInt, nodeConf.apiServer.keepAliveTime, @@ -129,10 +131,10 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def protocolServer[F[_]: Monixable: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Timer]( + def protocolServer[F[_]: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Timer]( nodeConf: NodeConf, routingMessageQueue: Queue[F, RoutingMessage] - )(implicit scheduler: Scheduler): Resource[F, Unit] = { + ): Resource[F, Unit] = { val server = GrpcTransportServer.acquireServer[F]( nodeConf.protocolServer.networkId, nodeConf.protocolServer.port, @@ -149,24 +151,24 @@ object NetworkServers { ) } - def 
discoveryServer[F[_]: Monixable: Concurrent: ConcurrentEffect: KademliaStore: Log: Metrics]( + def discoveryServer[F[_]: Concurrent: ConcurrentEffect: KademliaStore: Log: Metrics]( nodeConf: NodeConf, - grpcScheduler: Scheduler + grpcEC: ExecutionContext ): Resource[F, Server] = discovery.acquireKademliaRPCServer( nodeConf.protocolServer.networkId, nodeConf.peersDiscovery.port, KademliaHandleRPC.handlePing[F], KademliaHandleRPC.handleLookup[F], - grpcScheduler + grpcEC ) - def webApiServer[F[_]: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def webApiServer[F[_]: ContextShift: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], reportingRoutes: ReportingHttpRoutes[F], prometheusReporter: NewPrometheusReporter - )(implicit scheduler: Scheduler): Resource[F, server.Server[F]] = + ): Resource[F, server.Server[F]] = web.acquireHttpServer[F]( nodeConf.apiServer.enableReporting, nodeConf.apiServer.host, @@ -177,12 +179,12 @@ object NetworkServers { reportingRoutes ) - def adminWebApiServer[F[_]: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def adminWebApiServer[F[_]: ContextShift: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], adminWebApi: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F] - )(implicit scheduler: Scheduler): Resource[F, server.Server[F]] = + ): Resource[F, server.Server[F]] = web.acquireAdminHttpServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portAdminHttp, @@ -192,7 +194,7 @@ object NetworkServers { reportingRoutes ) - def metricsInit[F[_]: Monixable: Sync]( + def metricsInit[F[_]: Async]( nodeConf: NodeConf, kamonConf: Config, prometheusReporter: NewPrometheusReporter @@ -205,9 +207,15 @@ object NetworkServers { if (nodeConf.metrics.zipkin) Kamon.addReporter(new ZipkinReporter()).void() if (nodeConf.metrics.sigar) SystemMetrics.startCollecting() } + // 
TODO: check new version of Kamon if supports custom effect - def stop: Task[Unit] = Task.fromFuture(Kamon.stopAllReporters()) + def stop: F[Unit] = Async[F].async { cb => + Kamon.stopAllReporters().onComplete { + case Success(value) => cb(Right(value)) + case Failure(error) => cb(Left(error)) + } + } - Resource.make(Sync[F].delay(start()))(_ => stop.fromTask) + Resource.make(Sync[F].delay(start()))(_ => stop) } } diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index 118f41ff1e0..801e5d94890 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -8,7 +8,6 @@ import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.{Secp256k1, SignaturesAlg} import coop.rchain.crypto.util.KeyUtil import coop.rchain.models.syntax._ -import coop.rchain.monix.Monixable import coop.rchain.node.configuration.Configuration.Profile import coop.rchain.node.configuration._ import coop.rchain.node.effects @@ -27,14 +26,16 @@ import scala.tools.jline.console.completer.StringsCompleter object NodeMain { + import coop.rchain.shared.RChainScheduler.mainEC // main execution context + /** * Starts RNode instance * * @param options command line options */ - def startNode[F[_]: Monixable: ConcurrentEffect: Parallel: ContextShift: Timer: ConsoleIO: Log]( + def startNode[F[_]: ConcurrentEffect: Parallel: ContextShift: Timer: ConsoleIO: Log]( options: commandline.Options - )(implicit s: Scheduler): F[Unit] = Sync[F].defer { + ): F[Unit] = Sync[F].defer { // Create merged configuration from CLI options and config file val (nodeConf, profile, configFile, kamonConf) = Configuration.build(options) // This system variable is used in Logger config file `node/src/main/resources/logback.xml` @@ -85,7 +86,7 @@ object NodeMain { * @param options command line options * @param console console */ - def runCLI[F[_]: Sync: 
Monixable: ConcurrentEffect: ConsoleIO: Timer]( + def runCLI[F[_]: Sync: ConcurrentEffect: ConsoleIO: Timer]( options: commandline.Options ): F[Unit] = { val grpcPort = diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index bc13d967fdb..04b23e9ccd6 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -14,7 +14,6 @@ import coop.rchain.comm.discovery._ import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfState} import coop.rchain.comm.rp._ import coop.rchain.models.BlockHash.BlockHash -import coop.rchain.monix.Monixable import coop.rchain.node.configuration.NodeConf import coop.rchain.node.runtime.NodeCallCtx.NodeCallCtxReader import coop.rchain.node.runtime.NodeRuntime._ @@ -24,15 +23,18 @@ import coop.rchain.shared.syntax._ import fs2.Stream import monix.execution.Scheduler +import java.util.concurrent.{Executors, ThreadFactory} +import java.util.concurrent.atomic.AtomicLong +import scala.concurrent.ExecutionContext import scala.concurrent.duration._ object NodeRuntime { type LocalEnvironment[F[_]] = ApplicativeLocal[F, NodeCallCtx] - def start[F[_]: Monixable: ConcurrentEffect: Parallel: ContextShift: Timer: Log]( + def start[F[_]: ConcurrentEffect: Parallel: ContextShift: Timer: Log]( nodeConf: NodeConf, kamonConf: Config - )(implicit scheduler: Scheduler): F[Unit] = { + )(implicit mainEC: ExecutionContext): F[Unit] = { val nodeCallCtxReader: NodeCallCtxReader[F] = NodeCallCtxReader[F]() import nodeCallCtxReader._ @@ -42,15 +44,14 @@ object NodeRuntime { * although they can be generated with cats.tagless @autoFunctorK macros but support is missing for IntelliJ. * https://github.com/typelevel/cats-tagless/issues/60 (Cheers, Marcin!!) 
*/ - implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) - implicit val tm: Timer[ReaderNodeCallCtx] = Timer[F].mapK(effToEnv) - implicit val mn: Monixable[ReaderNodeCallCtx] = Monixable[F].mapK(effToEnv, NodeCallCtx.init) + implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) + implicit val tm: Timer[ReaderNodeCallCtx] = Timer[F].mapK(effToEnv) for { id <- NodeEnvironment.create[F](nodeConf) // Create NodeRuntime instance - runtime = new NodeRuntime[ReaderNodeCallCtx](nodeConf, kamonConf, id, scheduler) + runtime = new NodeRuntime[ReaderNodeCallCtx](nodeConf, kamonConf, id) // Run reader layer with initial state _ <- runtime.main.run(NodeCallCtx.init) @@ -74,18 +75,29 @@ object NodeRuntime { } yield () } -class NodeRuntime[F[_]: Monixable: ConcurrentEffect: Parallel: Timer: ContextShift: LocalEnvironment: Log] private[node] ( +class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Timer: ContextShift: LocalEnvironment: Log] private[node] ( nodeConf: NodeConf, kamonConf: Config, - id: NodeIdentifier, - scheduler: Scheduler -) { - // Main scheduler for all CPU bounded tasks - implicit val mainSheduler = scheduler + id: NodeIdentifier +)(implicit mainEC: ExecutionContext) { // TODO: revise use of schedulers for gRPC - private[this] val grpcScheduler = - Scheduler.cached("grpc-io", 4, 64, reporter = UncaughtExceptionLogger) + private[this] val grpcEC = mainEC + + val ioScheduler = Executors.newCachedThreadPool(new ThreadFactory { + private val counter = new AtomicLong(0L) + + def newThread(r: Runnable) = { + val th = new Thread(r) + th.setName( + "io-thread-" + + counter.getAndIncrement.toString + ) + th.setDaemon(true) + th + } + }) + implicit private val logSource: LogSource = LogSource(this.getClass) /** @@ -118,8 +130,7 @@ class NodeRuntime[F[_]: Monixable: ConcurrentEffect: Parallel: Timer: ContextShi nodeConf.tls.certificatePath, nodeConf.tls.keyPath, nodeConf.protocolClient.grpcMaxRecvMessageSize.toInt, - 
nodeConf.protocolClient.grpcStreamChunkSize.toInt, - grpcScheduler + nodeConf.protocolClient.grpcStreamChunkSize.toInt ) } @@ -142,10 +153,11 @@ class NodeRuntime[F[_]: Monixable: ConcurrentEffect: Parallel: Timer: ContextShi // Node discovery service (Kademlia) kademliaRPC = { - implicit val (p, g, m) = (rpConfAsk, grpcScheduler, metrics) + implicit val (p, m) = (rpConfAsk, metrics) effects.kademliaRPC( nodeConf.protocolServer.networkId, - nodeConf.protocolClient.networkTimeout + nodeConf.protocolClient.networkTimeout, + grpcEC ) } @@ -214,7 +226,7 @@ class NodeRuntime[F[_]: Monixable: ConcurrentEffect: Parallel: Timer: ContextShi reportRoutes, nodeConf, kamonConf, - grpcScheduler + grpcEC ) // Return node launch stream } yield nodeLaunch diff --git a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index 47241f49e77..efda1cd327a 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -27,7 +27,6 @@ import coop.rchain.metrics.{Metrics, Span} import coop.rchain.models.BlockHash.BlockHash import coop.rchain.models.Par import coop.rchain.models.syntax.modelsSyntaxByteString -import coop.rchain.monix.Monixable import coop.rchain.node.api.AdminWebApi.AdminWebApiImpl import coop.rchain.node.api.WebApi.WebApiImpl import coop.rchain.node.api.{AdminWebApi, WebApi} @@ -49,14 +48,14 @@ import fs2.concurrent.Queue import monix.execution.Scheduler object Setup { - def setupNodeProgram[F[_]: Monixable: Concurrent: Parallel: ContextShift: Timer: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( + def setupNodeProgram[F[_]: Concurrent: Parallel: ContextShift: Timer: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( storeManager: KeyValueStoreManager[F], rpConnections: ConnectionsCell[F], rpConfAsk: ApplicativeAsk[F, RPConf], commUtil: CommUtil[F], blockRetriever: BlockRetriever[F], conf: NodeConf - 
)(implicit mainScheduler: Scheduler): F[ + ): F[ ( Stream[F, Unit], // Node startup process (protocol messages handling) Queue[F, RoutingMessage], @@ -90,12 +89,14 @@ object Setup { // Runtime for `rnode eval` evalRuntime <- { implicit val sp = span - storeManager.evalStores.flatMap(RhoRuntime.createRuntime[F](_, Par())) + import RChainScheduler._ + storeManager.evalStores.flatMap(RhoRuntime.createRuntime[F](_, Par(), rholangEC)) } // Runtime manager (play and replay runtimes) runtimeManagerWithHistory <- { implicit val sp = span + import RChainScheduler._ for { rStores <- storeManager.rSpaceStores mergeStore <- RuntimeManager.mergeableStore(storeManager) @@ -104,7 +105,8 @@ object Setup { rStores, mergeStore, BlockRandomSeed.nonNegativeMergeableTagName(conf.casper.shardName), - executionTracker + executionTracker, + rholangEC ) } yield rm } diff --git a/node/src/main/scala/coop/rchain/node/web/package.scala b/node/src/main/scala/coop/rchain/node/web/package.scala index 6923583b84b..fe00c67a814 100644 --- a/node/src/main/scala/coop/rchain/node/web/package.scala +++ b/node/src/main/scala/coop/rchain/node/web/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{ConcurrentEffect, Resource, Sync, Timer} +import cats.effect.{ConcurrentEffect, ContextShift, Resource, Sync, Timer} import cats.syntax.all._ import coop.rchain.comm.discovery.NodeDiscovery import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} @@ -24,7 +24,7 @@ package object web { def corsPolicy[F[_]: Sync](routes: HttpRoutes[F]) = CORS(routes, CORS.DefaultCORSConfig.copy(allowCredentials = false)) - def acquireHttpServer[F[_]: ConcurrentEffect: Timer: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( + def acquireHttpServer[F[_]: ContextShift: ConcurrentEffect: Timer: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( reporting: Boolean, host: String = "0.0.0.0", httpPort: Int, @@ -32,7 +32,7 @@ package object web { connectionIdleTimeout: FiniteDuration, webApi: WebApi[F], 
reportingRoutes: ReportingHttpRoutes[F] - )(implicit scheduler: Scheduler): Resource[F, Server[F]] = { + ): Resource[F, Server[F]] = { val reportingRoutesOpt = if (reporting) reportingRoutes else HttpRoutes.empty val baseRoutes = Map( "/metrics" -> corsPolicy(NewPrometheusReporter.service[F](prometheusReporter)), @@ -50,7 +50,8 @@ package object web { Map.empty val allRoutes = baseRoutes ++ extraRoutes - BlazeServerBuilder[F](scheduler) + import coop.rchain.shared.RChainScheduler._ + BlazeServerBuilder[F](mainEC) .bindHttp(httpPort, host) .withHttpApp(RouterFix(allRoutes.toList: _*).orNotFound) .withIdleTimeout(connectionIdleTimeout) @@ -58,20 +59,21 @@ package object web { .resource } - def acquireAdminHttpServer[F[_]: ConcurrentEffect: Timer: Log]( + def acquireAdminHttpServer[F[_]: ContextShift: ConcurrentEffect: Timer: Log]( host: String = "0.0.0.0", httpPort: Int, connectionIdleTimeout: FiniteDuration, webApi: WebApi[F], adminWebApiRoutes: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F] - )(implicit scheduler: Scheduler): Resource[F, Server[F]] = { + ): Resource[F, Server[F]] = { val baseRoutes = Map( "/api" -> corsPolicy(AdminWebApiRoutes.service[F](adminWebApiRoutes) <+> reportingRoutes), // Web API v1 (admin) with OpenAPI schema "/api/v1" -> corsPolicy(WebApiRoutesV1.createAdmin[F](webApi, adminWebApiRoutes)) ) - BlazeServerBuilder[F](scheduler) + import coop.rchain.shared.RChainScheduler._ + BlazeServerBuilder[F](mainEC) .bindHttp(httpPort, host) .withHttpApp(RouterFix(baseRoutes.toList: _*).orNotFound) .withResponseHeaderTimeout(connectionIdleTimeout - 1.second) diff --git a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala index abdbbb87121..219f29b5a0c 100644 --- a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala +++ b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala @@ -1,5 +1,6 @@ package coop.rchain.node +import cats.effect.IO import 
coop.rchain.casper.api.BlockReportApi import coop.rchain.casper.helper.TestNode import coop.rchain.casper.rholang.{BlockRandomSeed, Resources} @@ -12,8 +13,6 @@ import coop.rchain.node.web.{PreCharge, Refund, Transaction, UserDeploy} import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.models.syntax._ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers @@ -25,14 +24,15 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { TestNode.networkEff(genesis, networkSize = 1, withReadOnlySize = 1).use { nodes => val validator = nodes(0) val readonly = nodes(1) + import coop.rchain.shared.RChainScheduler._ import readonly._ for { - kvm <- Resources.mkTestRNodeStoreManager[Task](readonly.dataDir) + kvm <- Resources.mkTestRNodeStoreManager[IO](readonly.dataDir) rspaceStore <- kvm.rSpaceStores reportingCasper = ReportingCasper - .rhoReporter[Task](rspaceStore, this.genesis.genesisBlock.shardId) - reportingStore <- ReportStore.store[Task](kvm) - blockReportAPI = BlockReportApi[Task]( + .rhoReporter[IO](rspaceStore, this.genesis.genesisBlock.shardId) + reportingStore <- ReportStore.store[IO](kvm) + blockReportAPI = BlockReportApi[IO]( reportingCasper, reportingStore, readonly.validatorIdOpt @@ -44,7 +44,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { phloPrice = phloPrice, shardId = this.genesis.genesisBlock.shardId ) - transactionAPI = Transaction[Task]( + transactionAPI = Transaction[IO]( blockReportAPI, BlockRandomSeed.transferUnforgeable( this.genesis.genesisBlock.shardId @@ -105,7 +105,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { case _ => () } } - } yield ()).runSyncUnsafe() + } yield ()).unsafeRunSync } "no user deploy log" should "return only 
precharge and refund transaction" in { @@ -135,7 +135,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { } } - } yield ()).runSyncUnsafe() + } yield ()).unsafeRunSync } "preCharge failed case" should "return 1 preCharge transaction" in { @@ -152,7 +152,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { _ = t.transaction.failReason should be(Some("Insufficient funds")) - } yield (t, block)).runSyncUnsafe() + } yield (t, block)).unsafeRunSync transaction.transactionType shouldBe a[PreCharge] transaction.transaction.fromAddr shouldBe fromAddr transaction.transaction.amount shouldBe phloLimit * phloPrice - block.state.deploys.head.cost.cost diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala index 1dfc7f80384..94669c505c0 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala @@ -1,7 +1,7 @@ package coop.rchain.node.mergeablity import cats.Monoid -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.casper.helper.TestRhoRuntime.rhoRuntimeEff import coop.rchain.casper.merging.BlockIndex @@ -17,8 +17,6 @@ import coop.rchain.rholang.syntax._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.internal.{Datum, WaitingContinuation} import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.exceptions.TestFailedException object RhoState { @@ -90,16 +88,17 @@ object OperationOn0Ch { case class Rho( value: String ) { - val rstate: State = state.runSyncUnsafe() + val rstate: State = state.unsafeRunSync def |(other: Rho): Rho = Rho(s"$value | ${other.value}") - def state: Task[State] = { + def state: IO[State] = { import coop.rchain.models.rholang.{implicits => toPar} - 
implicit val logger: Log[Task] = Log.log[Task] - implicit val metricsEff: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - rhoRuntimeEff[Task](initRegistry = false).use { + implicit val logger: Log[IO] = Log.log[IO] + implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + import coop.rchain.shared.RChainScheduler._ + rhoRuntimeEff[IO](initRegistry = false).use { case (runtime, _, _) => for { _ <- runtime.evaluate(value, Cost(500L)) @@ -170,7 +169,7 @@ trait BasicMergeabilityRules extends ComputeMerge { isConflict = false, mergedState, rejectRight = false // this parameter is not actually important in merge case - ).runSyncUnsafe() + ).unsafeRunSync def ConflictingCase(left: Rho*)( right: Rho* @@ -191,7 +190,7 @@ trait BasicMergeabilityRules extends ComputeMerge { isConflict = true, mergedLeftState, rejectRight = true - )).runSyncUnsafe() + )).unsafeRunSync /** * This is a mark for cases which happen left consume and right produce doesn't match.But because we don't run @@ -242,7 +241,7 @@ trait BasicMergeabilityRules extends ComputeMerge { isConflict: Boolean, mergedStateResult: State, rejectRight: Boolean - ): Task[Unit] = { + ): IO[Unit] = { case class MergingNode(index: BlockIndex, isFinalized: Boolean, postState: Blake2b256Hash) @@ -255,11 +254,12 @@ trait BasicMergeabilityRules extends ComputeMerge { phloLimit = 500, sec = ConstructDeploy.defaultSec2 ) - implicit val metricsEff: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val logger: Log[Task] = Log.log[Task] - val baseDeployRand = Blake2b512Random.defaultRandom - computeMergeCase[Task]( + implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val logger: Log[IO] = Log.log[IO] + val baseDeployRand = Blake2b512Random.defaultRandom + import 
coop.rchain.shared.RChainScheduler._ + computeMergeCase[IO]( baseDeployRand, Seq(baseDeploy), Seq(leftDeploy), @@ -285,10 +285,10 @@ trait BasicMergeabilityRules extends ComputeMerge { | | conflicts found: ${mergedState._2.size} | """.stripMargin - _ <- Sync[Task] + _ <- Sync[IO] .raiseError(new Exception(errMsg)) .whenA(rejectedDeploys.isEmpty == isConflict) - _ <- Sync[Task] + _ <- Sync[IO] .raiseError(new Exception(errMsg)) .whenA(dataContinuationAtMergedState != mergedStateResult) } yield () diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala index 090e0f5d4f0..50715d2a0f5 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala @@ -1,6 +1,6 @@ package coop.rchain.node.mergeablity -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.implicits.catsSyntaxApplicative import cats.syntax.all._ import com.google.protobuf.ByteString @@ -16,9 +16,7 @@ import coop.rchain.node.revvaultexport.RhoTrieTraverser import coop.rchain.rholang.interpreter.RhoRuntime import coop.rchain.rholang.interpreter.accounting.Cost import coop.rchain.rspace.hashing.Blake2b256Hash -import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import coop.rchain.shared.{Log, RChainScheduler} import org.scalacheck.Gen import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -479,9 +477,10 @@ class TreeHashMapMergeabilitySpec private def runTest(left: String)(right: String)(base: String)( rejectRight: Boolean )(isConflict: Boolean)(expectedKeyValue: List[KeyValue])(treeHashMapDepth: Int): Unit = { - implicit val metricsEff: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val logger: 
Log[Task] = Log.log[Task] + implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val logger: Log[IO] = Log.log[IO] + import RChainScheduler._ val baseDeploy = ConstructDeploy.sourceDeploy(base, 1L, phloLimit = Cost.UNSAFE_MAX.value) val leftDeploy = @@ -502,7 +501,7 @@ class TreeHashMapMergeabilitySpec ) } - computeMergeCase[Task]( + computeMergeCase[IO]( baseDeployRand, Seq(StandardDeploys.registryGenerator(registry, SHARD_ID), baseDeploy), Seq(leftDeploy), @@ -515,13 +514,13 @@ class TreeHashMapMergeabilitySpec depth = treeHashMapDepth, mergedState._1 ) - _ <- Sync[Task] + _ <- Sync[IO] .raiseError(new Exception(s"Mergeable case failed with :${mergedState}")) .whenA(mergedState._2.nonEmpty && !isConflict) - _ <- Sync[Task] + _ <- Sync[IO] .raiseError(new Exception(s"Conflict case failed with :${mergedState}")) .whenA(mergedState._2.isEmpty && isConflict) - _ <- Sync[Task] + _ <- Sync[IO] .raiseError( new Exception(s"""The mergedTreeHashMap length is not equal to expectedKeyValue. # MergedTreeHashMap: ${mergedTreeMap} @@ -530,7 +529,7 @@ class TreeHashMapMergeabilitySpec .whenA(mergedTreeMap.toList.length != expectedKeyValue.length) _ <- expectedKeyValue.traverse( kv => - Sync[Task] + Sync[IO] .raiseError( new Exception( s"""The mergedTreeHashMap content is not equal to expectedKeyValue. 
@@ -551,7 +550,7 @@ class TreeHashMapMergeabilitySpec ) } yield (), rejectRight = rejectRight - ).runSyncUnsafe() + ).unsafeRunSync } diff --git a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala index 683d206b932..9db3a139a83 100644 --- a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala +++ b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala @@ -1,7 +1,7 @@ package coop.rchain.node.perf import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, ContextShift, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.hashing.Blake2b256Hash @@ -402,14 +402,13 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll } it should "execute with monix" in { - import monix.eval.Task - import monix.execution.Scheduler.Implicits.global + import coop.rchain.shared.RChainScheduler._ - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task]() - implicit val met: Metrics.MetricsNOP[Task] = new Metrics.MetricsNOP[Task]() - implicit val spn: NoopSpan[Task] = new NoopSpan[Task]() + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() + implicit val met: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO]() + implicit val spn: NoopSpan[IO] = new NoopSpan[IO]() - val t = new Experiment[Task] - t.test.runSyncUnsafe() + val t = new Experiment[IO] + t.test.unsafeRunSync } } diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala index 67385e9ce8d..9b24f59ff9e 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala +++ b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala @@ -1,6 +1,6 @@ package coop.rchain.node.revvaultexport -import cats.effect.Concurrent 
+import cats.effect.{Concurrent, IO} import coop.rchain.casper.genesis.contracts.{Registry, StandardDeploys} import coop.rchain.casper.helper.TestNode.Effect import coop.rchain.casper.helper.TestRhoRuntime.rhoRuntimeEff @@ -11,8 +11,6 @@ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.rholang.RhoType.RhoName import coop.rchain.models.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import scala.collection.compat.immutable.LazyList @@ -21,6 +19,7 @@ import scala.util.Random class RhoTrieTraverserTest extends AnyFlatSpec { private val SHARD_ID = "root-shard" private val registry = Registry(GenesisBuilder.defaultSystemContractPubKey) + import coop.rchain.shared.RChainScheduler._ "traverse the TreeHashMap" should "work" in { val total = 100 @@ -59,10 +58,10 @@ class RhoTrieTraverserTest extends AnyFlatSpec { | } |}""".stripMargin - implicit val concurrent = Concurrent[Task] - implicit val metricsEff: Metrics[Effect] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Effect] = NoopSpan[Task]() - implicit val logger: Log[Effect] = Log.log[Task] + implicit val concurrent = Concurrent[IO] + implicit val metricsEff: Metrics[Effect] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[Effect] = NoopSpan[IO]() + implicit val logger: Log[Effect] = Log.log[IO] val t = rhoRuntimeEff[Effect](false).use { case (runtime, _, _) => for { @@ -111,7 +110,7 @@ class RhoTrieTraverserTest extends AnyFlatSpec { }) } yield () } - t.runSyncUnsafe() + t.unsafeRunSync } } diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala index 37be6418175..ca3f564ed83 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala +++ 
b/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala @@ -7,7 +7,7 @@ import coop.rchain.models.syntax._ import coop.rchain.casper.util.GenesisBuilder.{buildGenesis, buildGenesisParameters} import coop.rchain.node.revvaultexport.mainnet1.StateBalanceMain import coop.rchain.rholang.interpreter.util.RevAddress -import monix.execution.Scheduler.Implicits.global + import org.scalatest.flatspec.AnyFlatSpec class VaultBalanceGetterTest extends AnyFlatSpec { @@ -49,7 +49,7 @@ class VaultBalanceGetterTest extends AnyFlatSpec { _ = assert(balance.get == genesisInitialBalance) } yield () } - t.runSyncUnsafe() + t.unsafeRunSync } "Get all vault" should "return all vault balance" in { @@ -84,7 +84,7 @@ class VaultBalanceGetterTest extends AnyFlatSpec { ) } yield () } - t.runSyncUnsafe() + t.unsafeRunSync } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index c5b764475d3..992ca0c88a1 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -68,15 +68,16 @@ object Dependencies { val logstashLogback = "net.logstash.logback" % "logstash-logback-encoder" % "6.6" val lz4 = "org.lz4" % "lz4-java" % "1.7.1" val magnolia = "com.propensive" %% "magnolia" % "0.17.0" - val mockito = "org.mockito" %% "mockito-scala-cats" % "1.16.42" % "test" + val mockito = "org.mockito" %% "mockito-scala-cats" % "1.17.14" % "test" val monix = "io.monix" %% "monix" % monixVersion val monixTesting = "io.monix" %% "monix-testing-scalatest" % "0.3.0" + val ceTesting = "org.typelevel" %% "cats-effect-testing-scalatest"% "1.2.0" % Test val pureconfig = "com.github.pureconfig" %% "pureconfig" % "0.14.0" val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.9.4" val scalaUri = "io.lemonlabs" %% "scala-uri" % "3.0.0" val scalacheck = "org.scalacheck" %% "scalacheck" % "1.15.0" val scalacheckShapeless = "com.github.alexarchambault" %% "scalacheck-shapeless_1.15" % "1.3.0" % "test" - val scalactic = "org.scalactic" %% 
"scalactic" % "3.2.9" % "test" + val scalactic = "org.scalactic" %% "scalactic" % "3.2.13" % "test" val scalapbCompiler = "com.thesamet.scalapb" %% "compilerplugin" % scalapb.compiler.Version.scalapbVersion val scalapbRuntime = "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf" val scalapbRuntimeLib = "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion @@ -88,8 +89,8 @@ object Dependencies { val nettyTcnativeLinux = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64" val nettyTcnativeFedora = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64-fedora" val scalaCompat = "org.scala-lang.modules" %% "scala-collection-compat" % "2.6.0" - val scalatest = "org.scalatest" %% "scalatest" % "3.2.9" % "test" - val scalatestPlus = "org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test" + val scalatest = "org.scalatest" %% "scalatest" % "3.2.13" % "test" + val scalatestPlus = "org.scalatestplus" %% "scalacheck-1-16" % "3.2.13.0" % "test" val scallop = "org.rogach" %% "scallop" % "3.1.4" val scodecCore = "org.scodec" %% "scodec-core" % "1.11.7" val scodecCats = "org.scodec" %% "scodec-cats" % "1.1.0-M4" @@ -118,7 +119,9 @@ object Dependencies { slf4j, kamonCore, sourcecode, + scalatest, // Overrides for transitive dependencies (we don't use them directly, hence no val-s) + "org.objenesis" % "objenesis" % "3.2", "org.typelevel" % "jawn-parser_2.12" % "1.4.0", "com.github.jnr" % "jnr-ffi" % "2.2.13", "com.lihaoyi" %% "geny" % "1.0.0", @@ -147,7 +150,8 @@ object Dependencies { "org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full ) - private val testing = Seq(scalactic, scalatest, scalacheck, scalatestPlus, monixTesting, mockito) + private val testing = + Seq(scalactic, scalatest, scalacheck, scalatestPlus, mockito, ceTesting) private val logging = Seq(slf4j, julToSlf4j, scalaLogging, logbackClassic, logstashLogback) diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala index c140989d23c..ef4f0a51dad 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala @@ -29,6 +29,8 @@ import coop.rchain.rspace.{Match, _} import coop.rchain.shared.Log import monix.execution.Scheduler +import scala.concurrent.ExecutionContext + trait RhoRuntime[F[_]] extends HasCost[F] { /** @@ -581,17 +583,17 @@ object RhoRuntime { def createRuntime[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], mergeableTagName: Par, + rholangEC: ExecutionContext, initRegistry: Boolean = false, additionalSystemProcesses: Seq[Definition[F]] = Seq.empty - )( - implicit scheduler: Scheduler ): F[RhoRuntime[F]] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: Match[F, BindPattern, ListParWithRandom] = matchListPar[F] for { space <- RSpace .create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores + stores, + rholangEC ) runtime <- createRhoRuntime[F]( space, diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala index fac698f3db3..aa34578f28f 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter import cats._ -import cats.effect.{Concurrent, Sync} +import cats.effect.{Blocker, Concurrent, ContextShift, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models._ @@ -12,9 +12,9 @@ import coop.rchain.rholang.interpreter.storage.StoragePrinter import coop.rchain.rholang.syntax._ import coop.rchain.rspace.syntax._ import 
coop.rchain.shared.Log +import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.store.LmdbDirStoreManager.{mb, Db, LmdbEnvConfig} import coop.rchain.store.{KeyValueStoreManager, LmdbDirStoreManager} -import monix.eval.Task import monix.execution.{CancelableFuture, Scheduler} import org.rogach.scallop.{stringListConverter, ScallopConf} @@ -22,7 +22,7 @@ import java.io.{BufferedOutputStream, FileOutputStream, FileReader, IOException} import java.nio.file.{Files, Path} import java.util.concurrent.TimeoutException import scala.annotation.tailrec -import scala.concurrent.Await +import scala.concurrent.{Await, ExecutionContext, Future} import scala.concurrent.duration._ import scala.io.Source import scala.util.{Failure, Success, Try, Using} @@ -57,21 +57,22 @@ object RholangCLI { } def main(args: Array[String]): Unit = { - import monix.execution.Scheduler.Implicits.global + import scala.concurrent.ExecutionContext.Implicits.global + implicit val cs: ContextShift[IO] = IO.contextShift(global) - val conf = new Conf(args) + val conf = new Conf(args.toList) - implicit val log: Log[Task] = Log.log[Task] - implicit val metricsF: Metrics[Task] = new Metrics.MetricsNOP[Task]() - implicit val spanF: Span[Task] = NoopSpan[Task]() - implicit val parF: Parallel[Task] = Task.catsParallel + implicit val log: Log[IO] = Log.log[IO] + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val parF: Parallel[IO] = IO.ioParallel - val kvm = mkRSpaceStoreManager[Task](conf.dataDir(), conf.mapSize()).runSyncUnsafe() + val kvm = mkRSpaceStoreManager[IO](conf.dataDir(), conf.mapSize()).unsafeRunSync val runtime = (for { store <- kvm.rSpaceStores - runtime <- RhoRuntime.createRuntime[Task](store, Par()) - } yield runtime).runSyncUnsafe() + runtime <- RhoRuntime.createRuntime[IO](store, Par(), rholangEC) + } yield runtime).unsafeRunSync val problems = try { if (conf.files.supplied) { @@ -102,7 +103,7 @@ object 
RholangCLI { } } finally { // TODO: Refactor with Resource. - kvm.shutdown.runSyncUnsafe() + kvm.shutdown.unsafeRunSync } if (!problems.isEmpty) { System.exit(1) @@ -174,11 +175,11 @@ object RholangCLI { @tailrec @SuppressWarnings(Array("org.wartremover.warts.Return")) - def repl(runtime: RhoRuntime[Task])(implicit scheduler: Scheduler): Unit = { + def repl(runtime: RhoRuntime[IO]): Unit = { printPrompt() Option(scala.io.StdIn.readLine()) match { case Some(line) => - evaluate(runtime, line).runSyncUnsafe() + evaluate(runtime, line).unsafeRunSync case None => Console.println("\nExiting...") return @@ -188,12 +189,10 @@ object RholangCLI { def processFile( conf: Conf, - runtime: RhoRuntime[Task], + runtime: RhoRuntime[IO], fileName: String, quiet: Boolean, unmatchedSendsOnly: Boolean - )( - implicit scheduler: Scheduler ): Try[Unit] = { val processTerm: Par => Try[Unit] = if (conf.binary()) writeBinary(fileName) @@ -219,7 +218,7 @@ object RholangCLI { x.flatMap(processTerm) } - def evaluate(runtime: RhoRuntime[Task], source: String): Task[Unit] = + def evaluate(runtime: RhoRuntime[IO], source: String): IO[Unit] = runtime.evaluate(source).map { case EvaluateResult(_, Vector(), _) => case EvaluateResult(_, errors, _) => @@ -234,7 +233,7 @@ object RholangCLI { @tailrec @SuppressWarnings(Array("org.wartremover.warts.Throw")) - def waitForSuccess(evaluatorFuture: CancelableFuture[EvaluateResult]): Unit = + def waitForSuccess(evaluatorFuture: Future[EvaluateResult]): Unit = try { Await.ready(evaluatorFuture, 5.seconds).value match { case Some(Success(EvaluateResult(cost, errors, _))) => @@ -273,24 +272,24 @@ object RholangCLI { } def evaluatePar( - runtime: RhoRuntime[Task], + runtime: RhoRuntime[IO], source: String, quiet: Boolean, unmatchedSendsOnly: Boolean )( par: Par - )(implicit scheduler: Scheduler): Try[Unit] = { - val evaluatorTask = + ): Try[Unit] = { + val evaluatorIO = for { - _ <- Task.delay(if (!quiet) { + _ <- IO.delay(if (!quiet) { printNormalizedTerm(par) 
}) result <- runtime.evaluate(source) } yield result - Try(waitForSuccess(evaluatorTask.runToFuture)).map { _ok => + Try(waitForSuccess(evaluatorIO.unsafeToFuture())).map { _ok => if (!quiet) { - printStorageContents(runtime, unmatchedSendsOnly).runSyncUnsafe() + printStorageContents(runtime, unmatchedSendsOnly).unsafeRunSync } } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/package.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/package.scala index 127ce3664e7..9de29eb0302 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/package.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/package.scala @@ -12,7 +12,7 @@ import coop.rchain.rholang.interpreter.matcher._ import coop.rchain.rspace.{Match => StorageMatch} import coop.rchain.shared.Serialize -//noinspection ConvertExpressionToSAM +//noinspection ConvertExpressionToSAM package object storage { /* Match instance */ diff --git a/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala index 4474ce0d992..498b3ea5d13 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Expr.ExprInstance.{GInt, GString} @@ -10,20 +11,18 @@ import coop.rchain.rholang.interpreter.storage.StoragePrinter import coop.rchain.rholang.interpreter.{EvaluateResult, RhoRuntime} import coop.rchain.rholang.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ class InterpreterSpec 
extends AnyFlatSpec with Matchers { - private val tmpPrefix = "rspace-store-" - private val maxDuration = 5.seconds + private val tmpPrefix = "rspace-store-" - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() behavior of "Interpreter" @@ -31,35 +30,32 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { val sendRho = "@{0}!(0)" - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - initStorage <- storageContents(runtime) - _ <- success(runtime, sendRho) - beforeError <- storageContents(runtime) - _ = assert(beforeError.contains(sendRho)) - beforeErrorCheckpoint <- runtime.createCheckpoint - _ <- failure(runtime, "@1!(1) | @2!(3.noSuchMethod())") - afterErrorCheckpoint <- runtime.createCheckpoint - _ = assert(afterErrorCheckpoint.root == beforeErrorCheckpoint.root) - _ <- success(runtime, "new stdout(`rho:io:stdout`) in { stdout!(42) }") - afterSendCheckpoint <- runtime.createCheckpoint - _ = assert(afterSendCheckpoint.root == beforeErrorCheckpoint.root) - _ <- success(runtime, "for (_ <- @0) { Nil }") - finalContent <- storageContents(runtime) - _ = assert(finalContent == initStorage) - } yield () - } - .runSyncUnsafe(maxDuration) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + initStorage <- storageContents(runtime) + _ <- success(runtime, sendRho) + beforeError <- storageContents(runtime) + _ = assert(beforeError.contains(sendRho)) + beforeErrorCheckpoint <- runtime.createCheckpoint + _ <- failure(runtime, "@1!(1) | @2!(3.noSuchMethod())") + afterErrorCheckpoint <- runtime.createCheckpoint + _ = assert(afterErrorCheckpoint.root == beforeErrorCheckpoint.root) + _ <- success(runtime, "new stdout(`rho:io:stdout`) in { stdout!(42) }") + 
afterSendCheckpoint <- runtime.createCheckpoint + _ = assert(afterSendCheckpoint.root == beforeErrorCheckpoint.root) + _ <- success(runtime, "for (_ <- @0) { Nil }") + finalContent <- storageContents(runtime) + _ = assert(finalContent == initStorage) + } yield () + }.unsafeRunSync } it should "yield correct results for the PrimeCheck contract" in { - val tupleSpace = mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- success( - runtime, - """ + val tupleSpace = mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- success( + runtime, + """ |new loop, primeCheck, stdoutAck(`rho:io:stdoutAck`) in { | contract loop(@x) = { | match x { @@ -83,12 +79,11 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { | loop!([Nil, 7, 7 | 8, 9 | Nil, 9 | 10, Nil, 9]) | } """.stripMargin - ) + ) - tupleSpace <- runtime.getHotChanges - } yield tupleSpace - } - .runSyncUnsafe(maxDuration) + tupleSpace <- runtime.getHotChanges + } yield tupleSpace + }.unsafeRunSync def rhoPar(e: Expr) = Seq(Par(exprs = Seq(e))) def rhoInt(n: Long) = rhoPar(Expr(GInt(n))) @@ -107,11 +102,9 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { it should "signal syntax errors to the caller" in { val badRholang = "new f, x in { f(x) }" val EvaluateResult(_, errors, _) = - mkRuntime[Task](tmpPrefix) - .use { runtime => - execute(runtime, badRholang) - } - .runSyncUnsafe(maxDuration) + mkRuntime[IO](tmpPrefix).use { runtime => + execute(runtime, badRholang) + }.unsafeRunSync errors should not be empty errors(0) shouldBe a[coop.rchain.rholang.interpreter.errors.SyntaxError] @@ -120,11 +113,9 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { it should "capture rholang parsing errors and charge for parsing" in { val badRholang = """ for(@x <- @"x"; @y <- @"y"){ @"xy"!(x + y) | @"x"!(1) | @"y"!("hi") """ val EvaluateResult(cost, errors, _) = - mkRuntime[Task](tmpPrefix) - .use { runtime => - execute(runtime, badRholang) - } - .runSyncUnsafe(maxDuration) + 
mkRuntime[IO](tmpPrefix).use { runtime => + execute(runtime, badRholang) + }.unsafeRunSync errors should not be empty cost.value shouldEqual (parsingCost(badRholang).value) @@ -134,20 +125,18 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { val sendRho = "@{0}!(0)" val initialPhlo = parsingCost(sendRho) - Cost(1) val EvaluateResult(cost, errors, _) = - mkRuntime[Task](tmpPrefix) - .use { runtime => - runtime.evaluate(sendRho, initialPhlo) - } - .runSyncUnsafe(maxDuration) + mkRuntime[IO](tmpPrefix).use { runtime => + runtime.evaluate(sendRho, initialPhlo) + }.unsafeRunSync errors should not be empty cost.value shouldEqual initialPhlo.value } - private def storageContents(runtime: RhoRuntime[Task]): Task[String] = + private def storageContents(runtime: RhoRuntime[IO]): IO[String] = StoragePrinter.prettyPrint(runtime) - private def success(runtime: RhoRuntime[Task], rho: String): Task[Unit] = + private def success(runtime: RhoRuntime[IO], rho: String): IO[Unit] = execute(runtime, rho).map( res => assert( @@ -158,15 +147,15 @@ class InterpreterSpec extends AnyFlatSpec with Matchers { ) ) - private def failure(runtime: RhoRuntime[Task], rho: String): Task[Unit] = + private def failure(runtime: RhoRuntime[IO], rho: String): IO[Unit] = execute(runtime, rho).map( res => assert(res.errors.nonEmpty, s"Expected $rho to fail - it didn't.") ) private def execute( - runtime: RhoRuntime[Task], + runtime: RhoRuntime[IO], source: String - ): Task[EvaluateResult] = + ): IO[EvaluateResult] = runtime.evaluate(source) } diff --git a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala index 756b1bb7ed6..48a5d382d66 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala @@ -1,16 +1,14 @@ package coop.rchain.rholang +import cats.effect.IO import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - 
import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Expr.ExprInstance.{GInt, GString} import coop.rchain.models.rholang.implicits._ import coop.rchain.shared.Log import coop.rchain.rholang.interpreter.InterpreterUtil - -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ @@ -19,104 +17,92 @@ class PeekSpec extends AnyFlatSpec with Matchers { import Resources._ import InterpreterUtil._ - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() val tmpPrefix = "peek-spec-" "peek" should "not remove read data" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """@1!("v1") | for(_ <<- @1) { Nil }""") - _ <- evaluate[Task](runtime, """for(_ <- @1) { @2!("v2") }""") - data <- runtime.getData(GInt(2L)) - _ = withClue( - "Continuation didn't produce expected data. Did it fire?" - ) { data should have size 1 } - } yield (data.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """@1!("v1") | for(_ <<- @1) { Nil }""") + _ <- evaluate[IO](runtime, """for(_ <- @1) { @2!("v2") }""") + data <- runtime.getData(GInt(2L)) + _ = withClue( + "Continuation didn't produce expected data. Did it fire?" 
+ ) { data should have size 1 } + } yield (data.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) + }.unsafeRunSync } it should "not duplicate read persistent data - send is executed first" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """@1!!("v1")""") - _ <- evaluate[Task](runtime, """for(_ <<- @1) { Nil }""") - _ <- evaluate[Task](runtime, """for(_ <- @1) { @2!("v2") }""") - v1Data <- runtime.getData(GInt(1L)) - _ = v1Data should have size 1 - resultData <- runtime.getData(GInt(2L)) - _ = withClue( - "Continuation didn't produce expected data. Did it fire?" - ) { resultData should have size 1 } - } yield (resultData.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """@1!!("v1")""") + _ <- evaluate[IO](runtime, """for(_ <<- @1) { Nil }""") + _ <- evaluate[IO](runtime, """for(_ <- @1) { @2!("v2") }""") + v1Data <- runtime.getData(GInt(1L)) + _ = v1Data should have size 1 + resultData <- runtime.getData(GInt(2L)) + _ = withClue( + "Continuation didn't produce expected data. Did it fire?" + ) { resultData should have size 1 } + } yield (resultData.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) + }.unsafeRunSync } it should "not duplicate read persistent data - send is executed second" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """for(_ <<- @1) { Nil }""") - _ <- evaluate[Task](runtime, """@1!!("v1")""") - _ <- evaluate[Task](runtime, """for(_ <- @1) { @2!("v2") }""") - v1Data <- runtime.getData(GInt(1L)) - _ = v1Data should have size 1 - resultData <- runtime.getData(GInt(2L)) - _ = withClue( - "Continuation didn't produce expected data. Did it fire?" 
- ) { resultData should have size 1 } - } yield (resultData.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """for(_ <<- @1) { Nil }""") + _ <- evaluate[IO](runtime, """@1!!("v1")""") + _ <- evaluate[IO](runtime, """for(_ <- @1) { @2!("v2") }""") + v1Data <- runtime.getData(GInt(1L)) + _ = v1Data should have size 1 + resultData <- runtime.getData(GInt(2L)) + _ = withClue( + "Continuation didn't produce expected data. Did it fire?" + ) { resultData should have size 1 } + } yield (resultData.head.a.pars.head.exprs.head.exprInstance shouldBe GString("v2")) + }.unsafeRunSync } it should "clear all peeks when inserting a persistent send" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """for (_ <<- @0) { @1!(0) }""") - _ <- evaluate[Task](runtime, """for (_ <<- @0) { @1!(0) }""") - _ <- evaluate[Task](runtime, """@0!!(0)""") - res <- runtime.getData(GInt(1L)).map(_.size) - } yield (res shouldBe 2) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """for (_ <<- @0) { @1!(0) }""") + _ <- evaluate[IO](runtime, """for (_ <<- @0) { @1!(0) }""") + _ <- evaluate[IO](runtime, """@0!!(0)""") + res <- runtime.getData(GInt(1L)).map(_.size) + } yield (res shouldBe 2) + }.unsafeRunSync } it should "clear all peeks when inserting a send" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """for (_ <<- @0) { @1!(0) }""") - _ <- evaluate[Task](runtime, """for (_ <<- @0) { @1!(0) }""") - _ <- evaluate[Task](runtime, """@0!(0)""") - res <- runtime.getData(GInt(1L)).map(_.size) - } yield (res shouldBe 2) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """for (_ <<- @0) { @1!(0) }""") + _ <- evaluate[IO](runtime, """for (_ <<- @0) { @1!(0) }""") + _ 
<- evaluate[IO](runtime, """@0!(0)""") + res <- runtime.getData(GInt(1L)).map(_.size) + } yield (res shouldBe 2) + }.unsafeRunSync } it should "continue executing the loop until quiescence" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- evaluate[Task](runtime, """for (_ <<- @0 & _ <<- @1) { @2!(0) }""") - _ <- evaluate[Task](runtime, """for (_ <<- @0 & _ <<- @1) { @2!(0) }""") - _ <- evaluate[Task](runtime, """@1!!(1)""") - _ <- evaluate[Task](runtime, """@0!(0)""") - r1 <- runtime.getData(GInt(0L)).map(_.size) - r2 <- runtime.getData(GInt(1L)).map(_.size) - r3 <- runtime.getData(GInt(2L)).map(_.size) - _ = r1 shouldBe 1 - _ = r2 shouldBe 1 - } yield (r3 shouldBe 2) - } - .runSyncUnsafe(2.seconds) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- evaluate[IO](runtime, """for (_ <<- @0 & _ <<- @1) { @2!(0) }""") + _ <- evaluate[IO](runtime, """for (_ <<- @0 & _ <<- @1) { @2!(0) }""") + _ <- evaluate[IO](runtime, """@1!!(1)""") + _ <- evaluate[IO](runtime, """@0!(0)""") + r1 <- runtime.getData(GInt(0L)).map(_.size) + r2 <- runtime.getData(GInt(1L)).map(_.size) + r3 <- runtime.getData(GInt(2L)).map(_.size) + _ = r1 shouldBe 1 + _ = r2 shouldBe 1 + } yield (r3 shouldBe 2) + }.unsafeRunSync } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala index 983ca9c41c4..8b71b4ef9e9 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala @@ -14,7 +14,7 @@ import coop.rchain.rspace import coop.rchain.rspace.RSpace.RSpaceStore import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.{Match, RSpace} -import coop.rchain.shared.Log +import coop.rchain.shared.{Log, RChainScheduler} import coop.rchain.store.KeyValueStoreManager import monix.execution.Scheduler @@ -39,32 +39,32 @@ object Resources { }) ) - def mkRhoISpace[F[_]: Concurrent: Parallel: 
ContextShift: KeyValueStoreManager: Metrics: Span: Log]( - implicit scheduler: Scheduler - ): F[RhoISpace[F]] = { + def mkRhoISpace[F[_]: Concurrent: Parallel: ContextShift: KeyValueStoreManager: Metrics: Span: Log] + : F[RhoISpace[F]] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: rspace.Match[F, BindPattern, ListParWithRandom] = matchListPar[F] for { store <- KeyValueStoreManager[F].rSpaceStores - space <- RSpace.create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation](store) + space <- RSpace.create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( + store, + RChainScheduler.rholangEC + ) } yield space } def mkRuntime[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( prefix: String - )(implicit scheduler: Scheduler): Resource[F, RhoRuntime[F]] = + ): Resource[F, RhoRuntime[F]] = mkTempDir(prefix) .evalMap(RholangCLI.mkRSpaceStoreManager[F](_)) .evalMap(_.rSpaceStores) - .evalMap(RhoRuntime.createRuntime(_, Par())) + .evalMap(RhoRuntime.createRuntime(_, Par(), RChainScheduler.rholangEC)) def mkRuntimes[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( prefix: String, initRegistry: Boolean = false - )( - implicit scheduler: Scheduler ): Resource[F, (RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = mkTempDir(prefix) .evalMap(RholangCLI.mkRSpaceStoreManager[F](_)) @@ -75,15 +75,14 @@ object Resources { stores: RSpaceStore[F], initRegistry: Boolean = false, additionalSystemProcesses: Seq[Definition[F]] = Seq.empty - )( - implicit scheduler: Scheduler ): F[(RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: Match[F, BindPattern, ListParWithRandom] = matchListPar[F] for { hrstores <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores + stores, + RChainScheduler.rholangEC ) (space, replay) = hrstores runtimes <- RhoRuntime diff --git 
a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala index d923e4a47ba..fba700161f0 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang import cats.Eval +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Connective.ConnectiveInstance.ConnNotBody @@ -14,8 +15,6 @@ import coop.rchain.rholang.syntax._ import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.rholang.interpreter.{Interpreter, InterpreterUtil, ParBuilderUtil, PrettyPrinter} import coop.rchain.shared.{Log, Serialize} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.Assertions import org.scalatest.flatspec.AnyFlatSpec @@ -27,12 +26,12 @@ import scala.concurrent.duration._ object StackSafetySpec extends Assertions { - val mapSize = 10L * 1024L * 1024L - val tmpPrefix = "rspace-store-" - val maxDuration = 20.seconds - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + val mapSize = 10L * 1024L * 1024L + val tmpPrefix = "rspace-store-" + val maxDuration = 20.seconds + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() def findMaxRecursionDepth(): Int = { def count(i: Int): Int = @@ -175,7 +174,7 @@ class StackSafetySpec extends AnyFlatSpec with TableDrivenPropertyChecks with Ma Seq.fill(depth)(left).mkString + middle + Seq.fill(depth)(right).mkString private def checkAll(term: String): Unit = { - implicit val logF: Log[Task] = Log.log[Task] 
+ implicit val logF: Log[IO] = Log.log[IO] val rho = s""" @@ -203,17 +202,16 @@ class StackSafetySpec extends AnyFlatSpec with TableDrivenPropertyChecks with Ma val ast = Compiler[Eval].sourceToADT(rho).value PrettyPrinter().buildString(ast) checkSuccess(rho) { - mkRuntime[Task](tmpPrefix).use { runtime => + import coop.rchain.shared.RChainScheduler._ + mkRuntime[IO](tmpPrefix).use { runtime => runtime.evaluate(rho) } } } } - private def checkSuccess(rho: String)(task: => Task[_]): Unit = - task.attempt - .runSyncUnsafe(maxDuration) - .swap + private def checkSuccess(rho: String)(task: => IO[_]): Unit = + task.attempt.unsafeRunSync.swap .foreach(error => fail(s"""Execution failed for: $rho |Cause: |$error""".stripMargin)) diff --git a/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala index ee626490802..cd199a025d1 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang +import cats.effect.IO import coop.rchain.casper.protocol.DeployData import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.{Secp256k1, Signed} @@ -9,41 +10,37 @@ import coop.rchain.rholang.Resources.mkRuntime import coop.rchain.rholang.interpreter.storage.StoragePrinter import coop.rchain.rholang.syntax._ import coop.rchain.shared.{Base16, Log} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ class StoragePrinterSpec extends AnyFlatSpec with Matchers { - private val tmpPrefix = "rspace-store-" - private val maxDuration = 5.seconds + private val tmpPrefix = "rspace-store-" private val deployerSk = PrivateKey( 
Base16.unsafeDecode("17f242c34491ff8187ec94ec1508fed8b487b872f2bb97b437f4d4e44345cee6") ) private val SHARD_ID = "root-shard" - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() behavior of "StoragePrinter.prettyPrintUnmatchedSends" it should "print unmatched sends" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- { - runtime.evaluate( - "@1!(Nil) | @2!(Nil) | for(_ <- @2) { Nil }" - ) - } - pretty <- StoragePrinter.prettyPrintUnmatchedSends(runtime) - _ = assert(pretty == "@{1}!(Nil)") - } yield () - } - .runSyncUnsafe(maxDuration) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- { + runtime.evaluate( + "@1!(Nil) | @2!(Nil) | for(_ <- @2) { Nil }" + ) + } + pretty <- StoragePrinter.prettyPrintUnmatchedSends(runtime) + _ = assert(pretty == "@{1}!(Nil)") + } yield () + }.unsafeRunSync } private def mkDeploy(term: String) = @@ -61,40 +58,36 @@ class StoragePrinterSpec extends AnyFlatSpec with Matchers { ) it should "print unmatched sends of multiple deploys" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - val deploy1 = "@1!(Nil)" - val deploy2 = "@2!(Nil)" - val deploy3 = "@3!(Nil) | for(_ <- @3) { Nil }" - for { - unmatchedSends <- StoragePrinter.prettyPrintUnmatchedSends( - List(deploy1, deploy2, deploy3).map(mkDeploy), - runtime - ) - result = """Deploy 304302205f0ca86c04a9614b474372e38e3d92c173690ffb864989f555720e004e96da72021f1f2243e6d33e5cd444f5321533b1afb52bfe830fc2f78aab77e0f7e6130669: + mkRuntime[IO](tmpPrefix).use { runtime => + val deploy1 = "@1!(Nil)" + val deploy2 = "@2!(Nil)" + val deploy3 = "@3!(Nil) | for(_ <- @3) { Nil }" + for { + unmatchedSends <- StoragePrinter.prettyPrintUnmatchedSends( + 
List(deploy1, deploy2, deploy3).map(mkDeploy), + runtime + ) + result = """Deploy 304302205f0ca86c04a9614b474372e38e3d92c173690ffb864989f555720e004e96da72021f1f2243e6d33e5cd444f5321533b1afb52bfe830fc2f78aab77e0f7e6130669: |@{1}!(Nil) | |Deploy 3045022100c126afae3a9c135ab08812bafb6fe54353f80f77c7b628b93a738891518197b5022031e220e1a3d4b859752ebe80153148223a0d75308c8b49bf6c922c5dd51129a1: |@{2}!(Nil)""".stripMargin - _ = assert(unmatchedSends == result) - } yield () - } - .runSyncUnsafe(maxDuration) + _ = assert(unmatchedSends == result) + } yield () + }.unsafeRunSync } it should "not print unmatched sends from previous deploys" in { - mkRuntime[Task](tmpPrefix) - .use { runtime => - for { - _ <- runtime.evaluate("@0!(Nil) | for(_ <- @1) { Nil }") - deploy = mkDeploy("@1!(Nil) | @2!(Nil)") - unmatchedSends <- StoragePrinter.prettyPrintUnmatchedSends( - deploy, - runtime - ) - _ = assert(unmatchedSends == "@{2}!(Nil)") - } yield () - } - .runSyncUnsafe(maxDuration) + mkRuntime[IO](tmpPrefix).use { runtime => + for { + _ <- runtime.evaluate("@0!(Nil) | for(_ <- @1) { Nil }") + deploy = mkDeploy("@1!(Nil) | @2!(Nil)") + unmatchedSends <- StoragePrinter.prettyPrintUnmatchedSends( + deploy, + runtime + ) + _ = assert(unmatchedSends == "@{2}!(Nil)") + } yield () + }.unsafeRunSync } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala index a72a74eb615..dddfe461f9a 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala @@ -1,7 +1,8 @@ package coop.rchain.rholang.interpreter import cats.Parallel -import cats.effect.{Concurrent, ContextShift} +import cats.effect.testing.scalatest.AsyncIOSpec +import cats.effect.{Concurrent, ContextShift, IO} import cats.syntax.all._ import coop.rchain.metrics import 
coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -11,133 +12,133 @@ import coop.rchain.rholang.Resources.mkRuntime import coop.rchain.rholang.interpreter.errors.{InterpreterError, SyntaxError} import coop.rchain.rholang.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.testing.scalatest.MonixTaskTest import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers -class BigIntNormalizerSpec extends AsyncFlatSpec with MonixTaskTest with Matchers { - implicit val logF: Log[Task] = Log.log[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - - val outcomeCh = "ret" - - private def execute[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( - source: String - ): F[Either[InterpreterError, BigInt]] = - mkRuntime[F]("rholang-bigint") - .use { runtime => - for { - evalResult <- runtime.evaluate(source) - result <- if (evalResult.errors.isEmpty) - for { - data <- runtime.getData(GString(outcomeCh)).map(_.head) - bigIntResult = data.a.pars.head.exprs.head.getGBigInt - } yield Right(bigIntResult) - else Left(evalResult.errors.head).pure[F] - } yield result - } - - "method toBigInt()" should "convert Rholang Int value to BigInt" in { - val termWithNull = - s""" - # @"$outcomeCh"!(0.toBigInt()) - # """.stripMargin('#') - val termWithMaxLong = - s""" - # @"$outcomeCh"!(9223372036854775807.toBigInt()) - # """.stripMargin('#') - val termWithMinLong = - s""" - # @"$outcomeCh"!((-9223372036854775807).toBigInt()) - # """.stripMargin('#') - for { - r1 <- execute[Task](termWithNull) - r2 <- execute[Task](termWithMaxLong) - r3 <- execute[Task](termWithMinLong) - } yield { - r1 should equal(Right(BigInt(0))) - r2 should equal(Right(BigInt(9223372036854775807L))) - r3 should equal(Right(BigInt(-9223372036854775807L))) - } - } - - it should "convert Rholang String to BigInt" in { - val termWithNull = - s""" - # 
@"$outcomeCh"!("0".toBigInt()) - # """.stripMargin('#') - val termWithPositiveBigValue = - s""" - # @"$outcomeCh"!("9999999999999999999999999999999999999999999999".toBigInt()) - # """.stripMargin('#') - val termWithNegativeBigValue = - s""" - # @"$outcomeCh"!("-9999999999999999999999999999999999999999999999".toBigInt()) - # """.stripMargin('#') - for { - r1 <- execute[Task](termWithNull) - r2 <- execute[Task](termWithPositiveBigValue) - r3 <- execute[Task](termWithNegativeBigValue) - } yield { - r1 should equal(Right(BigInt("0"))) - r2 should equal(Right(BigInt("9999999999999999999999999999999999999999999999"))) - r3 should equal(Right(BigInt("-9999999999999999999999999999999999999999999999"))) - } - } - - "BigInt() constructor" should "create BigInt value" in { - val termWithNull = - s""" - # @"$outcomeCh"!( BigInt(0) ) - # """.stripMargin('#') - val termWithPositiveBigValue = - s""" - # @"$outcomeCh"!( BigInt( 9999999999999999999999999999999999999999999999 ) ) - # """.stripMargin('#') - val termWithNegativeBigValue = - s""" - # @"$outcomeCh"!( -BigInt(9999999999999999999999999999999999999999999999) ) - # """.stripMargin('#') - for { - r1 <- execute[Task](termWithNull) - r2 <- execute[Task](termWithPositiveBigValue) - r3 <- execute[Task](termWithNegativeBigValue) - } yield { - r1 should equal(Right(BigInt("0"))) - r2 should equal(Right(BigInt("9999999999999999999999999999999999999999999999"))) - r3 should equal(Right(BigInt("-9999999999999999999999999999999999999999999999"))) - } - } - - it should "return throw error if input data isn't number string or it is negative number" in { - val term1 = - s""" - # @"$outcomeCh"!(BigInt(NOTNUMBER)) - # """.stripMargin('#') - val term2 = - s""" - # @"$outcomeCh"!(BigInt(9999999999999999999999999999999999999999999999NOTNUMBER)) - # """.stripMargin('#') - val term3 = - s""" - # @"$outcomeCh"!(BigInt(9999999999999999999999999999999999999999999999 NOTNUMBER)) - # """.stripMargin('#') - val term4 = - s""" - # 
@"$outcomeCh"!(BigInt(-9999999999999999999999999999999999999999999999)) - # """.stripMargin('#') - for { - r1 <- execute[Task](term1) - r2 <- execute[Task](term2) - r3 <- execute[Task](term3) - r4 <- execute[Task](term4) - } yield { - r1 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:17-2:26"))) - r2 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:63-2:72"))) - r3 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:64-2:73"))) - r4 should equal(Left(SyntaxError("syntax error(): - at 2:17-2:18"))) - } - } -} +// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) +//class BigIntNormalizerSpec extends AsyncFlatSpec with AsyncIOSpec with Matchers { +// implicit val logF: Log[IO] = Log.log[IO] +// implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] +// implicit val noopSpan: Span[IO] = NoopSpan[IO]() +// +// import coop.rchain.shared.RChainScheduler._ +// val outcomeCh = "ret" +// +// private def execute[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( +// source: String +// ): F[Either[InterpreterError, BigInt]] = +// mkRuntime[F]("rholang-bigint") +// .use { runtime => +// for { +// evalResult <- runtime.evaluate(source) +// result <- if (evalResult.errors.isEmpty) +// for { +// data <- runtime.getData(GString(outcomeCh)).map(_.head) +// bigIntResult = data.a.pars.head.exprs.head.getGBigInt +// } yield Right(bigIntResult) +// else Left(evalResult.errors.head).pure[F] +// } yield result +// } +// +// "method toBigInt()" should "convert Rholang Int value to BigInt" in { +// val termWithNull = +// s""" +// # @"$outcomeCh"!(0.toBigInt()) +// # """.stripMargin('#') +// val termWithMaxLong = +// s""" +// # @"$outcomeCh"!(9223372036854775807.toBigInt()) +// # """.stripMargin('#') +// val termWithMinLong = +// s""" +// # @"$outcomeCh"!((-9223372036854775807).toBigInt()) +// # """.stripMargin('#') +// (for { +// r1 <- execute[IO](termWithNull) +// r2 <- 
execute[IO](termWithMaxLong) +// r3 <- execute[IO](termWithMinLong) +// } yield { +// r1 should equal(Right(BigInt(0))) +// r2 should equal(Right(BigInt(9223372036854775807L))) +// r3 should equal(Right(BigInt(-9223372036854775807L))) +// }).unsafeToFuture() +// } +// +// it should "convert Rholang String to BigInt" in { +// val termWithNull = +// s""" +// # @"$outcomeCh"!("0".toBigInt()) +// # """.stripMargin('#') +// val termWithPositiveBigValue = +// s""" +// # @"$outcomeCh"!("9999999999999999999999999999999999999999999999".toBigInt()) +// # """.stripMargin('#') +// val termWithNegativeBigValue = +// s""" +// # @"$outcomeCh"!("-9999999999999999999999999999999999999999999999".toBigInt()) +// # """.stripMargin('#') +// (for { +// r1 <- execute[IO](termWithNull) +// r2 <- execute[IO](termWithPositiveBigValue) +// r3 <- execute[IO](termWithNegativeBigValue) +// } yield { +// r1 should equal(Right(BigInt("0"))) +// r2 should equal(Right(BigInt("9999999999999999999999999999999999999999999999"))) +// r3 should equal(Right(BigInt("-9999999999999999999999999999999999999999999999"))) +// }).unsafeToFuture() +// } +// +// "BigInt() constructor" should "create BigInt value" in { +// val termWithNull = +// s""" +// # @"$outcomeCh"!( BigInt(0) ) +// # """.stripMargin('#') +// val termWithPositiveBigValue = +// s""" +// # @"$outcomeCh"!( BigInt( 9999999999999999999999999999999999999999999999 ) ) +// # """.stripMargin('#') +// val termWithNegativeBigValue = +// s""" +// # @"$outcomeCh"!( -BigInt(9999999999999999999999999999999999999999999999) ) +// # """.stripMargin('#') +// (for { +// r1 <- execute[IO](termWithNull) +// r2 <- execute[IO](termWithPositiveBigValue) +// r3 <- execute[IO](termWithNegativeBigValue) +// } yield { +// r1 should equal(Right(BigInt("0"))) +// r2 should equal(Right(BigInt("9999999999999999999999999999999999999999999999"))) +// r3 should equal(Right(BigInt("-9999999999999999999999999999999999999999999999"))) +// }).unsafeToFuture() +// } +// +// it 
should "return throw error if input data isn't number string or it is negative number" in { +// val term1 = +// s""" +// # @"$outcomeCh"!(BigInt(NOTNUMBER)) +// # """.stripMargin('#') +// val term2 = +// s""" +// # @"$outcomeCh"!(BigInt(9999999999999999999999999999999999999999999999NOTNUMBER)) +// # """.stripMargin('#') +// val term3 = +// s""" +// # @"$outcomeCh"!(BigInt(9999999999999999999999999999999999999999999999 NOTNUMBER)) +// # """.stripMargin('#') +// val term4 = +// s""" +// # @"$outcomeCh"!(BigInt(-9999999999999999999999999999999999999999999999)) +// # """.stripMargin('#') +// (for { +// r1 <- execute[IO](term1) +// r2 <- execute[IO](term2) +// r3 <- execute[IO](term3) +// r4 <- execute[IO](term4) +// } yield { +// r1 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:17-2:26"))) +// r2 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:63-2:72"))) +// r3 should equal(Left(SyntaxError("syntax error(): NOTNUMBER at 2:64-2:73"))) +// r4 should equal(Left(SyntaxError("syntax error(): - at 2:17-2:18"))) +// }).unsafeToFuture() +// } +//} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala index 1aa1817a42a..eea3fcf6dca 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter import cats.Parallel -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.effect.concurrent.Ref import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.{Metrics, Span} @@ -19,8 +19,6 @@ import coop.rchain.rspace._ import coop.rchain.rspace.internal.{Datum, Row} import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global 
import org.scalactic.TripleEqualsSupport import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -29,9 +27,9 @@ import scala.concurrent.duration._ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqualsSupport { - implicit val noopSpan: Span[Task] = Span.noop - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val ms: Metrics.Source = Metrics.BaseSource + implicit val noopSpan: Span[IO] = Span.noop + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val ms: Metrics.Source = Metrics.BaseSource behavior of "Cost accounting in Reducer" @@ -56,16 +54,17 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu val substTerm = term(Expr(GString("1"))) val termCost = Chargeable[Par].cost(substTerm) val initCost = Cost(1000) + import coop.rchain.shared.RChainScheduler._ (for { - cost <- CostAccounting.initialCost[Task](initCost) + cost <- CostAccounting.initialCost[IO](initCost) res <- { implicit val c = cost - Substitute.charge(Task.now(substTerm), Cost(10000)).attempt + Substitute.charge(IO(substTerm), Cost(10000)).attempt } _ = assert(res === Right(substTerm)) finalCost <- cost.get _ = assert(finalCost === (initCost - Cost(termCost))) - } yield ()).runSyncUnsafe(5.seconds) + } yield ()).unsafeRunSync } it should "charge for failed substitution" in { @@ -73,25 +72,26 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu val varTerm = term(Expr(EVarBody(EVar(Var(FreeVar(0)))))) val originalTermCost = Chargeable[Par].cost(varTerm) val initCost = Cost(1000) + import coop.rchain.shared.RChainScheduler._ (for { - cost <- CostAccounting.initialCost[Task](initCost) + cost <- CostAccounting.initialCost[IO](initCost) res <- { implicit val c = cost Substitute - .charge(Task.raiseError[Par](new RuntimeException("")), Cost(originalTermCost)) + .charge(IO.raiseError[Par](new RuntimeException("")), 
Cost(originalTermCost)) .attempt } _ = assert(res.isLeft) finalCost <- cost.get _ = assert(finalCost === (initCost - Cost(originalTermCost))) - } yield ()).runSyncUnsafe(5.seconds) + } yield ()).unsafeRunSync } it should "stop if OutOfPhloError is returned from RSpace" in { val iSpace = new ISpaceStub[ - Task, + IO, Par, BindPattern, ListParWithRandom, @@ -101,20 +101,21 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu channel: Par, data: ListParWithRandom, persist: Boolean - ): Task[ + ): IO[ Option[ (ContResult[Par, BindPattern, TaggedContinuation], Seq[Result[Par, ListParWithRandom]]) ] ] = - Task.raiseError[Option[ + IO.raiseError[Option[ (ContResult[Par, BindPattern, TaggedContinuation], Seq[Result[Par, ListParWithRandom]]) ]](OutOfPhlogistonsError) } + import coop.rchain.shared.RChainScheduler._ implicit val rand = Blake2b512Random.defaultRandom - implicit val cost = CostAccounting.initialCost[Task](Cost(1000)).runSyncUnsafe(1.second) + implicit val cost = CostAccounting.initialCost[IO](Cost(1000)).unsafeRunSync val (_, chargingReducer) = createDispatcher(iSpace, Map.empty, Map.empty) val send = Send(Par(exprs = Seq(GString("x"))), Seq(Par())) - val test = chargingReducer.inj(send).attempt.runSyncUnsafe(1.second) + val test = chargingReducer.inj(send).attempt.unsafeRunSync assert(test === Left(OutOfPhlogistonsError)) } @@ -131,18 +132,19 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu val program = Par(sends = Seq(Send(channel, Seq(a)), Send(channel, Seq(b)))) - implicit val rand = Blake2b512Random(Array.empty[Byte]) - implicit val logF: Log[Task] = Log.log[Task] - implicit val kvm = InMemoryStoreManager[Task] + implicit val rand = Blake2b512Random(Array.empty[Byte]) + implicit val logF: Log[IO] = Log.log[IO] + implicit val kvm = InMemoryStoreManager[IO] + import coop.rchain.shared.RChainScheduler._ - def testImplementation(pureRSpace: RhoISpace[Task]): Task[ + def 
testImplementation(pureRSpace: RhoISpace[IO]): IO[ ( Either[Throwable, Unit], Map[Seq[Par], Row[BindPattern, ListParWithRandom, TaggedContinuation]] ) ] = { - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync lazy val (_, reducer) = createDispatcher(pureRSpace, Map.empty, Map.empty) @@ -177,11 +179,11 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu map.get(List(channel)) === Some(data(p, rand)) (for { - res <- mkRhoISpace[Task].flatMap(testImplementation) + res <- mkRhoISpace[IO].flatMap(testImplementation) (result, map) = res _ = assert(result === Left(OutOfPhlogistonsError)) _ = assert(stored(map, a, rand.splitByte(0)) || stored(map, b, rand.splitByte(1))) - } yield ()).runSyncUnsafe(5.seconds) + } yield ()).unsafeRunTimed(5.seconds) } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala index 58d86067515..87e80d04c52 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import com.google.protobuf.ByteString import coop.rchain.crypto.hash.{Blake2b256, Blake2b512Random, Keccak256, Sha256} import coop.rchain.crypto.signatures.{Ed25519, Secp256k1} @@ -19,8 +20,7 @@ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.shared.PathOps._ import coop.rchain.shared.{Base16, Log, Serialize} import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global +import cats.syntax.all._ import org.scalactic.TripleEqualsSupport import org.scalatest.flatspec.FixtureAnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -55,17 +55,17 @@ class 
CryptoChannelsSpec def clearStore( ackChannel: Par, timeout: Duration = 3.seconds - )(implicit env: Env[Par], runtime: RhoRuntime[Task]): Unit = { + )(implicit env: Env[Par], runtime: RhoRuntime[IO]): Unit = { val consume: Par = Receive( Seq(ReceiveBind(Seq(EVar(Var(Wildcard(WildcardMsg())))), ackChannel)), Par() ) - Await.ready(runtime.inj(consume, env).runToFuture, 3.seconds) + Await.ready(runtime.inj(consume, env).unsafeToFuture(), 3.seconds) } def assertStoreContains( - runtime: RhoRuntime[Task] - )(ackChannel: GString)(data: ListParWithRandom): Task[Assertion] = { + runtime: RhoRuntime[IO] + )(ackChannel: GString)(data: ListParWithRandom): IO[Assertion] = { val channel: Par = ackChannel for { spaceMap <- runtime.getHotChanges @@ -93,7 +93,7 @@ class CryptoChannelsSpec val ackChannel = GString("x") implicit val emptyEnv: Env[Par] = Env[Par]() - val storeContainsTest: ListParWithRandom => Task[Assertion] = + val storeContainsTest: ListParWithRandom => IO[Assertion] = assertStoreContains(runtime)(ackChannel)(_) forAll { par: Par => @@ -108,7 +108,7 @@ class CryptoChannelsSpec // 2. hash input array // 3. 
send result on supplied ack channel (runtime.inj(send) >> - storeContainsTest(ListParWithRandom(Seq(expected), rand))).runSyncUnsafe(3.seconds) + storeContainsTest(ListParWithRandom(Seq(expected), rand))).unsafeRunSync clearStore(ackChannel) } } @@ -145,7 +145,7 @@ class CryptoChannelsSpec val ackChannel = GString("x") implicit val emptyEnv: Env[Par] = Env[Par]() - val storeContainsTest: ListParWithRandom => Task[Assertion] = + val storeContainsTest: ListParWithRandom => IO[Assertion] = assertStoreContains(runtime)(ackChannel) forAll { par: Par => @@ -169,7 +169,7 @@ class CryptoChannelsSpec (runtime.inj(send) >> storeContainsTest( ListParWithRandom(Seq(Expr(GBool(true))), rand) - )).runSyncUnsafe(3.seconds) + )).unsafeRunSync clearStore(ackChannel) } } @@ -185,7 +185,7 @@ class CryptoChannelsSpec val ackChannel = GString("x") implicit val emptyEnv: Env[Par] = Env[Par]() - val storeContainsTest: ListParWithRandom => Task[Assertion] = + val storeContainsTest: ListParWithRandom => IO[Assertion] = assertStoreContains(runtime)(ackChannel) forAll { par: Par => @@ -208,30 +208,31 @@ class CryptoChannelsSpec ) (runtime.inj(send) >> storeContainsTest( ListParWithRandom(List(Expr(GBool(true))), rand) - )).runSyncUnsafe(3.seconds) + )).unsafeRunSync clearStore(ackChannel) } } protected override def withFixture(test: OneArgTest): Outcome = { - val randomInt = scala.util.Random.nextInt - val dbDir = Files.createTempDirectory(s"rchain-storage-test-$randomInt-") - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val kvm = InMemoryStoreManager[Task] + val randomInt = scala.util.Random.nextInt + val dbDir = Files.createTempDirectory(s"rchain-storage-test-$randomInt-") + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = 
NoopSpan[IO]() + implicit val kvm = InMemoryStoreManager[IO] + import coop.rchain.shared.RChainScheduler._ val runtime = (for { store <- kvm.rSpaceStores - spaces <- Resources.createRuntimes[Task](store) + spaces <- Resources.createRuntimes[IO](store) (runtime, replayRuntime, _) = spaces _ <- runtime.cost.set(Cost.UNSAFE_MAX) - } yield runtime).runSyncUnsafe() + } yield runtime).unsafeRunSync try { test(runtime) } finally { - kvm.shutdown.runSyncUnsafe() + kvm.shutdown.unsafeRunSync dbDir.recursivelyDelete() } } @@ -239,6 +240,6 @@ class CryptoChannelsSpec /** TODO(mateusz.gorski): once we refactor Rholang[AndScala]Dispatcher * to push effect choice up until declaration site refactor to `Reduce[Eval]` */ - override type FixtureParam = RhoRuntime[Task] + override type FixtureParam = RhoRuntime[IO] } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala index 6836efdd7ef..2c20e51bc14 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.{BindPattern, ListParWithRandom, Par, TaggedContinuation} @@ -11,53 +12,50 @@ import coop.rchain.rspace.RSpace import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import scala.concurrent.duration._ -final case class TestFixture(space: RhoISpace[Task], reducer: DebruijnInterpreter[Task]) +final case class TestFixture(space: RhoISpace[IO], reducer: DebruijnInterpreter[IO]) trait PersistentStoreTester { implicit val ms: Metrics.Source = 
Metrics.BaseSource + import coop.rchain.shared.RChainScheduler._ def withTestSpace[R](f: TestFixture => R): R = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe() - implicit val m = matchListPar[Task] - implicit val kvm = InMemoryStoreManager[Task] - val store = kvm.rSpaceStores.runSyncUnsafe() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync + implicit val m = matchListPar[IO] + implicit val kvm = InMemoryStoreManager[IO] + val store = kvm.rSpaceStores.unsafeRunSync val space = RSpace - .create[Task, Par, BindPattern, ListParWithRandom, TaggedContinuation](store) - .runSyncUnsafe() + .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](store, rholangEC) + .unsafeRunSync val reducer = RholangOnlyDispatcher(space)._2 - cost.set(Cost.UNSAFE_MAX).runSyncUnsafe(1.second) + cost.set(Cost.UNSAFE_MAX).unsafeRunSync // Execute test f(TestFixture(space, reducer)) } - def fixture[R](f: (RhoISpace[Task], Reduce[Task]) => Task[R]): R = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val kvm = InMemoryStoreManager[Task] - mkRhoISpace[Task] - .flatMap { - case rspace => - for { - cost <- CostAccounting.emptyCost[Task] - reducer = { - implicit val c = cost - RholangOnlyDispatcher(rspace)._2 - } - _ <- cost.set(Cost.UNSAFE_MAX) - res <- f(rspace, reducer) - } yield res - } - .runSyncUnsafe(3.seconds) + def fixture[R](f: (RhoISpace[IO], Reduce[IO]) => IO[R]): R = { + implicit val logF: Log[IO] = new Log.NOPLog[IO] + 
implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val kvm = InMemoryStoreManager[IO] + mkRhoISpace[IO].flatMap { + case rspace => + for { + cost <- CostAccounting.emptyCost[IO] + reducer = { + implicit val c = cost + RholangOnlyDispatcher(rspace)._2 + } + _ <- cost.set(Cost.UNSAFE_MAX) + res <- f(rspace, reducer) + } yield res + }.unsafeRunSync } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala index 9f2d8ec56e5..3f23b42d86a 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random @@ -16,8 +17,6 @@ import coop.rchain.rholang.interpreter.errors._ import coop.rchain.rholang.interpreter.storage._ import coop.rchain.rspace.internal.{Datum, Row, WaitingContinuation} import coop.rchain.shared.{Base16, Serialize} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks._ @@ -32,7 +31,7 @@ import scala.util.Failure class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with PersistentStoreTester { implicit val rand: Blake2b512Random = Blake2b512Random(Array.empty[Byte]) - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP[Task] + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO] case class DataMapEntry(data: Seq[Par], rand: Blake2b512Random) @@ -97,7 +96,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val addExpr = EPlus(GInt(7L), GInt(8L)) implicit val env = 
Env[Par]() val resultTask = reducer.evalExpr(addExpr) - Await.result(resultTask.runToFuture, 3.seconds) + Await.result(resultTask.unsafeToFuture(), 3.seconds) } val expected = Seq(Expr(GInt(15L))) @@ -110,7 +109,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val addExpr = EPlus(GInt(Int.MaxValue), GInt(Int.MaxValue)) implicit val env = Env[Par]() val resultTask = reducer.evalExpr(addExpr) - Await.result(resultTask.runToFuture, 3.seconds) + Await.result(resultTask.unsafeToFuture(), 3.seconds) } val expected = Seq(Expr(GInt(2 * Int.MaxValue.toLong))) @@ -123,7 +122,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val groundExpr = GInt(7L) implicit val env = Env[Par]() val resultTask = reducer.evalExpr(groundExpr) - Await.result(resultTask.runToFuture, 3.seconds) + Await.result(resultTask.unsafeToFuture(), 3.seconds) } val expected = Seq(Expr(GInt(7L))) @@ -136,7 +135,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val eqExpr = EEq(GPrivateBuilder("private_name"), GPrivateBuilder("private_name")) implicit val env = Env[Par]() val resultTask = reducer.evalExpr(eqExpr) - Await.result(resultTask.runToFuture, 3.seconds) + Await.result(resultTask.unsafeToFuture(), 3.seconds) } val expected = Seq(Expr(GBool(true))) result.exprs should be(expected) @@ -148,7 +147,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val emptyEnv = Env.makeEnv(Par(), Par()) val eqExpr = EEq(EVar(BoundVar(0)), EVar(BoundVar(1))) val resultTask = reducer.evalExpr(eqExpr) - Await.result(resultTask.runToFuture, 3.seconds) + Await.result(resultTask.unsafeToFuture(), 3.seconds) } val expected = Seq(Expr(GBool(true))) result.exprs should be(expected) @@ -167,7 +166,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- resultTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + 
Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val expectedResult = mapData( @@ -191,7 +190,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(space, reducer) => implicit val env = Env[Par]() val task = reducer.eval(receive) >> space.toMap - Await.ready(task.runToFuture, 3.seconds) + Await.ready(task.unsafeToFuture(), 3.seconds) } receiveResult.value shouldBe Failure(ReduceError("Trying to read from non-readable channel.")).some @@ -206,7 +205,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env[Par]() val task = reducer.eval(send) >> space.toMap - Await.ready(task.runToFuture, 3.seconds) + Await.ready(task.unsafeToFuture(), 3.seconds) } sendResult.value shouldBe Failure(ReduceError("Trying to send on non-writeable channel.")).some } @@ -224,7 +223,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- resultTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val expectedResult = mapData( @@ -247,7 +246,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(space, reducer) => implicit val env = Env[Par]() val task = reducer.eval(send)(env, splitRand).flatMap(_ => space.toMap) - Await.result(task.runToFuture, 3.seconds) + Await.result(task.unsafeToFuture(), 3.seconds) } val expectedResult = mapData( @@ -283,7 +282,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- resultTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val bindPattern = BindPattern( List( @@ -316,7 +315,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(space, reducer) => implicit val env = Env[Par]() val task = 
reducer.eval(receive)(env, splitRand).flatMap(_ => space.toMap) - Await.result(task.runToFuture, 3.seconds) + Await.result(task.unsafeToFuture(), 3.seconds) } val channels = List[Par](y) @@ -355,7 +354,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(receive)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskSendFirst.runToFuture, 3.seconds) + Await.result(inspectTaskSendFirst.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -376,7 +375,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send)(env, splitRand0) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } receiveFirstResult.toIterable should contain theSameElementsAs expectedResult @@ -466,7 +465,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(receive)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskSendFirst.runToFuture, 3.seconds) + Await.result(inspectTaskSendFirst.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -485,7 +484,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send)(env, splitRand0) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } receiveFirstResult.toIterable should contain theSameElementsAs expectedResult @@ -525,7 +524,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(receive)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskSendFirst.runToFuture, 3.seconds) + Await.result(inspectTaskSendFirst.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -544,7 +543,7 @@ 
class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send)(env, splitRand0) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } receiveFirstResult.toIterable should contain theSameElementsAs expectedResult } @@ -581,7 +580,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(receive)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskSendFirst.runToFuture, 3.seconds) + Await.result(inspectTaskSendFirst.unsafeToFuture(), 3.seconds) } val channels = List[Par](GInt(2L)) @@ -601,7 +600,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send)(env, splitRand0) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } checkContinuation(receiveFirstResult)( @@ -617,7 +616,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(Par(receives = Seq(receive), sends = Seq(send)))(env, baseRand) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } checkContinuation(bothResult)( @@ -664,7 +663,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- matchTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -712,7 +711,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.inj(receive)(splitRand2) res <- space.toMap } yield res - Await.result(inspectTaskSendFirst.runToFuture, 3.seconds) + 
Await.result(inspectTaskSendFirst.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -732,7 +731,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send2)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskReceiveFirst.runToFuture, 3.seconds) + Await.result(inspectTaskReceiveFirst.unsafeToFuture(), 3.seconds) } receiveFirstResult.toIterable should contain theSameElementsAs expectedResult @@ -746,7 +745,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send2)(env, splitRand1) res <- space.toMap } yield res - Await.result(inspectTaskInterleaved.runToFuture, 3.seconds) + Await.result(inspectTaskInterleaved.unsafeToFuture(), 3.seconds) } interleavedResult.toIterable should contain theSameElementsAs expectedResult @@ -772,7 +771,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- reducer.eval(send)(env, splitRand0) res <- space.toMap } yield res - Await.result(task.runToFuture, 3.seconds) + Await.result(task.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -791,7 +790,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val directResult: Par = withTestSpace { case TestFixture(_, reducer) => implicit val env = Env[Par]() - Await.result(reducer.evalExprToPar(nthCall).runToFuture, 3.seconds) + Await.result(reducer.evalExprToPar(nthCall).unsafeToFuture(), 3.seconds) } val expectedResult: Par = GInt(9L) directResult should be(expectedResult) @@ -817,7 +816,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- nthTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -835,7 +834,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val 
directResult: Par = withTestSpace { case TestFixture(_, reducer) => implicit val env = Env[Par]() - Await.result(reducer.evalExprToPar(nthCall).runToFuture, 3.seconds) + Await.result(reducer.evalExprToPar(nthCall).unsafeToFuture(), 3.seconds) } val expectedResult: Par = GInt(255.toLong) directResult should be(expectedResult) @@ -847,7 +846,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val directResult: Par = withTestSpace { case TestFixture(_, reducer) => implicit val env = Env[Par]() - Await.result(reducer.evalExprToPar(nthCall).runToFuture, 3.seconds) + Await.result(reducer.evalExprToPar(nthCall).unsafeToFuture(), 3.seconds) } val expectedResult: Par = GInt(3.toLong) directResult should be(expectedResult) @@ -874,17 +873,18 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val result = withTestSpace { case TestFixture(space, _) => - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe() + import coop.rchain.shared.RChainScheduler._ + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync def byteName(b: Byte): Par = GPrivate(ByteString.copyFrom(Array[Byte](b))) val reducer = RholangOnlyDispatcher(space, Map("rho:test:foo" -> byteName(42)))._2 - cost.set(Cost.UNSAFE_MAX).runSyncUnsafe(1.second) + cost.set(Cost.UNSAFE_MAX).unsafeRunSync implicit val env = Env[Par]() val nthTask = reducer.eval(newProc)(env, splitRand) val inspectTask = for { _ <- nthTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel0: Par = GString("result0") @@ -937,7 +937,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi _ <- nthTask res <- space.toMap } yield res - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -961,7 +961,7 @@ class ReduceSpec extends AnyFlatSpec 
with Matchers with AppendedClues with Persi val directResult: Par = withTestSpace { case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par](Expr(GString("deadbeef"))) - Await.result(reducer.evalExprToPar(hexToBytesCall).runToFuture, 3.seconds) + Await.result(reducer.evalExprToPar(hexToBytesCall).unsafeToFuture(), 3.seconds) } val expectedResult: Par = Expr(GByteArray("deadbeef".unsafeHexToByteString)) directResult should be(expectedResult) @@ -987,7 +987,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(wrapWithSend(toByteArrayCall))(env, splitRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -1015,7 +1015,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env.makeEnv[Par](GPrivateBuilder("one"), GPrivateBuilder("zero")) val task = reducer.eval(wrapWithSend(toByteArrayCall))(env, splitRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val expectedResult = mapData( @@ -1042,7 +1042,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env[Par]() val nthTask = reducer.eval(toByteArrayWithArgumentsCall) val inspectTask = nthTask >> space.toMap - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodArgumentNumberMismatch("toByteArray", 0, 1)).some } @@ -1060,7 +1060,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(wrapWithSend(toByteArrayCall))(env, splitRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + 
Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -1085,7 +1085,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(wrapWithSend(toStringCall))(env, splitRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -1109,7 +1109,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(wrapWithSend(toUtf8BytesCall))(env, splitRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -1137,7 +1137,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env[Par]() val nthTask = reducer.eval(toUtfBytesWithArgumentsCall) val inspectTask = nthTask >> space.toMap - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodArgumentNumberMismatch("toUtf8Bytes", 0, 1)).some } @@ -1150,7 +1150,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env[Par]() val nthTask = reducer.eval(toUtfBytesCall) val inspectTask = nthTask >> space.toMap - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodNotDefined("toUtf8Bytes", "Int")).some } @@ -1192,7 +1192,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(proc)(env, splitRandSrc) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val 
channel: Par = GString("result") @@ -1226,7 +1226,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(proc)(env, splitRandSrc) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") val expectedResult = mapData( @@ -1271,7 +1271,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val env = Env[Par]() val task = reducer.eval(proc)(env, baseRand) val inspectTask = task >> space.toMap - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val channel: Par = GString("result") @@ -1288,7 +1288,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par]() val inspectTask = reducer.evalExpr(EMatches(GInt(1L), GInt(1L))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GBool(true)))) @@ -1299,7 +1299,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par]() val inspectTask = reducer.evalExpr(EMatches(GInt(1L), GInt(0L))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GBool(false)))) @@ -1310,7 +1310,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par]() val inspectTask = reducer.evalExpr(EMatches(GInt(1L), EVar(Wildcard(Var.WildcardMsg())))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GBool(true)))) @@ -1321,7 +1321,7 
@@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par](GInt(1L)) val inspectTask = reducer.evalExpr(EMatches(EVar(BoundVar(0)), GInt(1L))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GBool(true)))) @@ -1334,7 +1334,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr(EMatches(GInt(1L), Connective(VarRefBody(VarRef(0, 1))))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GBool(true)))) @@ -1345,7 +1345,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env.makeEnv[Par]() val inspectTask = reducer.evalExpr(EMethodBody(EMethod("length", GString("abc")))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GInt(3L)))) } @@ -1357,7 +1357,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", GString("abcabac"), List(GInt(3L), GInt(6L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("aba")))) } @@ -1369,7 +1369,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", GString("abcabac"), List(GInt(2L), GInt(1L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("")))) } @@ -1381,7 +1381,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues 
with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", GString("abcabac"), List(GInt(8L), GInt(9L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("")))) } @@ -1393,7 +1393,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", GString("abcabac"), List(GInt(-2L), GInt(2L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("ab")))) } @@ -1410,7 +1410,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("Hello, Alice!")))) } @@ -1427,7 +1427,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("abcdef")))) } @@ -1444,7 +1444,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be( Seq(Expr(GByteArray("deadbeef".unsafeHexToByteString))) @@ -1472,7 +1472,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("1 ${b} 2 ${a}")))) } @@ -1490,7 +1490,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(task.runToFuture, 3.seconds) + Await.result(task.unsafeToFuture(), 3.seconds) } 
result.exprs should be(Seq(Expr(GString("false true")))) @@ -1509,7 +1509,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(task.runToFuture, 3.seconds) + Await.result(task.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("testUriA testUriB")))) @@ -1522,7 +1522,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val list = EList(List(GInt(0L), GInt(1L), GInt(2L), GInt(3L))) val inspectTask = reducer.evalExpr(EMethodBody(EMethod("length", list))) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GInt(4L)))) } @@ -1535,7 +1535,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", list, List(GInt(3L), GInt(5L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(EListBody(EList(List(GInt(9L), GInt(4L))))))) } @@ -1548,7 +1548,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", list, List(GInt(5L), GInt(4L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(EListBody(EList(List()))))) } @@ -1561,7 +1561,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("slice", list, List(GInt(7L), GInt(8L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(EListBody(EList(List()))))) } @@ -1574,7 +1574,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( 
EMethodBody(EMethod("slice", list, List(GInt(-2L), GInt(2L)))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(EListBody(EList(List(GInt(3L), GInt(7L))))))) } @@ -1593,7 +1593,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi ) ) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultList = EList(List(GInt(3L), GInt(2L), GInt(9L), GInt(6L), GInt(1L), GInt(7L))) result.exprs should be(Seq(Expr(EListBody(resultList)))) @@ -1608,7 +1608,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("getOrElse", map, List(GInt(1L), GString("c")))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("a")))) } @@ -1622,7 +1622,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("getOrElse", map, List(GInt(3L), GString("c")))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GString("c")))) } @@ -1636,7 +1636,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("set", map, List(GInt(3L), GString("c")))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultMap = EMapBody( ParMap( @@ -1659,7 +1659,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("set", map, List(GInt(2L), GString("c")))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + 
Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultMap = EMapBody(ParMap(List[(Par, Par)]((GInt(1L), GString("a")), (GInt(2L), GString("c"))))) @@ -1682,7 +1682,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("keys", map)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultSet = ESetBody( ParSet( @@ -1708,7 +1708,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMethodBody(EMethod("size", map)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GInt(3L)))) } @@ -1723,7 +1723,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi EMethodBody(EMethod("size", set)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } result.exprs should be(Seq(Expr(GInt(3L)))) } @@ -1736,7 +1736,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EPlusBody(EPlus(set, GInt(3L))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultSet = ESetBody(ParSet(List[Par](GInt(1L), GInt(2L), GInt(3L)))) result.exprs should be(Seq(Expr(resultSet))) @@ -1758,7 +1758,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMinusBody(EMinus(map, GInt(3L))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultMap = EMapBody(ParMap(List[(Par, Par)]((GInt(1L), GString("a")), (GInt(2L), GString("b"))))) @@ -1773,7 +1773,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val 
inspectTask = reducer.evalExpr( EMinusBody(EMinus(set, GInt(3L))) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultSet = ESetBody(ParSet(List[Par](GInt(1L), GInt(2L)))) result.exprs should be(Seq(Expr(resultSet))) @@ -1788,7 +1788,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EPlusPlusBody(EPlusPlus(lhsSet, rhsSet)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultSet = ESetBody(ParSet(List[Par](GInt(1L), GInt(2L), GInt(3L), GInt(4L)))) result.exprs should be(Seq(Expr(resultSet))) @@ -1805,7 +1805,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EPlusPlusBody(EPlusPlus(lhsMap, rhsMap)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultMap = EMapBody( ParMap( @@ -1829,7 +1829,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val inspectTask = reducer.evalExpr( EMinusMinusBody(EMinusMinus(lhsSet, rhsSet)) ) - Await.result(inspectTask.runToFuture, 3.seconds) + Await.result(inspectTask.unsafeToFuture(), 3.seconds) } val resultSet = ESetBody(ParSet(List[Par](GInt(3L), GInt(4L)))) result.exprs should be(Seq(Expr(resultSet))) @@ -1841,7 +1841,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env.makeEnv[Par]() val set = ESetBody(ParSet(List[Par](GInt(1L), GInt(2L), GInt(3L)))) val inspectTask = reducer.eval(EMethodBody(EMethod("get", set, List(GInt(1L))))) - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodNotDefined("get", "Set")).some } @@ -1853,7 +1853,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with 
Persi val map = EMapBody(ParMap(List[(Par, Par)]((GInt(1L), GString("a")), (GInt(2L), GString("b"))))) val inspectTask = reducer.eval(EMethodBody(EMethod("add", map, List(GInt(1L))))) - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodNotDefined("add", "Map")).some } @@ -1871,7 +1871,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi implicit val env = Env[Par]() val nthTask = reducer.eval(toListCall) val inspectTask = nthTask >> space.toMap - Await.ready(inspectTask.runToFuture, 3.seconds) + Await.ready(inspectTask.unsafeToFuture(), 3.seconds) } result.value shouldBe Failure(MethodArgumentNumberMismatch("toList", 0, 1)).some } @@ -1896,7 +1896,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env: Env[Par] = Env[Par]() val toListTask = reducer.evalExpr(toListCall) - Await.result(toListTask.runToFuture, 3.seconds) + Await.result(toListTask.unsafeToFuture(), 3.seconds) } val resultList = EListBody( @@ -1930,7 +1930,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env: Env[Par] = Env[Par]() val toListTask = reducer.evalExpr(toListCall) - Await.result(toListTask.runToFuture, 3.seconds) + Await.result(toListTask.unsafeToFuture(), 3.seconds) } val resultList = EListBody( @@ -1964,7 +1964,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env: Env[Par] = Env[Par]() val toListTask = reducer.evalExpr(toListCall) - Await.result(toListTask.runToFuture, 3.seconds) + Await.result(toListTask.unsafeToFuture(), 3.seconds) } val resultList = EListBody( @@ -2298,7 +2298,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi reducer.evalExpr(input).attempt } - task.runSyncUnsafe(timeout) + 
task.unsafeRunSync } /** @@ -2320,7 +2320,7 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env[Par]() val task = reducer.eval(proc) - Await.result(task.runToFuture, 30.seconds) + Await.result(task.unsafeToFuture(), 30.seconds) } result should be(()) @@ -2335,11 +2335,13 @@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi case TestFixture(_, reducer) => implicit val env = Env[Par]() val task = reducer.eval(proc) - Await.result(task.failed.runToFuture, 1.seconds) + Await.result(task.attempt.unsafeToFuture(), 1.seconds) } result should be( - ReduceError("The number of terms in the Par is 32768, which exceeds the limit of 32767.") + Left( + ReduceError("The number of terms in the Par is 32768, which exceeds the limit of 32767.") + ) ) } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala index 622f74c9b98..7b2b259f0c4 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics @@ -8,8 +9,6 @@ import coop.rchain.rholang.Resources import coop.rchain.rholang.interpreter.accounting.Cost import coop.rchain.rspace.SoftCheckpoint import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -54,22 +53,20 @@ class ReplaySpec extends AnyFlatSpec with Matchers { case (runtime, replayRuntime) => for (i <- 1 to iterations) { val (playRes, replayRes) = - evaluateWithRuntime(runtime, replayRuntime)(term, Cost(Integer.MAX_VALUE)) - .onError { - case _: 
Throwable => - println(s"Test retry count: $i").pure[Task] - } - .runSyncUnsafe(1.seconds) + evaluateWithRuntime(runtime, replayRuntime)(term, Cost(Integer.MAX_VALUE)).onError { + case _: Throwable => + println(s"Test retry count: $i").pure[IO] + }.unsafeRunSync assert(playRes.errors.isEmpty) assert(replayRes.errors.isEmpty) } - ().pure[Task] - }.runSyncUnsafe(timeout) + ().pure[IO] + }.unsafeRunSync def evaluateWithRuntime( - runtime: RhoRuntime[Task], - replayRuntime: ReplayRhoRuntime[Task] + runtime: RhoRuntime[IO], + replayRuntime: ReplayRhoRuntime[IO] )(term: String, initialPhlo: Cost) = { implicit def rand: Blake2b512Random = Blake2b512Random(Array.empty[Byte]) for { @@ -93,13 +90,13 @@ class ReplaySpec extends AnyFlatSpec with Matchers { .onError { case _: Throwable => println(s"Executed term: $term") - println(s"Event log: $log").pure[Task] + println(s"Event log: $log").pure[IO] } _ <- replayRuntime.checkReplayData.onError { case _: Throwable => println(s"Executed term: $term") println(s"Event log: $log") - println(s"Replay result: $replayResult").pure[Task] + println(s"Replay result: $replayResult").pure[IO] } // Revert all changes / reset to initial state @@ -108,13 +105,14 @@ class ReplaySpec extends AnyFlatSpec with Matchers { } yield (playResult, replayResult) } - def withRSpaceAndRuntime(op: (RhoRuntime[Task], ReplayRhoRuntime[Task]) => Task[Unit]) = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + def withRSpaceAndRuntime(op: (RhoRuntime[IO], ReplayRhoRuntime[IO]) => IO[Unit]) = { + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + import coop.rchain.shared.RChainScheduler._ val resources = for { - res <- Resources.mkRuntimes[Task]("cost-accounting-spec-") + res <- 
Resources.mkRuntimes[IO]("cost-accounting-spec-") } yield res resources.use { diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala index a89729d6da0..20f80a2aaa5 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala @@ -1,23 +1,21 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.Resources.mkRuntime import coop.rchain.rholang.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ class RuntimeSpec extends AnyFlatSpec with Matchers { - private val tmpPrefix = "rspace-store-" - private val maxDuration = 5.seconds - implicit val logF: Log[Task] = Log.log[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + private val tmpPrefix = "rspace-store-" + implicit val logF: Log[IO] = Log.log[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() private val channelReadOnlyError = "ReduceError: Trying to read from non-readable channel." 
@@ -51,10 +49,10 @@ class RuntimeSpec extends AnyFlatSpec with Matchers { private def checkError(rho: String, error: String): Unit = assert(execute(rho).errors.nonEmpty, s"Expected $rho to fail - it didn't.") - private def execute(source: String): EvaluateResult = - mkRuntime[Task](tmpPrefix) - .use { runtime => - runtime.evaluate(source) - } - .runSyncUnsafe(maxDuration) + private def execute(source: String): EvaluateResult = { + import coop.rchain.shared.RChainScheduler._ + mkRuntime[IO](tmpPrefix).use { runtime => + runtime.evaluate(source) + }.unsafeRunSync + } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala index 3242e202df1..41322e59477 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala @@ -1,11 +1,11 @@ package coop.rchain.rholang.interpreter +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Expr.ExprInstance.GString import coop.rchain.rholang.Resources.mkRuntime import coop.rchain.shared.Log -import monix.eval.Task import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec import coop.rchain.rholang.syntax._ @@ -13,19 +13,18 @@ import coop.rchain.rholang.syntax._ import scala.concurrent.duration._ import coop.rchain.models.rholang.implicits._ import coop.rchain.rholang.interpreter.errors.{InterpreterError, ReduceError} -import monix.execution.Scheduler.Implicits.global class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { - implicit val logF: Log[Task] = Log.log[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - private val maxDuration = 5.seconds + import 
coop.rchain.shared.RChainScheduler._ + implicit val logF: Log[IO] = Log.log[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() val outcomeCh = "ret" val reduceErrorMsg = "Error: index out of bound: -1" - private def execute(source: String): Task[Either[InterpreterError, Boolean]] = - mkRuntime[Task]("rholang-short-circuit-boolean") + private def execute(source: String): IO[Either[InterpreterError, Boolean]] = + mkRuntime[IO]("rholang-short-circuit-boolean") .use { runtime => for { evalResult <- runtime.evaluate(source) @@ -34,7 +33,7 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { data <- runtime.getData(GString(outcomeCh)).map(_.head) boolResult = data.a.pars.head.exprs.head.getGBool } yield Right(boolResult) - else Task.pure(Left(evalResult.errors.head)) + else IO.pure(Left(evalResult.errors.head)) } yield result } @@ -44,13 +43,13 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { s""" # @"${outcomeCh}"!(false && [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Right(false)) + execute(term).unsafeRunSync should equal(Right(false)) val term2 = s""" # @"${outcomeCh}"!(1 < 0 && [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(false)) + execute(term2).unsafeRunSync should equal(Right(false)) } "execute both par1 and par2 if par1 == true" in { val term = @@ -58,14 +57,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(true && [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(1>0 && [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + 
execute(term2).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) } } @@ -75,13 +74,13 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { s""" # @"${outcomeCh}"!(true || [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Right(true)) + execute(term).unsafeRunSync should equal(Right(true)) val term2 = s""" # @"${outcomeCh}"!(1 > 0 || [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(true)) + execute(term2).unsafeRunSync should equal(Right(true)) } "evaluate both par1 and par2 if par1 == false" in { val term = @@ -89,14 +88,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(false || [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(1<0 || [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term2).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) } } @@ -107,14 +106,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(false && 1>0 and [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(false and 1>0 && [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(false)) + execute(term2).unsafeRunSync should equal(Right(false)) } @@ -124,14 +123,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(false && 1>0 or [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should 
equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(false or 1<0 && [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(false)) + execute(term2).unsafeRunSync should equal(Right(false)) } } @@ -143,14 +142,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(false || 1>0 and [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(true and 1>0 || [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(true)) + execute(term2).unsafeRunSync should equal(Right(true)) } "work with the same precedence with `or`" in { @@ -159,14 +158,14 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # @"${outcomeCh}"!(false || 1>0 or [1,2].nth(-1)) # """.stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Left(ReduceError(reduceErrorMsg))) + execute(term).unsafeRunSync should equal(Left(ReduceError(reduceErrorMsg))) val term2 = s""" # @"${outcomeCh}"!(true or 1>0 || [1,2].nth(-1)) # """.stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(true)) + execute(term2).unsafeRunSync should equal(Right(true)) } } @@ -183,7 +182,7 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # } #}""".stripMargin('#') - execute(term).runSyncUnsafe(maxDuration) should equal(Right(false)) + execute(term).unsafeRunSync should equal(Right(false)) val term2 = s""" new ret1, ret2 in { # ret1!(true) | @@ -196,7 +195,7 @@ class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { # #}""".stripMargin('#') - execute(term2).runSyncUnsafe(maxDuration) should equal(Right(true)) + execute(term2).unsafeRunSync 
should equal(Right(true)) } } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala index 10c64b8a259..3650870c40a 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala @@ -14,8 +14,6 @@ import coop.rchain.rholang.ast.rholang_mercury.PrettyPrinter import coop.rchain.rholang.syntax._ import coop.rchain.rholang.{GenTools, ProcGen} import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalacheck.Test.Parameters import org.scalacheck.{Arbitrary, Gen} import org.scalatest.flatspec.AnyFlatSpec @@ -78,11 +76,12 @@ class CostAccountingPropertyTest extends AnyFlatSpec with ScalaCheckPropertyChec object CostAccountingPropertyTest { - def haveEqualResults[A](tasks: Task[A]*)(implicit duration: Duration): Boolean = + def haveEqualResults[A](tasks: IO[A]*)(implicit duration: Duration): Boolean = tasks.toList - .sequence[Task, A] + .sequence[IO, A] .map { _.sliding(2).forall { case List(r1, r2) => r1 == r2 } } - .runSyncUnsafe(duration) + .unsafeRunTimed(duration) + .get def execute[F[_]: Sync](runtime: RhoRuntime[F], p: Proc): F[Long] = for { @@ -99,17 +98,18 @@ object CostAccountingPropertyTest { runtime.evaluate(term) } - def costOfExecution(procs: Proc*): Task[Long] = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val ms: Metrics.Source = Metrics.BaseSource + def costOfExecution(procs: Proc*): IO[Long] = { + import coop.rchain.shared.RChainScheduler._ + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = 
new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val ms: Metrics.Source = Metrics.BaseSource val prefix = "cost-accounting-property-test" - mkRuntime[Task](prefix).use { runtime => + mkRuntime[IO](prefix).use { runtime => for { _ <- runtime.cost.set(Cost.UNSAFE_MAX) - cost <- CostAccounting.emptyCost[Task] + cost <- CostAccounting.emptyCost[IO] res <- { procs.toStream .traverse(execute(runtime, _)) diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala index e161bdb3840..cd8d596c848 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala @@ -21,8 +21,6 @@ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.{Checkpoint, Match, RSpace} import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalacheck.Prop.forAllNoShrink import org.scalacheck._ import org.scalatest.{AppendedClues, Assertion} @@ -33,6 +31,7 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ +import coop.rchain.shared.RChainScheduler._ class CostAccountingSpec extends AnyFlatSpec @@ -45,26 +44,24 @@ class CostAccountingSpec initialPhlo: Long, contract: String ): (EvaluateResult, Chain[Cost]) = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val kvm = InMemoryStoreManager[Task] + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val metricsEff: Metrics[IO] = new 
metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val kvm = InMemoryStoreManager[IO] val resources = for { - costLog <- costLog[Task]() + costLog <- costLog[IO]() store <- kvm.rSpaceStores - spaces <- createRuntimesWithCostLog[Task](store, costLog) + spaces <- createRuntimesWithCostLog[IO](store, costLog) (runtime, _, _) = spaces } yield (runtime, costLog) - resources - .flatMap { - case (runtime, costL) => - costL.listen { - runtime.evaluate(contract, Cost(initialPhlo)) - } - } - .runSyncUnsafe(75.seconds) + resources.flatMap { + case (runtime, costL) => + costL.listen { + runtime.evaluate(contract, Cost(initialPhlo)) + } + }.unsafeRunSync } private def createRuntimesWithCostLog[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( @@ -80,7 +77,8 @@ class CostAccountingSpec for { hrstores <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores + stores, + rholangEC ) (space, replay) = hrstores rhoRuntime <- RhoRuntime @@ -99,17 +97,17 @@ class CostAccountingSpec term: String ): (EvaluateResult, EvaluateResult) = { - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val metricsEff: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val ms: Metrics.Source = Metrics.BaseSource - implicit val kvm = InMemoryStoreManager[Task] + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val ms: Metrics.Source = Metrics.BaseSource + implicit val kvm = InMemoryStoreManager[IO] val evaluaResult = for { - costLog <- costLog[Task]() - cost <- CostAccounting.emptyCost[Task](implicitly, metricsEff, costLog, ms) + costLog <- costLog[IO]() + cost <- CostAccounting.emptyCost[IO](implicitly, metricsEff, costLog, ms) store <- kvm.rSpaceStores - spaces <- 
Resources.createRuntimes[Task](store) + spaces <- Resources.createRuntimes[IO](store) (runtime, replayRuntime, _) = spaces result <- { implicit def rand: Blake2b512Random = Blake2b512Random(Array.empty[Byte]) @@ -125,7 +123,7 @@ class CostAccountingSpec } } yield result - evaluaResult.runSyncUnsafe(75.seconds) + evaluaResult.unsafeRunSync } // Uses Godel numbering and a https://en.wikipedia.org/wiki/Mixed_radix diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala index 917bb99289c..c91c8c79273 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter.accounting +import cats.effect.IO import com.google.protobuf.ByteString import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -13,8 +14,6 @@ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.{Match, RSpace} import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalactic.TripleEqualsSupport import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec @@ -24,6 +23,7 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks._ import java.nio.file.{Files, Path} import scala.collection.immutable.BitSet import scala.concurrent.duration._ +import coop.rchain.shared.RChainScheduler._ class RholangMethodsCostsSpec extends AnyWordSpec @@ -52,7 +52,7 @@ class RholangMethodsCostsSpec (listN(0), 1L) ) forAll(table) { (pars, n) => - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) + implicit val cost = 
CostAccounting.emptyCost[IO].unsafeRunSync implicit val env = Env[Par]() val method = methodCall("nth", EList(pars), List(GInt(n))) withReducer[Assertion] { reducer => @@ -86,7 +86,7 @@ class RholangMethodsCostsSpec (listN(0), 1L) ) forAll(table) { (pars, n) => - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync implicit val env = Env[Par]() val method = methodCall("nth", EList(pars), List(GInt(n))) withReducer[Assertion] { reducer => @@ -109,7 +109,7 @@ class RholangMethodsCostsSpec factor: Double, method: Expr ): Assertion = { - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync implicit val env = Env[Par]() withReducer { reducer => for { @@ -967,11 +967,11 @@ class RholangMethodsCostsSpec def methodCall(method: String, target: Par, arguments: List[Par]): Expr = EMethod(method, target, arguments) - def methodCallCost(reducer: Reduce[Task])(implicit cost: _cost[Task]): Task[Cost] = + def methodCallCost(reducer: Reduce[IO])(implicit cost: _cost[IO]): IO[Cost] = cost.get .map(balance => Cost.UNSAFE_MAX - balance - METHOD_CALL_COST) - def exprCallCost(reducer: Reduce[Task])(implicit cost: _cost[Task]): Task[Cost] = + def exprCallCost(reducer: Reduce[IO])(implicit cost: _cost[IO]): IO[Cost] = cost.get .map(balance => Cost.UNSAFE_MAX - balance) @@ -1008,7 +1008,7 @@ class RholangMethodsCostsSpec def emptyString: String = "" def test(expr: Expr, expectedCost: Cost): Assertion = { - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync implicit val env = Env[Par]() withReducer[Assertion] { reducer => for { @@ -1023,32 +1023,34 @@ class RholangMethodsCostsSpec } def withReducer[R]( - f: DebruijnInterpreter[Task] => Task[R] - )(implicit cost: _cost[Task]): R = { + f: DebruijnInterpreter[IO] => IO[R] + )(implicit 
cost: _cost[IO]): R = { val test = for { _ <- cost.set(Cost.UNSAFE_MAX) res <- f(RholangOnlyDispatcher(space)._2) } yield res - test.runSyncUnsafe(5.seconds) + test.unsafeRunSync } - private var dbDir: Path = null - private var space: RhoTuplespace[Task] = null + private var dbDir: Path = null + private var space: RhoTuplespace[IO] = null + + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val ms: Metrics.Source = Metrics.BaseSource + implicit val kvm = InMemoryStoreManager[IO] + val rSpaceStore = kvm.rSpaceStores.unsafeRunSync + import coop.rchain.shared.RChainScheduler._ - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val ms: Metrics.Source = Metrics.BaseSource - implicit val kvm = InMemoryStoreManager[Task] - val rSpaceStore = kvm.rSpaceStores.runSyncUnsafe() protected override def beforeAll(): Unit = { import coop.rchain.rholang.interpreter.storage._ - implicit val m: Match[Task, BindPattern, ListParWithRandom] = matchListPar[Task] + implicit val m: Match[IO, BindPattern, ListParWithRandom] = matchListPar[IO] dbDir = Files.createTempDirectory("rholang-interpreter-test-") space = RSpace - .create[Task, Par, BindPattern, ListParWithRandom, TaggedContinuation](rSpaceStore) - .runSyncUnsafe() + .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](rSpaceStore, rholangEC) + .unsafeRunSync } protected override def afterAll(): Unit = { diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala index 8980e348a00..6cdb8ec2d8d 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala +++ 
b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala @@ -13,13 +13,11 @@ import coop.rchain.models.Var.WildcardMsg import coop.rchain.models._ import coop.rchain.models.rholang.sorter.Sortable import coop.rchain.rholang.interpreter._ -import monix.eval.Task import org.scalactic.TripleEqualsSupport import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.concurrent.TimeLimits -import monix.execution.Scheduler.Implicits.global import scala.collection.immutable.BitSet import scala.concurrent.duration._ @@ -30,7 +28,7 @@ class VarMatcherSpec extends AnyFlatSpec with Matchers with TimeLimits with Trip private val printer = PrettyPrinter() - type F[A] = MatcherMonadT[Task, A] + type F[A] = MatcherMonadT[IO, A] def assertSpatialMatch( target: Par, @@ -50,7 +48,7 @@ class VarMatcherSpec extends AnyFlatSpec with Matchers with TimeLimits with Trip maybeResultWithCost <- runFirst(spatialMatch[F, Par, Par](target, pattern)) result = maybeResultWithCost.map(_._1) _ = assert(prettyCaptures(result) == prettyCaptures(expectedCaptures)) - } yield (assert(result === expectedCaptures))).runSyncUnsafe(5.seconds) + } yield (assert(result === expectedCaptures))).unsafeRunSync } private def explainMatch( diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala index 5edec6a50fb..3cb5d549851 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala @@ -10,34 +10,32 @@ import coop.rchain.rholang.interpreter._ import coop.rchain.rholang.interpreter.accounting._ import coop.rchain.rholang.interpreter.errors.OutOfPhlogistonsError import coop.rchain.rholang.interpreter.matcher.{run => runMatcher} - import 
org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global class MatcherMonadSpec extends AnyFlatSpec with Matchers { - implicit val metrics: Metrics[Task] = new Metrics.MetricsNOP[Task] - implicit val ms: Metrics.Source = Metrics.BaseSource + implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO] + implicit val ms: Metrics.Source = Metrics.BaseSource + import coop.rchain.shared.RChainScheduler._ - type F[A] = MatcherMonadT[Task, A] + type F[A] = MatcherMonadT[IO, A] val A: Alternative[F] = Alternative[F] - implicit val cost = CostAccounting.emptyCost[Task].runSyncUnsafe() + implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync - implicit val costF: _cost[F] = matcherMonadCostLog[Task] + implicit val costF: _cost[F] = matcherMonadCostLog[IO] implicit val matcherMonadError = implicitly[Sync[F]] private def combineK[FF[_]: MonoidK, G[_]: Foldable, A](gfa: G[FF[A]]): FF[A] = gfa.foldLeft(MonoidK[FF].empty[A])(SemigroupK[FF].combineK[A]) - private def runWithCost[A](f: Task[A], phlo: Int) = + private def runWithCost[A](f: IO[A], phlo: Int) = (for { _ <- cost.set(Cost(phlo, "initial cost")) result <- f phloLeft <- cost.get - } yield (phloLeft, result)).runSyncUnsafe() + } yield (phloLeft, result)).unsafeRunSync behavior of "MatcherMonad" diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala index 9506f348d98..9f35be2b246 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala @@ -1,10 +1,9 @@ package coop.rchain.rholang.interpreter.merging import cats.Applicative +import cats.effect.IO import cats.syntax.all._ import coop.rchain.shared.scalatestcontrib._ -import 
monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -40,7 +39,8 @@ class RholangMergingLogicSpec extends AnyFlatSpec with Matchers { def getDataOnHash[F[_]: Applicative](hash: String): F[Option[Long]] = initValues.get(hash).pure[F] - RholangMergingLogic.calculateNumChannelDiff(input, getDataOnHash[Task]).map { res => + import coop.rchain.shared.RChainScheduler._ + RholangMergingLogic.calculateNumChannelDiff(input, getDataOnHash[IO]).map { res => res shouldBe Seq(Map(("A", 10)), Map(("B", 3)), Map(("A", -5), ("C", -10))) } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala index ecf79574358..7823774ad63 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala @@ -1,6 +1,6 @@ package coop.rchain.rholang.interpreter.storage -import cats.effect.Sync +import cats.effect.{IO, Sync} import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics @@ -19,8 +19,6 @@ import coop.rchain.rholang.interpreter.storage.ChargingRSpaceTest.{ChargingRSpac import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalactic.TripleEqualsSupport import org.scalatest.Outcome import org.scalatest.flatspec.FixtureAnyFlatSpec @@ -53,7 +51,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF Cost(0) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "refund if data doesn't stay in tuplespace" in { fixture => @@ -69,7 +67,7 @@ class 
ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF (consumeStorageCost + produceStorageCost) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "fail with OutOfPhloError when deploy runs out of it" in { fixture => @@ -80,10 +78,10 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- chargingRSpace.produce(channel, data, false) } yield () - val outOfPhloTest = test.attempt.runSyncUnsafe(1.second) + val outOfPhloTest = test.attempt.unsafeRunSync assert(outOfPhloTest === Left(OutOfPhlogistonsError)) - val costTest = cost.get.runSyncUnsafe(1.second) + val costTest = cost.get.unsafeRunSync assert(costTest.value === -1) } @@ -123,7 +121,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ = phlosLeft.value shouldBe (firstProdCost + secondProdCost + joinCost).value } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "not charge for storage if linear terms create a COMM" in { fixture => @@ -144,7 +142,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF (consumeStorageCost + produceStorageCost) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "charge for storing persistent produce that create a COMM" in { fixture => @@ -167,7 +165,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit ) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "charge for storing persistent consume that create a COMM" in { fixture => @@ -188,7 +186,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit ) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "refund for linear data in join" in { fixture => @@ -221,7 +219,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF (initPhlos + 
produceYCost - consumeEventStorageCost - commEventStorageCost) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "refund for removing consume" in { fixture => @@ -242,7 +240,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF (initPhlos + consumeStorageCost - produceEventStorageCost - commEventStorageCost) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "refund for removing produce" in { fixture => @@ -266,7 +264,7 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit _ <- cost.get shouldBeF (initPhlos + produceCost - consumeEventStorageCost - commEventStorageCost) } yield () - test.runSyncUnsafe(1.second) + test.unsafeRunSync } it should "refund for clearing tuplespace" in { fixture => @@ -302,32 +300,33 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit ) } yield () - test.runSyncUnsafe(5.seconds) + test.unsafeRunSync } override type FixtureParam = TestFixture protected override def withFixture(test: OneArgTest): Outcome = { - val cost: _cost[Task] = CostAccounting.emptyCost[Task].runSyncUnsafe(1.second) - implicit val span = NoopSpan[Task] - implicit val kvm = InMemoryStoreManager[Task] - - def mkChargingRspace(rhoISpace: RhoISpace[Task]): Task[ChargingRSpace] = { - val s = implicitly[Sync[Task]] - Task.delay(ChargingRSpace.chargingRSpace(rhoISpace)(s, span, cost)) + import coop.rchain.shared.RChainScheduler._ + val cost: _cost[IO] = CostAccounting.emptyCost[IO].unsafeRunSync + implicit val span = NoopSpan[IO] + implicit val kvm = InMemoryStoreManager[IO] + + def mkChargingRspace(rhoISpace: RhoISpace[IO]): IO[ChargingRSpace] = { + val s = implicitly[Sync[IO]] + IO.delay(ChargingRSpace.chargingRSpace(rhoISpace)(s, span, cost)) } - mkRhoISpace[Task] + mkRhoISpace[IO] .flatMap(mkChargingRspace) - .flatMap(chargingRSpace => Task.delay { test(TestFixture(chargingRSpace, cost)) }) - 
.runSyncUnsafe(10.seconds) + .flatMap(chargingRSpace => IO.delay { test(TestFixture(chargingRSpace, cost)) }) + .unsafeRunSync } } object ChargingRSpaceTest { - type ChargingRSpace = RhoTuplespace[Task] - final case class TestFixture(chargingRSpace: ChargingRSpace, cost: _cost[Task]) + type ChargingRSpace = RhoTuplespace[IO] + final case class TestFixture(chargingRSpace: ChargingRSpace, cost: _cost[IO]) val NilPar = ListParWithRandom().withPars(Seq(Par())) @@ -349,7 +348,7 @@ object ChargingRSpaceTest { ): TaggedContinuation = TaggedContinuation(ParBody(ParWithRandom(par, r))) - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val ms: Metrics.Source = Metrics.BaseSource + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val ms: Metrics.Source = Metrics.BaseSource } diff --git a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala index d8e4c31fff1..5bb03995338 100644 --- a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala +++ b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala @@ -1,13 +1,12 @@ package rholang.rosette +import cats.effect.IO import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.Resources.mkRuntime import coop.rchain.rholang.interpreter.EvaluateResult import coop.rchain.rholang.syntax._ import coop.rchain.shared.Log -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers @@ -18,11 +17,10 @@ import scala.io.Source import scala.util.Using class CompilerTests extends AnyFunSuite with Matchers { - private val tmpPrefix = "rspace-store-" - private val maxDuration = 5.seconds - implicit val logF: Log[Task] = new Log.NOPLog[Task] - 
implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() + private val tmpPrefix = "rspace-store-" + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() private val testFiles: Iterator[Path] = Files.walk(Paths.get(getClass.getResource("/tests").getPath)).iterator().asScala @@ -44,15 +42,15 @@ class CompilerTests extends AnyFunSuite with Matchers { } } - private def execute(file: Path): EvaluateResult = - mkRuntime[Task](tmpPrefix) - .use { runtime => - Using.resource(Source.fromFile(file.toString))( - fileContents => { - runtime.evaluate(fileContents.mkString) - } - ) - } - .runSyncUnsafe(maxDuration) + private def execute(file: Path): EvaluateResult = { + import coop.rchain.shared.RChainScheduler._ + mkRuntime[IO](tmpPrefix).use { runtime => + Using.resource(Source.fromFile(file.toString))( + fileContents => { + runtime.evaluate(fileContents.mkString) + } + ) + }.unsafeRunSync + } } diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala index ca0926eb344..5ac392770ec 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala @@ -10,7 +10,7 @@ import coop.rchain.models._ import coop.rchain.rholang.interpreter.RholangCLI import coop.rchain.rholang.interpreter.accounting._ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager -import coop.rchain.rspace.{Match, _} +import coop.rchain.rspace.{Match, RSpace, _} import coop.rchain.shared.Log import coop.rchain.shared.PathOps.RichPath import monix.eval.Task @@ -114,7 +114,7 @@ object BasicBench { private val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") implicit val kvm = 
RholangCLI.mkRSpaceStoreManager[Task](dbDir).runSyncUnsafe() val rSpaceStore = kvm.rSpaceStores.runSyncUnsafe() - + import coop.rchain.shared.RChainScheduler._ val testSpace: ISpace[ Task, Par, @@ -129,9 +129,8 @@ object BasicBench { BindPattern, ListParWithRandom, TaggedContinuation - ](rSpaceStore) + ](rSpaceStore, rholangEC) .runSyncUnsafe() - implicit val cost = CostAccounting.initialCost[Task](Cost.UNSAFE_MAX).runSyncUnsafe() val initSeed = 123456789L diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala index abd1fa2566c..251d0f680ff 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala @@ -1,20 +1,18 @@ package coop.rchain.rspace.bench -import cats.Eval -import cats.implicits.catsSyntaxOptionId +import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Par import coop.rchain.rholang.Resources -import coop.rchain.rholang.interpreter.RholangCLI -import coop.rchain.rholang.interpreter.compiler.Compiler +import coop.rchain.rholang.interpreter.{ParBuilderUtil, RhoRuntime, RholangCLI} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager +import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.shared.Log -import monix.eval.Task +import monix.eval.{Coeval, Task} import monix.execution.Scheduler.Implicits.global import org.openjdk.jmh.annotations.{Setup, TearDown} -import coop.rchain.catscontrib.effect.implicits.sEval import java.io.{FileNotFoundException, InputStreamReader} import java.nio.file.{Files, Path} @@ -39,9 +37,10 @@ trait EvalBenchStateBase { def doSetup(): Unit = { deleteOldStorage(dbDir) - term = try { - 
Compiler[Eval].sourceToADT(resourceFileReader(rhoScriptSource)).value.some - } catch { case x: Throwable => throw x } + term = Compiler[Coeval].sourceToADT(resourceFileReader(rhoScriptSource)).runAttempt match { + case Right(par) => Some(par) + case Left(err) => throw err + } } @TearDown diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala index 4fdf8102fe9..f66f1ce07b7 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala @@ -98,9 +98,10 @@ class RSpaceBench extends RSpaceBenchBase { val kvm = RholangCLI.mkRSpaceStoreManager(dbDir) val rspaceStores = kvm.rSpaceStores + import coop.rchain.shared.RChainScheduler._ @Setup def setup() = - space = RSpace.create[Id, Channel, Pattern, Entry, EntriesCaptor](rspaceStores) + space = RSpace.create[Id, Channel, Pattern, Entry, EntriesCaptor](rspaceStores, rholangEC) @TearDown def tearDown() = { diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala index ebe54e9f2e2..627190ccb13 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala @@ -75,11 +75,12 @@ object ReplayRSpaceBench { @Setup def setup() = { + import coop.rchain.shared.RChainScheduler._ dbDir = Files.createTempDirectory("replay-rspace-bench-") val kvm = RholangCLI.mkRSpaceStoreManager[Id](dbDir) val store = kvm.rSpaceStores val (space, replaySpace) = - RSpace.createWithReplay[Id, Channel, Pattern, Entry, EntriesCaptor](store) + RSpace.createWithReplay[Id, Channel, Pattern, Entry, EntriesCaptor](store, rholangEC) this.space = space this.replaySpace = replaySpace } diff --git 
a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala index 439e3f11d0f..8bd29ccc7f7 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala @@ -1,21 +1,19 @@ package coop.rchain.rspace.bench -import cats.Eval -import cats.implicits.catsSyntaxOptionId +import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime, RholangCLI} +import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Par import coop.rchain.rholang.Resources -import coop.rchain.rholang.interpreter.compiler.Compiler -import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime, RholangCLI} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager +import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.shared.Log -import monix.eval.Task +import monix.eval.{Coeval, Task} import monix.execution.Scheduler import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole -import coop.rchain.catscontrib.effect.implicits.sEval import java.nio.file.{Files, Path} import scala.concurrent.Await @@ -64,14 +62,16 @@ abstract class RhoBenchBaseState { def doSetup(): Unit = { deleteOldStorage(dbDir) setupTerm = setupRho.flatMap { p => - try { - Compiler[Eval].sourceToADT(p).value.some - } catch { case err: Throwable => throw err } + Compiler[Coeval].sourceToADT(p).runAttempt match { + case Right(par) => Some(par) + case Left(err) => throw err + } } - term = try { - Compiler[Eval].sourceToADT(testedRho).value - } catch { case err: Throwable => throw err } + term = Compiler[Coeval].sourceToADT(testedRho).runAttempt match { + case Right(par) => par + case Left(err) => throw err + } val runtimes = 
createRuntime.runSyncUnsafe() runtime = runtimes._1 diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala index c6432dc8069..5f842492bf5 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace.bench +import coop.rchain.catscontrib.TaskContrib._ import org.openjdk.jmh.annotations.{Level, Setup} import org.openjdk.jmh.infra.Blackhole diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala index 0849db9ca16..c9476df0e18 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala @@ -20,16 +20,16 @@ import scala.concurrent.ExecutionContext class RSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - storeAtom: AtomicAny[HotStore[F, C, P, A, K]] + storeAtom: AtomicAny[HotStore[F, C, P, A, K]], + rholangEC: ExecutionContext )( implicit serializeC: Serialize[C], serializeP: Serialize[P], serializeA: Serialize[A], serializeK: Serialize[K], - val m: Match[F, P, A], - scheduler: ExecutionContext -) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom) + val m: Match[F, P, A] +) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom, rholangEC) with ISpace[F, C, P, A, K] { protected[this] override val logger: Logger = Logger[this.type] @@ -215,7 +215,7 @@ class RSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( nextHistory <- historyRepo.reset(historyRepo.history.root) historyReader <- nextHistory.getHistoryReader(nextHistory.root) hotStore <- HotStore(historyReader.base) - rSpace <- RSpace(nextHistory, hotStore) + rSpace <- RSpace(nextHistory, 
hotStore, rholangEC) _ <- rSpace.restoreInstalls() } yield rSpace } @@ -237,60 +237,60 @@ object RSpace { */ def apply[F[_]: Concurrent: ContextShift: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - store: HotStore[F, C, P, A, K] + store: HotStore[F, C, P, A, K], + rholangEC: ExecutionContext )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[RSpace[F, C, P, A, K]] = - Sync[F].delay(new RSpace[F, C, P, A, K](historyRepository, AtomicAny(store))) + Sync[F].delay(new RSpace[F, C, P, A, K](historyRepository, AtomicAny(store), rholangEC)) /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ def create[F[_]: Concurrent: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( - store: RSpaceStore[F] + store: RSpaceStore[F], + rholangEC: ExecutionContext )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[RSpace[F, C, P, A, K]] = for { setup <- createHistoryRepo[F, C, P, A, K](store) (historyReader, store) = setup - space <- RSpace(historyReader, store) + space <- RSpace(historyReader, store, rholangEC) } yield space /** * Creates [[RSpace]] and [[ReplayRSpace]] from [[KeyValueStore]]'s. 
*/ def createWithReplay[F[_]: Concurrent: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( - store: RSpaceStore[F] + store: RSpaceStore[F], + rholangEC: ExecutionContext )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[(RSpace[F, C, P, A, K], ReplayRSpace[F, C, P, A, K])] = for { setup <- createHistoryRepo[F, C, P, A, K](store) (historyRepo, store) = setup // Play - space <- RSpace(historyRepo, store) + space <- RSpace(historyRepo, store, rholangEC) // Replay historyReader <- historyRepo.getHistoryReader(historyRepo.root) replayStore <- HotStore(historyReader.base) - replay <- ReplayRSpace(historyRepo, replayStore) + replay <- ReplayRSpace(historyRepo, replayStore, rholangEC) } yield (space, replay) /** diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala index 2cf3be8ae66..473ba8e9af2 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala @@ -23,14 +23,14 @@ import scala.util.Random abstract class RSpaceOps[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - val storeAtom: AtomicAny[HotStore[F, C, P, A, K]] + val storeAtom: AtomicAny[HotStore[F, C, P, A, K]], + rholangEC: ExecutionContext )( implicit serializeC: Serialize[C], serializeP: Serialize[P], serializeA: Serialize[A], - serializeK: Serialize[K], - scheduler: ExecutionContext + serializeK: Serialize[K] ) extends SpaceMatcher[F, C, P, A, K] { override def syncF: Sync[F] = Sync[F] @@ -182,7 +182,7 @@ abstract class RSpaceOps[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, persist: Boolean, peeks: SortedSet[Int] = SortedSet.empty ): F[MaybeActionResult] = - ContextShift[F].evalOn(scheduler) { + ContextShift[F].evalOn(rholangEC) { if (channels.isEmpty) { val msg = 
"channels can't be empty" Log[F].error(msg) >> Sync[F] @@ -221,7 +221,7 @@ abstract class RSpaceOps[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, data: A, persist: Boolean ): F[MaybeActionResult] = - ContextShift[F].evalOn(scheduler) { + ContextShift[F].evalOn(rholangEC) { (for { produceRef <- Sync[F].delay(Produce(channel, data, persist)) result <- produceLockF(channel)( diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala index 41ef36ae15f..5be5f70c6f3 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala @@ -20,16 +20,16 @@ import scala.concurrent.ExecutionContext class ReplayRSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - storeAtom: AtomicAny[HotStore[F, C, P, A, K]] + storeAtom: AtomicAny[HotStore[F, C, P, A, K]], + rholangEC: ExecutionContext )( implicit serializeC: Serialize[C], serializeP: Serialize[P], serializeA: Serialize[A], serializeK: Serialize[K], - val m: Match[F, P, A], - scheduler: ExecutionContext -) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom) + val m: Match[F, P, A] +) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom, rholangEC) with IReplaySpace[F, C, P, A, K] { protected override def logF: Log[F] = Log[F] @@ -306,7 +306,7 @@ class ReplayRSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, nextHistory <- historyRepo.reset(historyRepo.history.root) historyReader <- nextHistory.getHistoryReader(nextHistory.root) hotStore <- HotStore(historyReader.base) - rSpaceReplay <- ReplayRSpace(nextHistory, hotStore) + rSpaceReplay <- ReplayRSpace(nextHistory, hotStore, rholangEC) _ <- rSpaceReplay.restoreInstalls() } yield rSpaceReplay } @@ -319,17 +319,17 @@ object ReplayRSpace { */ def apply[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( 
historyRepository: HistoryRepository[F, C, P, A, K], - store: HotStore[F, C, P, A, K] + store: HotStore[F, C, P, A, K], + rholangEC: ExecutionContext )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[ReplayRSpace[F, C, P, A, K]] = Sync[F].delay { - new ReplayRSpace[F, C, P, A, K](historyRepository, AtomicAny(store)) + new ReplayRSpace[F, C, P, A, K](historyRepository, AtomicAny(store), rholangEC) } } diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala index 464e4d527be..5c68b076bff 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala @@ -16,6 +16,7 @@ import coop.rchain.rspace.ReportingRspace.{ import coop.rchain.rspace.history.HistoryRepository import coop.rchain.rspace.internal._ import coop.rchain.rspace.trace._ +import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.shared.{Log, Serialize} import coop.rchain.store.KeyValueStore import monix.execution.atomic.AtomicAny @@ -62,10 +63,11 @@ object ReportingRspace { sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[ReportingRspace[F, C, P, A, K]] = - Sync[F].delay(new ReportingRspace[F, C, P, A, K](historyRepository, AtomicAny(store))) + Sync[F].delay( + new ReportingRspace[F, C, P, A, K](historyRepository, AtomicAny(store)) + ) /** * Creates [[RSpace]] from [[KeyValueStore]]'s, @@ -77,8 +79,7 @@ object ReportingRspace { sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext + m: Match[F, P, A] ): F[ReportingRspace[F, C, P, A, K]] = for { history <- RSpace.createHistoryRepo[F, C, P, A, K](store) @@ -96,9 +97,8 @@ class ReportingRspace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, 
C, P, serializeP: Serialize[P], serializeA: Serialize[A], serializeK: Serialize[K], - m: Match[F, P, A], - scheduler: ExecutionContext -) extends ReplayRSpace[F, C, P, A, K](historyRepository, storeAtom) { + m: Match[F, P, A] +) extends ReplayRSpace[F, C, P, A, K](historyRepository, storeAtom, rholangEC) { protected[this] override val logger: Logger = Logger[this.type] diff --git a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala index ca38bf5973f..1d5184cb334 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.examples -import cats.effect.{Concurrent, ContextShift} +import cats.effect.{Concurrent, ContextShift, IO} import cats.{Applicative, Id} import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager @@ -14,10 +14,10 @@ import scodec.bits.ByteVector import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream} import scala.collection.mutable.ListBuffer import scala.concurrent.ExecutionContext -import scala.concurrent.ExecutionContext.Implicits.global @SuppressWarnings(Array("org.wartremover.warts.EitherProjectionPartial")) object AddressBookExample { + import coop.rchain.shared.RChainScheduler._ /* Here we define a type for channels */ @@ -195,7 +195,6 @@ object AddressBookExample { ) def exampleOne(): Unit = { - implicit val log: Log[Id] = Log.log implicit val metricsF: Metrics[Id] = new Metrics.MetricsNOP[Id]() implicit val spanF: Span[Id] = NoopSpan[Id]() @@ -203,7 +202,7 @@ object AddressBookExample { // Let's define our store val store = keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store) + val space = RSpace.create[Id, Channel, Pattern, Entry, 
Printer](store, rholangEC) Console.printf("\nExample One: Let's consume and then produce...\n") @@ -238,7 +237,7 @@ object AddressBookExample { // Let's define our store val store = keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store) + val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store, rholangEC) Console.printf("\nExample Two: Let's produce and then consume...\n") @@ -325,7 +324,7 @@ object AddressBookExample { // Let's define our store val store = keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store) + val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store, rholangEC) try { f(space) } finally { diff --git a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala index 07621f3c5e5..eeeaaf82379 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace +import cats.effect.IO import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -13,11 +14,11 @@ import coop.rchain.rspace.state.{RSpaceExporter, RSpaceImporter} import coop.rchain.shared.ByteVectorOps.RichByteVector import coop.rchain.shared.{Log, Serialize} import coop.rchain.store.InMemoryStoreManager -import monix.eval.Task import monix.execution.atomic.AtomicAny import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scodec.bits.ByteVector +import coop.rchain.shared.RChainScheduler._ class ExportImportTests extends AnyFlatSpec @@ -26,13 +27,13 @@ class ExportImportTests "export and import of one page" should "works correctly" in fixture { (space1, exporter1, importer1, space2, _, importer2) => - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task]() - val pageSize = 
1000 // Match more than dataSize - val dataSize: Int = 10 - val startSkip: Int = 0 - val range = 0 until dataSize - val pattern = List(Wildcard) - val continuation = "continuation" + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() + val pageSize = 1000 // Match more than dataSize + val dataSize: Int = 10 + val startSkip: Int = 0 + val range = 0 until dataSize + val pattern = List(Wildcard) + val continuation = "continuation" for { // Generate init data in space1 @@ -54,7 +55,7 @@ class ExportImportTests dataItems = exportData._2.items.toVector // Validate exporting page - _ <- RSpaceImporter.validateStateItems[Task]( + _ <- RSpaceImporter.validateStateItems[IO]( historyItems, dataItems, initStartPath, @@ -81,20 +82,20 @@ class ExportImportTests "multipage export" should "works correctly" in fixture { (space1, exporter1, importer1, space2, _, importer2) => - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task]() - val pageSize = 10 - val dataSize: Int = 1000 - val startSkip: Int = 0 - val range = 0 until dataSize - val pattern = List(Wildcard) - val continuation = "continuation" + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() + val pageSize = 10 + val dataSize: Int = 1000 + val startSkip: Int = 0 + val range = 0 until dataSize + val pattern = List(Wildcard) + val continuation = "continuation" type Params = ( Seq[(Blake2b256Hash, ByteVector)], // HistoryItems Seq[(Blake2b256Hash, ByteVector)], // DataItems Seq[(Blake2b256Hash, Option[Byte])] // StartPath ) - def multipageExport(params: Params): Task[Either[Params, Params]] = + def multipageExport(params: Params): IO[Either[Params, Params]] = params match { case (historyItems, dataItems, startPath) => for { @@ -111,7 +112,7 @@ class ExportImportTests lastPath = exportData._1.lastPath // Validate exporting page - _ <- RSpaceImporter.validateStateItems[Task]( + _ <- RSpaceImporter.validateStateItems[IO]( historyItemsPage, dataItemsPage, startPath, @@ -162,13 +163,13 @@ class ExportImportTests // 
But on the other hand, this allows you to work simultaneously with several nodes. "multipage export with skip" should "works correctly" in fixture { (space1, exporter1, importer1, space2, _, importer2) => - implicit val log: Log.NOPLog[Task] = new Log.NOPLog[Task]() - val pageSize = 10 - val dataSize: Int = 1000 - val startSkip: Int = 0 - val range = 0 until dataSize - val pattern = List(Wildcard) - val continuation = "continuation" + implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() + val pageSize = 10 + val dataSize: Int = 1000 + val startSkip: Int = 0 + val range = 0 until dataSize + val pattern = List(Wildcard) + val continuation = "continuation" type Params = ( Seq[(Blake2b256Hash, ByteVector)], // HistoryItems @@ -176,7 +177,7 @@ class ExportImportTests Seq[(Blake2b256Hash, Option[Byte])], // StartPath Int // Size of skip ) - def multipageExportWithSkip(params: Params): Task[Either[Params, Params]] = + def multipageExportWithSkip(params: Params): IO[Either[Params, Params]] = params match { case (historyItems, dataItems, startPath, skip) => for { @@ -192,7 +193,7 @@ class ExportImportTests dataItemsPage = exportData._2.items // Validate exporting page - _ <- RSpaceImporter.validateStateItems[Task]( + _ <- RSpaceImporter.validateStateItems[IO]( historyItemsPage, dataItemsPage, startPath, @@ -249,44 +250,45 @@ trait InMemoryExportImportTestsBase[C, P, A, K] { import SchedulerPools.global def fixture[S]( f: ( - ISpace[Task, C, P, A, K], - RSpaceExporter[Task], - RSpaceImporter[Task], - ISpace[Task, C, P, A, K], - RSpaceExporter[Task], - RSpaceImporter[Task] - ) => Task[S] + ISpace[IO, C, P, A, K], + RSpaceExporter[IO], + RSpaceImporter[IO], + ISpace[IO, C, P, A, K], + RSpaceExporter[IO], + RSpaceImporter[IO] + ) => IO[S] )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[Task, P, A] + m: Match[IO, P, A] ): S = { - implicit val log: Log[Task] = Log.log[Task] - implicit val metricsF: Metrics[Task] = new 
Metrics.MetricsNOP[Task]() - implicit val spanF: Span[Task] = NoopSpan[Task]() - implicit val kvm: InMemoryStoreManager[Task] = InMemoryStoreManager[Task] + implicit val log: Log[IO] = Log.log[IO] + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val kvm: InMemoryStoreManager[IO] = InMemoryStoreManager[IO] (for { roots1 <- kvm.store("roots1") cold1 <- kvm.store("cold1") history1 <- kvm.store("history1") - historyRepository1 <- HistoryRepositoryInstances.lmdbRepository[Task, C, P, A, K]( + historyRepository1 <- HistoryRepositoryInstances.lmdbRepository[IO, C, P, A, K]( roots1, cold1, history1 ) - cache1 <- Ref.of[Task, HotStoreState[C, P, A, K]](HotStoreState[C, P, A, K]()) + cache1 <- Ref.of[IO, HotStoreState[C, P, A, K]](HotStoreState[C, P, A, K]()) historyReader <- historyRepository1.getHistoryReader(historyRepository1.root) store1 <- { val hr = historyReader.base - HotStore[Task, C, P, A, K](cache1, hr).map(AtomicAny(_)) + HotStore[IO, C, P, A, K](cache1, hr).map(AtomicAny(_)) } - space1 = new RSpace[Task, C, P, A, K]( + space1 = new RSpace[IO, C, P, A, K]( historyRepository1, - store1 + store1, + rholangEC ) exporter1 <- historyRepository1.exporter importer1 <- historyRepository1.importer @@ -294,25 +296,26 @@ trait InMemoryExportImportTestsBase[C, P, A, K] { roots2 <- kvm.store("roots2") cold2 <- kvm.store("cold2") history2 <- kvm.store("history2") - historyRepository2 <- HistoryRepositoryInstances.lmdbRepository[Task, C, P, A, K]( + historyRepository2 <- HistoryRepositoryInstances.lmdbRepository[IO, C, P, A, K]( roots2, cold2, history2 ) - cache2 <- Ref.of[Task, HotStoreState[C, P, A, K]](HotStoreState[C, P, A, K]()) + cache2 <- Ref.of[IO, HotStoreState[C, P, A, K]](HotStoreState[C, P, A, K]()) historyReader <- historyRepository2.getHistoryReader(historyRepository2.root) store2 <- { val hr = historyReader.base - HotStore[Task, C, P, A, K](cache2, hr).map(AtomicAny(_)) + HotStore[IO, 
C, P, A, K](cache2, hr).map(AtomicAny(_)) } - space2 = new RSpace[Task, C, P, A, K]( + space2 = new RSpace[IO, C, P, A, K]( historyRepository2, - store2 + store2, + rholangEC ) exporter2 <- historyRepository2.exporter importer2 <- historyRepository2.importer res <- f(space1, exporter1, importer1, space2, exporter2, importer2) - } yield { res }).runSyncUnsafe() + } yield { res }).unsafeRunSync } } diff --git a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala index d168caa1773..1e5bd57991c 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.rspace.examples.StringExamples.{StringsCaptor, _} @@ -10,8 +10,6 @@ import coop.rchain.rspace.history.HistoryReaderBase import coop.rchain.rspace.internal._ import coop.rchain.rspace.test.ArbitraryInstances._ import coop.rchain.shared.GeneratorUtils._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalacheck.{Arbitrary, Gen} import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec @@ -1115,11 +1113,12 @@ class History[F[_]: Sync, C, P, A, K](R: Ref[F, HotStoreState[C, P, A, K]]) override def getJoinsProj[R](key: C): ((Seq[C], ByteVector) => R) => F[Seq[R]] = ??? 
} -trait InMemHotStoreSpec extends HotStoreSpec[Task] { +trait InMemHotStoreSpec extends HotStoreSpec[IO] { - protected type F[A] = Task[A] - implicit override val S: Sync[F] = implicitly[Concurrent[Task]] - implicit override val P: Parallel[Task] = Task.catsParallel + import coop.rchain.shared.RChainScheduler._ + protected type F[A] = IO[A] + implicit override val S: Sync[F] = implicitly[Concurrent[IO]] + implicit override val P: Parallel[IO] = IO.ioParallel def C( c: HotStoreState[String, Pattern, String, StringsCaptor] = HotStoreState() ): F[Ref[F, HotStoreState[String, Pattern, String, StringsCaptor]]] @@ -1137,7 +1136,7 @@ trait InMemHotStoreSpec extends HotStoreSpec[Task] { cache <- C() hotStore <- HotStore[F, String, Pattern, String, StringsCaptor](cache, history) res <- f(cache, history, hotStore) - } yield res).runSyncUnsafe(1.second) + } yield res).unsafeRunTimed(1.second) override def fixture(cache: HotStoreState[String, Pattern, String, StringsCaptor])( f: HotStore[F, String, Pattern, String, StringsCaptor] => F[Unit] @@ -1148,7 +1147,7 @@ trait InMemHotStoreSpec extends HotStoreSpec[Task] { cache <- C(cache) hotStore <- HotStore[F, String, Pattern, String, StringsCaptor](cache, history) res <- f(hotStore) - } yield res).runSyncUnsafe(1.second) + } yield res).unsafeRunTimed(1.second) } diff --git a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala index 3fe3dcc10ca..906d3265760 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala @@ -1,6 +1,7 @@ package coop.rchain.rspace import cats.Functor +import cats.effect.IO import cats.effect.concurrent.Ref import cats.syntax.all._ import com.typesafe.scalalogging.Logger @@ -14,7 +15,6 @@ import coop.rchain.rspace.trace.Consume import coop.rchain.rspace.util.ReplayException import coop.rchain.shared.{Log, Serialize} import 
coop.rchain.store.InMemoryStoreManager -import monix.eval.Task import monix.execution.Scheduler import monix.execution.atomic.AtomicAny import org.scalacheck._ @@ -32,12 +32,13 @@ object SchedulerPools { val rspacePool = Scheduler.fixedPool("RSpacePool", 5) } -//noinspection ZeroIndexToHead,NameBooleanParameters +//noinspection ZeroIndexToHead,NameBooleanParameters trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, String] { - import SchedulerPools.global - import cats.syntax.parallel._ - implicit val log: Log[Task] = new Log.NOPLog[Task] + import coop.rchain.shared.RChainScheduler._ + implicit val pIO = IO.ioParallel + + implicit val log: Log[IO] = new Log.NOPLog[IO] val arbitraryRangeSize: Gen[Int] = Gen.chooseNum[Int](1, 10) val arbitraryRangesSize: Gen[(Int, Int)] = for { m <- Gen.chooseNum[Int](1, 10) @@ -45,14 +46,14 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S } yield (n, m) def consumeMany[C, P, A, K]( - space: ISpace[Task, C, P, A, K], + space: ISpace[IO, C, P, A, K], range: Seq[Int], channelsCreator: Int => List[C], patterns: List[P], continuationCreator: Int => K, persist: Boolean, peeks: SortedSet[Int] = SortedSet.empty - ): Task[List[Option[(ContResult[C, P, K], Seq[Result[C, A]])]]] = + ): IO[List[Option[(ContResult[C, P, K], Seq[Result[C, A]])]]] = shuffle(range).toList.parTraverse { i: Int => logger.debug("Started consume {}", i) space @@ -64,12 +65,12 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S } def produceMany[C, P, A, K]( - space: ISpace[Task, C, P, A, K], + space: ISpace[IO, C, P, A, K], range: Seq[Int], channelCreator: Int => C, datumCreator: Int => A, persist: Boolean - ): Task[List[Option[(ContResult[C, P, K], Seq[Result[C, A]])]]] = + ): IO[List[Option[(ContResult[C, P, K], Seq[Result[C, A]])]]] = shuffle(range).toList.parTraverse { i: Int => logger.debug("Started produce {}", i) space.produce(channelCreator(i), datumCreator(i), 
persist).map { r => @@ -496,7 +497,7 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S val peeks: SortedSet[Int] = SortedSet.apply(Random.shuffle(channelsRange).take(amountOfPeekedChannels): _*) val produces = Random.shuffle(channels) - def consumeAndProduce(s: ISpace[Task, String, Pattern, String, String]) = + def consumeAndProduce(s: ISpace[IO, String, Pattern, String, String]) = for { r <- s.consume(channels, patterns, continuation, false, peeks = peeks) rs <- produces.traverse(ch => s.produce(ch, s"datum-$ch", false)) @@ -1025,11 +1026,11 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S n: Int => def process(indices: Seq[Int]): Checkpoint = fixture { (store, replayStore, space, replaySpace) => - Task.delay { + IO.delay { for (i <- indices) { - replaySpace.produce("ch1", s"datum$i", false).runSyncUnsafe() + replaySpace.produce("ch1", s"datum$i", false).unsafeRunSync } - space.createCheckpoint().runSyncUnsafe() + space.createCheckpoint().unsafeRunSync } } @@ -1083,7 +1084,7 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S consume2 <- replaySpace.consume(channels, patterns, continuation, false) _ = consume2 shouldBe None - _ <- replayStore.get().isEmpty.map(_ shouldBe false) + _ <- replayStore.get().isEmpty().map(_ shouldBe false) _ <- replayStore .get() .changes @@ -1091,7 +1092,7 @@ trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, S .map(_.length shouldBe 1) _ <- replaySpace.reset(emptyPoint.root) - _ <- replayStore.get().isEmpty.map(_ shouldBe true) + _ <- replayStore.get().isEmpty().map(_ shouldBe true) _ = replaySpace.replayData shouldBe empty checkpoint1 <- replaySpace.createCheckpoint() @@ -1239,18 +1240,18 @@ trait ReplayRSpaceTestsBase[C, P, A, K] def fixture[S]( f: ( - AtomicAny[HotStore[Task, C, P, A, K]], - AtomicAny[HotStore[Task, C, P, A, K]], - ISpace[Task, C, P, A, K], - IReplaySpace[Task, C, P, A, K] - ) => Task[S] 
+ AtomicAny[HotStore[IO, C, P, A, K]], + AtomicAny[HotStore[IO, C, P, A, K]], + ISpace[IO, C, P, A, K], + IReplaySpace[IO, C, P, A, K] + ) => IO[S] )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[Task, P, A] + m: Match[IO, P, A] ): S } @@ -1258,55 +1259,58 @@ trait InMemoryReplayRSpaceTestsBase[C, P, A, K] extends ReplayRSpaceTestsBase[C, import SchedulerPools.global override def fixture[S]( f: ( - AtomicAny[HotStore[Task, C, P, A, K]], - AtomicAny[HotStore[Task, C, P, A, K]], - ISpace[Task, C, P, A, K], - IReplaySpace[Task, C, P, A, K] - ) => Task[S] + AtomicAny[HotStore[IO, C, P, A, K]], + AtomicAny[HotStore[IO, C, P, A, K]], + ISpace[IO, C, P, A, K], + IReplaySpace[IO, C, P, A, K] + ) => IO[S] )( implicit sc: Serialize[C], sp: Serialize[P], sa: Serialize[A], sk: Serialize[K], - m: Match[Task, P, A] + m: Match[IO, P, A] ): S = { - implicit val log: Log[Task] = Log.log[Task] - implicit val metricsF: Metrics[Task] = new Metrics.MetricsNOP[Task]() - implicit val spanF: Span[Task] = NoopSpan[Task]() - implicit val kvm = InMemoryStoreManager[Task] + import coop.rchain.shared.RChainScheduler._ + implicit val log: Log[IO] = Log.log[IO] + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val kvm = InMemoryStoreManager[IO] (for { roots <- kvm.store("roots") cold <- kvm.store("cold") history <- kvm.store("history") - historyRepository <- HistoryRepositoryInstances.lmdbRepository[Task, C, P, A, K]( + historyRepository <- HistoryRepositoryInstances.lmdbRepository[IO, C, P, A, K]( roots, cold, history ) - cache <- Ref[Task].of(HotStoreState[C, P, A, K]()) + cache <- Ref[IO].of(HotStoreState[C, P, A, K]()) historyReader <- historyRepository.getHistoryReader(historyRepository.root) store <- { val hr = historyReader.base - HotStore[Task, C, P, A, K](cache, hr).map(AtomicAny(_)) + HotStore[IO, C, P, A, K](cache, hr).map(AtomicAny(_)) } - space = new 
RSpace[Task, C, P, A, K]( + space = new RSpace[IO, C, P, A, K]( historyRepository, - store + store, + rholangEC ) - historyCache <- Ref[Task].of(HotStoreState[C, P, A, K]()) + historyCache <- Ref[IO].of(HotStoreState[C, P, A, K]()) replayStore <- { val hr = historyReader.base - HotStore[Task, C, P, A, K](historyCache, hr).map(AtomicAny(_)) + HotStore[IO, C, P, A, K](historyCache, hr).map(AtomicAny(_)) } - replaySpace = new ReplayRSpace[Task, C, P, A, K]( + replaySpace = new ReplayRSpace[IO, C, P, A, K]( historyRepository, - replayStore + replayStore, + rholangEC ) res <- f(store, replayStore, space, replaySpace) - } yield { res }).runSyncUnsafe() + } yield { res }).unsafeRunSync } } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala index ae570e1da22..b7188991feb 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace +import cats.Parallel import cats.effect._ import cats.syntax.all._ import coop.rchain.rspace.examples.StringExamples._ @@ -10,7 +11,6 @@ import coop.rchain.rspace.test._ import coop.rchain.rspace.trace.Consume import coop.rchain.rspace.util.{getK, runK, unpackOption} import coop.rchain.shared.Serialize -import monix.eval.Task import org.scalatestplus.scalacheck._ import scala.collection.SortedSet @@ -1184,9 +1184,9 @@ trait StorageActionsTests[F[_]] } class InMemoryHotStoreStorageActionsTests - extends InMemoryHotStoreTestsBase[Task] + extends InMemoryHotStoreTestsBase[IO] with TaskTests[String, Pattern, Nothing, String, StringsCaptor] - with StorageActionsTests[Task] - with StorageTestsBase[Task, String, Pattern, String, StringsCaptor] { - implicit val parF = Task.catsParallel + with StorageActionsTests[IO] + with StorageTestsBase[IO, String, Pattern, String, StringsCaptor] { + implicit val parF: Parallel[IO] = 
IO.ioParallel } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala index 725954eed56..3ee00536d8c 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala @@ -1,13 +1,15 @@ package coop.rchain.rspace +import cats.Parallel.Aux import cats._ +import cats.effect.{Concurrent, ContextShift, IO} import cats.syntax.all._ import coop.rchain.rspace.examples.AddressBookExample import coop.rchain.rspace.examples.AddressBookExample._ import coop.rchain.rspace.examples.AddressBookExample.implicits._ import coop.rchain.rspace.test._ import coop.rchain.rspace.util.{getK, runK, unpackOption} -import monix.eval.Task +import coop.rchain.shared.RChainScheduler import monix.execution.atomic.AtomicAny import scodec.Codec @@ -272,7 +274,11 @@ abstract class InMemoryHotStoreStorageExamplesTestsBase[F[_]] (hr, ts) => { val atomicStore = AtomicAny(ts) val space = - new RSpace[F, Channel, Pattern, Entry, EntriesCaptor](hr, atomicStore) + new RSpace[F, Channel, Pattern, Entry, EntriesCaptor]( + hr, + atomicStore, + RChainScheduler.rholangEC + ) Applicative[F].pure((ts, atomicStore, space)) } setupTestingSpace(creator, f) @@ -280,8 +286,8 @@ abstract class InMemoryHotStoreStorageExamplesTestsBase[F[_]] } class InMemoryHotStoreStorageExamplesTests - extends InMemoryHotStoreStorageExamplesTestsBase[Task] + extends InMemoryHotStoreStorageExamplesTestsBase[IO] with TaskTests[Channel, Pattern, Entry, Entry, EntriesCaptor] - with StorageExamplesTests[Task] { - implicit val parF = Task.catsParallel + with StorageExamplesTests[IO] { + implicit val parF: Parallel[IO] = IO.ioParallel } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala index f025148eb68..ab9668d0843 100644 --- 
a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala @@ -11,7 +11,7 @@ import coop.rchain.rspace.examples.StringExamples._ import coop.rchain.rspace.examples.StringExamples.implicits._ import coop.rchain.rspace.history.{HistoryRepository, HistoryRepositoryInstances} import coop.rchain.rspace.syntax._ -import coop.rchain.shared.{Log, Serialize} +import coop.rchain.shared.{Log, RChainScheduler, Serialize} import coop.rchain.store.InMemoryStoreManager import monix.eval._ import monix.execution.atomic.AtomicAny @@ -88,28 +88,14 @@ trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with } } -trait TaskTests[C, P, A, R, K] extends StorageTestsBase[Task, C, P, R, K] { - import scala.concurrent.ExecutionContext - - implicit override val concurrentF: Concurrent[Task] = - new monix.eval.instances.CatsConcurrentEffectForTask()( - monix.execution.Scheduler.Implicits.global, - Task.defaultOptions - ) - implicit val logF: Log[Task] = Log.log[Task] - implicit val metricsF: Metrics[Task] = new Metrics.MetricsNOP[Task]() - implicit val spanF: Span[Task] = NoopSpan[Task]() - - implicit override val monadF: Monad[Task] = concurrentF - implicit override val contextShiftF: ContextShift[Task] = new ContextShift[Task] { - override def shift: Task[Unit] = - Task.shift - override def evalOn[B](ec: ExecutionContext)(fa: Task[B]): Task[B] = - Task.shift(ec).bracket(_ => fa)(_ => Task.shift) - } - - import monix.execution.Scheduler.Implicits.global - override def run[RES](f: Task[RES]): RES = f.runSyncUnsafe() +trait TaskTests[C, P, A, R, K] extends StorageTestsBase[IO, C, P, R, K] { + implicit val logF: Log[IO] = Log.log[IO] + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val contextShiftF: ContextShift[IO] = coop.rchain.shared.RChainScheduler.csIO + implicit val concurrentF: Concurrent[IO] = Concurrent[IO] + 
implicit val monadF: Monad[IO]            = Monad[IO] + override def run[RES](f: IO[RES]): RES = f.unsafeRunSync } abstract class InMemoryHotStoreTestsBase[F[_]] @@ -121,7 +107,11 @@ abstract class InMemoryHotStoreTestsBase[F[_]] (hr, ts) => { val atomicStore = AtomicAny(ts) val space = - new RSpace[F, String, Pattern, String, StringsCaptor](hr, atomicStore) + new RSpace[F, String, Pattern, String, StringsCaptor]( + hr, + atomicStore, + RChainScheduler.rholangEC + ) Applicative[F].pure((ts, atomicStore, space)) } setupTestingSpace(creator, f) diff --git a/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala b/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala index 76677ade83d..3670080f839 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala @@ -2,7 +2,7 @@ package coop.rchain.rspace import cats.Id import org.scalatest.enablers.Definition -//noinspection ConvertExpressionToSAM +//noinspection ConvertExpressionToSAM trait TestImplicitHelpers { // Some helpers for usage only in the tests -- save us A LOT of explicit casting from Either to Option // it is safe because left type of `Either` is `Nothing` -- we don't expect any invalid states from the matcher diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala index f94079b9620..10a3384b97c 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala @@ -1,33 +1,34 @@ package coop.rchain.rspace.concurrent +import cats.effect.{IO, Sync} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import monix.eval.Task + import scala.collection._ import scala.collection.immutable.Seq - import coop.rchain.metrics import coop.rchain.metrics.Metrics +import 
coop.rchain.shared.RChainScheduler._ class MultiLockTest extends AnyFlatSpec with Matchers { import monix.execution.Scheduler implicit val s = Scheduler.fixedPool("test-scheduler", 8) - implicit val metrics = new Metrics.MetricsNOP[Task] + implicit val metrics = new Metrics.MetricsNOP[IO] - implicit class TaskOps[A](task: Task[A])(implicit scheduler: Scheduler) { + implicit class TaskOps[A](task: IO[A]) { import scala.concurrent.Await import scala.concurrent.duration._ def unsafeRunSync: A = - Await.result(task.runToFuture, Duration.Inf) + Await.result(task.unsafeToFuture(), Duration.Inf) } - val tested = new MultiLock[Task, String](Metrics.BaseSource) + val tested = new MultiLock[IO, String](Metrics.BaseSource) def acquire(m: mutable.Map[String, Int])(seq: Seq[String]) = tested.acquire(seq) { - Task.delay { + IO.delay { for { k <- seq v = m.getOrElse(k, 0) + 1 @@ -65,11 +66,9 @@ class MultiLockTest extends AnyFlatSpec with Matchers { (for { _ <- acquire(m)(Seq("a", "b")) - _ <- Task - .delay { - tested.acquire(Seq("a", "c")) { throw new Exception() } - } - .onErrorRecoverWith { case _: Exception => Task.now(()) } + _ <- IO.delay { + tested.acquire(Seq("a", "c")) { throw new Exception() } + }.attempt _ <- acquire(m)(Seq("a", "c")) } yield ()).unsafeRunSync @@ -91,9 +90,6 @@ class MultiLockTest extends AnyFlatSpec with Matchers { import cats.effect.{Concurrent, ContextShift, IO} import cats.implicits._ - implicit val ioContextShift: ContextShift[IO] = - IO.contextShift(scala.concurrent.ExecutionContext.Implicits.global) - implicit val metrics: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO] val tested = new MultiLock[IO, String](Metrics.BaseSource) diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala index 356a9b9e92a..854345770be 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala +++ 
b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala @@ -1,33 +1,33 @@ package coop.rchain.rspace.concurrent +import cats.effect.{IO, Sync} import coop.rchain.metrics.Metrics -import monix.eval.Task import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import cats.syntax.all._ class TwoStepLockTest extends AnyFlatSpec with Matchers { - import monix.execution.Scheduler - implicit val s = Scheduler.fixedPool("test-scheduler", 8) - implicit val metrics = new Metrics.MetricsNOP[Task] + import coop.rchain.shared.RChainScheduler._ + implicit val metrics = new Metrics.MetricsNOP[IO] "DefaultTwoStepLock" should "gate concurrent access to shared resources" in { - val lock = new ConcurrentTwoStepLockF[Task, String](Metrics.BaseSource) + val lock = new ConcurrentTwoStepLockF[IO, String](Metrics.BaseSource) var a = 0 val t1 = acquireLock(lock, List("a", "b"), List("w1", "w2"), { a = a + 1 }) val t2 = acquireLock(lock, List("a", "b"), List("w1", "w2"), { a = a - 3 }) val t3 = acquireLock(lock, List("a", "b"), List("w1", "w2"), { a = a + 5 }) val t4 = acquireLock(lock, List("a", "b"), List("w1", "w2"), { a = a - 8 }) - val r = Task.parSequenceUnordered(List(t1, t2, t3, t4)) - r.runSyncUnsafe() + val r = List(t1, t2, t3, t4).parSequence + r.unsafeRunSync } def acquireLock( - lock: TwoStepLock[Task, String], + lock: TwoStepLock[IO, String], a: List[String], b: List[String], update: => Unit - ): Task[Unit] = - lock.acquire(a)(() => Task.delay(b))(Task.delay(update)) + ): IO[Unit] = + lock.acquire(a)(() => IO.delay(b))(IO.delay(update)) } diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/Blake2b256HashTests.scala b/rspace/src/test/scala/coop/rchain/rspace/history/Blake2b256HashTests.scala index 2db4a62aa13..6222e1452b6 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/Blake2b256HashTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/Blake2b256HashTests.scala @@ -14,7 +14,7 @@ class 
Blake2b256HashTests extends AnyFlatSpec with Checkers { "The bytes of a Blake2b256 hash" should "be the same as if it was created directly" in { - //noinspection ReferenceMustBePrefixed + //noinspection ReferenceMustBePrefixed val propCreate: Prop = Prop.forAll { (bytes: Array[Byte]) => Arrays.equals(Blake2b256.hash(bytes), Blake2b256Hash.create(bytes).bytes.toArray) } diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala index f05b922c475..3f69e696902 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala @@ -1,12 +1,11 @@ package coop.rchain.rspace.history +import cats.effect.IO import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.TestData._ import coop.rchain.shared.Base16 import coop.rchain.store.InMemoryKeyValueStore -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.Assertion import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -230,9 +229,9 @@ class HistoryActionTests extends AnyFlatSpec with Matchers { for { emptyHistory <- emptyHistoryF _ <- (1 to 10).toList.foldLeftM[ - Task, + IO, ( - History[Task], + History[IO], List[InsertAction], TrieMap[KeySegment, Blake2b256Hash] ) @@ -276,17 +275,19 @@ class HistoryActionTests extends AnyFlatSpec with Matchers { } yield () } - protected def withEmptyHistory(f: Task[History[Task]] => Task[Unit]): Unit = { - val emptyHistory = History.create(History.emptyRootHash, InMemoryKeyValueStore[Task]) - f(emptyHistory).runSyncUnsafe(1.minute) + import coop.rchain.shared.RChainScheduler._ + + protected def withEmptyHistory(f: IO[History[IO]] => IO[Unit]): Unit = { + val emptyHistory = History.create(History.emptyRootHash, InMemoryKeyValueStore[IO]) + 
f(emptyHistory).unsafeRunTimed(1.minute) } protected def withEmptyHistoryAndStore( - f: (Task[History[Task]], InMemoryKeyValueStore[Task]) => Task[Unit] + f: (IO[History[IO]], InMemoryKeyValueStore[IO]) => IO[Unit] ): Unit = { - val store = InMemoryKeyValueStore[Task] + val store = InMemoryKeyValueStore[IO] val emptyHistory = History.create(History.emptyRootHash, store) - f(emptyHistory, store).runSyncUnsafe(20.seconds) + f(emptyHistory, store).unsafeRunTimed(20.seconds) } def randomKey(size: Int): KeySegment = diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala index eea1f4aa350..df3a1de6d98 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.history -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{NoopSpan, Span} import coop.rchain.rspace._ @@ -13,8 +13,6 @@ import coop.rchain.rspace.test.ArbitraryInstances.{arbitraryDatumString, _} import coop.rchain.shared.PathOps._ import coop.rchain.shared.{Log, Serialize} import coop.rchain.store.{InMemoryKeyValueStore, InMemoryStoreManager} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalacheck.{Arbitrary, Gen, Shrink} import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec @@ -23,6 +21,7 @@ import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks import java.nio.file.{Files, Path} import scala.concurrent.duration._ +import coop.rchain.shared.RChainScheduler._ class LMDBHistoryRepositoryGenerativeSpec extends HistoryRepositoryGenerativeDefinition @@ -30,23 +29,23 @@ class LMDBHistoryRepositoryGenerativeSpec val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") - val kvm = 
InMemoryStoreManager[Task] + val kvm = InMemoryStoreManager[IO] - override def repo: Task[HistoryRepository[Task, String, Pattern, String, StringsCaptor]] = { - implicit val log: Log[Task] = new Log.NOPLog[Task] - implicit val span: Span[Task] = new NoopSpan[Task] + override def repo: IO[HistoryRepository[IO, String, Pattern, String, StringsCaptor]] = { + implicit val log: Log[IO] = new Log.NOPLog[IO] + implicit val span: Span[IO] = new NoopSpan[IO] for { historyLmdbKVStore <- kvm.store("history") coldLmdbKVStore <- kvm.store("cold") coldStore = ColdStoreInstances.coldStore(coldLmdbKVStore) rootsLmdbKVStore <- kvm.store("roots") rootsStore = RootsStoreInstances.rootsStore(rootsLmdbKVStore) - rootRepository = new RootRepository[Task](rootsStore) + rootRepository = new RootRepository[IO](rootsStore) emptyHistory <- History.create(History.emptyRootHash, historyLmdbKVStore) - exporter = RSpaceExporterStore[Task](historyLmdbKVStore, coldLmdbKVStore, rootsLmdbKVStore) - importer = RSpaceImporterStore[Task](historyLmdbKVStore, coldLmdbKVStore, rootsLmdbKVStore) - repository: HistoryRepository[Task, String, Pattern, String, StringsCaptor] = HistoryRepositoryImpl - .apply[Task, String, Pattern, String, StringsCaptor]( + exporter = RSpaceExporterStore[IO](historyLmdbKVStore, coldLmdbKVStore, rootsLmdbKVStore) + importer = RSpaceImporterStore[IO](historyLmdbKVStore, coldLmdbKVStore, rootsLmdbKVStore) + repository: HistoryRepository[IO, String, Pattern, String, StringsCaptor] = HistoryRepositoryImpl + .apply[IO, String, Pattern, String, StringsCaptor]( emptyHistory, rootRepository, coldStore, @@ -68,13 +67,13 @@ class InMemHistoryRepositoryGenerativeSpec extends HistoryRepositoryGenerativeDefinition with InMemoryHistoryRepositoryTestBase { - override def repo: Task[HistoryRepository[Task, String, Pattern, String, StringsCaptor]] = { + override def repo: IO[HistoryRepository[IO, String, Pattern, String, StringsCaptor]] = { - implicit val log: Log[Task] = new Log.NOPLog[Task] - 
implicit val span: Span[Task] = new NoopSpan[Task] + implicit val log: Log[IO] = new Log.NOPLog[IO] + implicit val span: Span[IO] = new NoopSpan[IO] for { - emptyHistory <- History.create(History.emptyRootHash, InMemoryKeyValueStore[Task]) - r = HistoryRepositoryImpl[Task, String, Pattern, String, StringsCaptor]( + emptyHistory <- History.create(History.emptyRootHash, InMemoryKeyValueStore[IO]) + r = HistoryRepositoryImpl[IO, String, Pattern, String, StringsCaptor]( emptyHistory, rootRepository, inMemColdStore, @@ -102,7 +101,7 @@ abstract class HistoryRepositoryGenerativeDefinition implicit def noShrink[T]: Shrink[T] = Shrink.shrinkAny - def repo: Task[HistoryRepository[Task, String, Pattern, String, StringsCaptor]] + def repo: IO[HistoryRepository[IO, String, Pattern, String, StringsCaptor]] "HistoryRepository" should "accept all HotStoreActions" in forAll( minSize(1), @@ -120,7 +119,7 @@ abstract class HistoryRepositoryGenerativeDefinition } yield next } } - .runSyncUnsafe(20.seconds) + .unsafeRunTimed(20.seconds) } def checkData(seq: Seq[Datum[String]], data: Seq[Datum[Any]]): Assertion = @@ -139,8 +138,8 @@ abstract class HistoryRepositoryGenerativeDefinition def checkActionResult( action: HotStoreAction, - historyReader: HistoryReader[Task, Blake2b256Hash, String, Pattern, String, StringsCaptor] - ): Task[Unit] = { + historyReader: HistoryReader[IO, Blake2b256Hash, String, Pattern, String, StringsCaptor] + ): IO[Unit] = { val reader = historyReader.base action match { case InsertData(channel: String, data) => @@ -157,7 +156,7 @@ abstract class HistoryRepositoryGenerativeDefinition reader.getJoins(channel).map(_ shouldBe empty) case DeleteContinuations(channels) => reader.getContinuations(channels.asInstanceOf[Seq[String]]).map(_ shouldBe empty) - case _ => Sync[Task].raiseError(new RuntimeException("unknown action")) + case _ => Sync[IO].raiseError(new RuntimeException("unknown action")) } } diff --git 
a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala index 70c969b988c..221ac74b4ff 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.history -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{NoopSpan, Span} import coop.rchain.rspace._ @@ -17,8 +17,6 @@ import coop.rchain.shared.Log.NOPLog import coop.rchain.shared.syntax._ import coop.rchain.state.TrieNode import coop.rchain.store.InMemoryKeyValueStore -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.OptionValues @@ -35,7 +33,7 @@ class HistoryRepositorySpec with OptionValues with InMemoryHistoryRepositoryTestBase { - type TestHistoryRepository = HistoryRepository[Task, String, String, String, String] + type TestHistoryRepository = HistoryRepository[IO, String, String, String, String] "HistoryRepository" should "process insert one datum" in withEmptyRepository { repo => val testDatum = datum(1) @@ -175,15 +173,16 @@ class HistoryRepositorySpec def datum(s: Any): Datum[String] = Datum[String]("data-" + s, false, Produce(randomBlake, randomBlake, false)) - protected def withEmptyRepository(f: TestHistoryRepository => Task[Unit]): Unit = { - val pastRoots = rootRepository - implicit val log: Log[Task] = new NOPLog() - implicit val span: Span[Task] = new NoopSpan[Task]() + protected def withEmptyRepository(f: TestHistoryRepository => IO[Unit]): Unit = { + val pastRoots = rootRepository + implicit val log: Log[IO] = new NOPLog() + implicit val span: Span[IO] = new NoopSpan[IO]() + import coop.rchain.shared.RChainScheduler._ (for { - emptyHistory <- 
History.create(History.emptyRootHash, InMemoryKeyValueStore[Task]) + emptyHistory <- History.create(History.emptyRootHash, InMemoryKeyValueStore[IO]) _ <- pastRoots.commit(History.emptyRootHash) - repo = HistoryRepositoryImpl[Task, String, String, String, String]( + repo = HistoryRepositoryImpl[IO, String, String, String, String]( emptyHistory, pastRoots, inMemColdStore, @@ -195,7 +194,7 @@ class HistoryRepositorySpec stringSerialize ) _ <- f(repo) - } yield ()).runSyncUnsafe(20.seconds) + } yield ()).unsafeRunTimed(20.seconds) } } @@ -206,17 +205,17 @@ object RuntimeException { trait InMemoryHistoryRepositoryTestBase { def inmemRootsStore = - new RootsStore[Task] { + new RootsStore[IO] { var roots: Set[Blake2b256Hash] = Set.empty var maybeCurrentRoot: Option[Blake2b256Hash] = None - override def currentRoot(): Task[Option[Blake2b256Hash]] = - Task.delay { + override def currentRoot(): IO[Option[Blake2b256Hash]] = + IO.delay { maybeCurrentRoot } - override def validateAndSetCurrentRoot(key: Blake2b256Hash): Task[Option[Blake2b256Hash]] = - Task.delay { + override def validateAndSetCurrentRoot(key: Blake2b256Hash): IO[Option[Blake2b256Hash]] = + IO.delay { if (roots.contains(key)) { maybeCurrentRoot = Some(key) maybeCurrentRoot @@ -225,8 +224,8 @@ trait InMemoryHistoryRepositoryTestBase { } } - override def recordRoot(key: Blake2b256Hash): Task[Unit] = - Task.delay { + override def recordRoot(key: Blake2b256Hash): IO[Unit] = + IO.delay { maybeCurrentRoot = Some(key) roots += key } @@ -234,10 +233,10 @@ trait InMemoryHistoryRepositoryTestBase { } def rootRepository = - new RootRepository[Task](inmemRootsStore) + new RootRepository[IO](inmemRootsStore) - def inMemColdStore: ColdKeyValueStore[Task] = { - val store = InMemoryKeyValueStore[Task] + def inMemColdStore: ColdKeyValueStore[IO] = { + val store = InMemoryKeyValueStore[IO] store.toTypedStore(codecBlake2b256Hash, codecPersistedData) } diff --git 
a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala index be3b64a30ab..c53efe46400 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.history -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.RadixTree._ @@ -8,8 +8,6 @@ import coop.rchain.rspace.history.instances.RadixHistory import coop.rchain.shared.Base16 import coop.rchain.shared.syntax.{sharedSyntaxKeyValueStore, sharedSyntaxKeyValueTypedStore} import coop.rchain.store.{InMemoryKeyValueStore, KeyValueTypedStore} -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.OptionValues @@ -208,7 +206,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { noPrintFlag = true ) - itemIdx <- Sync[Task].delay(byteToInt(dataSet.head.rKey.head)) + itemIdx <- IO.delay(byteToInt(dataSet.head.rKey.head)) itemToDelete = rootNode1(itemIdx) item3Opt <- impl.delete(itemToDelete, dataSet.head.rKey.tail) @@ -301,7 +299,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { "Call of functions saveNode() and then commit()" should "put node into store" in withImplAndStore { (impl, inMemoStore) => for { - nodesCount1 <- Sync[Task].delay(inMemoStore.numRecords()) + nodesCount1 <- IO.delay(inMemoStore.numRecords()) _ = impl.saveNode(emptyNode) _ <- impl.commit @@ -314,7 +312,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { "function loadNode" should "load node from store" in withImplAndStore { (impl, _) => for { - hash <- Sync[Task].delay(impl.saveNode(emptyNode)) + hash <- 
IO.delay(impl.saveNode(emptyNode)) _ <- impl.commit _ = impl.clearReadCache() @@ -327,7 +325,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { "Trying to load a non-existent node" should "throw error" in withImplAndStore { (impl, store) => { for { - hash <- Sync[Task].delay(impl.saveNode(emptyNode)) + hash <- IO.delay(impl.saveNode(emptyNode)) _ <- impl.commit _ = store.clear() // Clearing database _ = impl.clearReadCache() @@ -487,7 +485,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { None, skipSize = 0, takeSize = 100, - x => Sync[Task].delay(localStorage.get(x)), + x => IO.delay(localStorage.get(x)), exportSettings ) } @@ -706,7 +704,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { ) case class ExportParameters( rootHash: Blake2b256Hash, // hash - typedStore: KeyValueTypedStore[Task, Blake2b256Hash, ByteVector], + typedStore: KeyValueTypedStore[IO, Blake2b256Hash, ByteVector], takeSize: Int, // take size skipSize: Int, // skip size withSkip: Boolean, // start with skip is true @@ -721,11 +719,11 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { def validateMultipageExport( rootHash: Blake2b256Hash, - store: KeyValueTypedStore[Task, Blake2b256Hash, ByteVector], + store: KeyValueTypedStore[IO, Blake2b256Hash, ByteVector], withSkip: Boolean - ): Task[MultipageExportResults] = { + ): IO[MultipageExportResults] = { - def multipageExport(p: ExportParameters): Task[Either[ExportParameters, ExportParameters]] = { + def multipageExport(p: ExportParameters): IO[Either[ExportParameters, ExportParameters]] = { def collectExportData(prevData: ExportData, pageData: ExportData): ExportData = ExportData( prevData.nodePrefixes ++ pageData.nodePrefixes, @@ -788,7 +786,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { skipSize = 0, takeSize = 100, x => - Sync[Task] + Sync[IO] .delay(localStorage.get(x)), exportSettings ) @@ -799,13 
+797,14 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { private def withImplAndStore( f: ( - RadixTreeImpl[Task], - InMemoryKeyValueStore[Task] - ) => Task[Unit] + RadixTreeImpl[IO], + InMemoryKeyValueStore[IO] + ) => IO[Unit] ): Unit = { - val store = InMemoryKeyValueStore[Task] + import coop.rchain.shared.RChainScheduler._ + val store = InMemoryKeyValueStore[IO] val typedStore = store.toTypedStore(RadixHistory.codecBlakeHash, scodec.codecs.bytes) - val radixTreeImpl = new RadixTreeImpl[Task](typedStore) - f(radixTreeImpl, store).runSyncUnsafe(20.seconds) + val radixTreeImpl = new RadixTreeImpl[IO](typedStore) + f(radixTreeImpl, store).unsafeRunTimed(20.seconds) } }; diff --git a/shared/src/main/scala/coop/rchain/grpcmonix/GrpcMonix.scala b/shared/src/main/scala/coop/rchain/grpcmonix/GrpcMonix.scala deleted file mode 100644 index aab1c17b068..00000000000 --- a/shared/src/main/scala/coop/rchain/grpcmonix/GrpcMonix.scala +++ /dev/null @@ -1,111 +0,0 @@ -package coop.rchain.grpcmonix - -import scala.concurrent.Future -import scala.util.control.NonFatal - -import coop.rchain.shared.{Log, LogSource} - -import com.google.common.util.concurrent.ListenableFuture -import io.grpc.stub.StreamObserver -import io.grpc.{Status, StatusRuntimeException} -import monix.eval.Task -import monix.execution._ -import monix.execution.Ack.{Continue, Stop} -import monix.reactive.Observable -import monix.reactive.Observable.Operator -import monix.reactive.observers.Subscriber -import monix.reactive.subjects.PublishSubject -import org.reactivestreams.{Subscriber => SubscriberR} -import scalapb.grpc.Grpc - -object GrpcMonix { - - private val logger = Log.logId - implicit private val logSource: LogSource = LogSource(this.getClass) - - type GrpcOperator[I, O] = StreamObserver[O] => StreamObserver[I] - type Transformer[I, O] = Observable[I] => Observable[O] - - def guavaFutureToMonixTask[T](future: ListenableFuture[T]): Task[T] = - Task.deferFuture { - 
Grpc.guavaFuture2ScalaFuture(future) - } - - def grpcOperatorToMonixOperator[I, O](grpcOperator: GrpcOperator[I, O]): Operator[I, O] = { - outputSubsriber: Subscriber[O] => - val outputObserver: StreamObserver[O] = monixSubscriberToGrpcObserver(outputSubsriber) - val inputObserver: StreamObserver[I] = grpcOperator(outputObserver) - grpcObserverToMonixSubscriber(inputObserver, outputSubsriber.scheduler) - } - - @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) - def monixSubscriberToGrpcObserver[T](subscriber: Subscriber[T]): StreamObserver[T] = - new StreamObserver[T] { - override def onError(t: Throwable): Unit = subscriber.onError(t) - override def onCompleted(): Unit = subscriber.onComplete() - override def onNext(value: T): Unit = subscriber.onNext(value) - } - - @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) - def reactiveSubscriberToGrpcObserver[T](subscriber: SubscriberR[_ >: T]): StreamObserver[T] = - new StreamObserver[T] { - override def onError(t: Throwable): Unit = subscriber.onError(t) - override def onCompleted(): Unit = subscriber.onComplete() - override def onNext(value: T): Unit = subscriber.onNext(value) - } - - @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) - def grpcObserverToMonixSubscriber[T](observer: StreamObserver[T], s: Scheduler): Subscriber[T] = - new Subscriber[T] { - implicit override def scheduler: Scheduler = s - override def onError(t: Throwable): Unit = observer.onError(t) - override def onComplete(): Unit = observer.onCompleted() - override def onNext(value: T): Future[Ack] = - try { - observer.onNext(value) - Continue - } catch { - case t: Throwable => - observer.onError(t) - Stop - } - } - - def grpcObserverToMonixCallback[T](observer: StreamObserver[T]): Callback[Throwable, T] = - new Callback[Throwable, T] { - override def onError(t: Throwable): Unit = observer.onError(t) - override def onSuccess(value: T): Unit = - try { - observer.onNext(value) - 
observer.onCompleted() - } catch { - case sre: StatusRuntimeException if sre.getStatus.getCode == Status.Code.CANCELLED => - logger.warn(s"Failed to send a response: peer request timeout") - case NonFatal(e) => logger.warn(s"Failed to send a response: ${e.getMessage}") - } - } - - def liftByGrpcOperator[I, O]( - observable: Observable[I], - operator: GrpcOperator[I, O] - ): Observable[O] = - observable.liftByOperator( - grpcOperatorToMonixOperator(operator) - ) - - @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) - def unliftByTransformer[I, O]( - transformer: Transformer[I, O], - subscriber: Subscriber[O] - ): Subscriber[I] = - new Subscriber[I] { - private[this] val subject = PublishSubject[I]() - transformer(subject).subscribe(subscriber) - - implicit override def scheduler: Scheduler = subscriber.scheduler - override def onError(t: Throwable): Unit = subject.onError(t) - override def onComplete(): Unit = subject.onComplete() - override def onNext(value: I): Future[Ack] = subject.onNext(value) - } - -} diff --git a/shared/src/main/scala/coop/rchain/monix/Monixable.scala b/shared/src/main/scala/coop/rchain/monix/Monixable.scala deleted file mode 100644 index 740624c5a6a..00000000000 --- a/shared/src/main/scala/coop/rchain/monix/Monixable.scala +++ /dev/null @@ -1,35 +0,0 @@ -package coop.rchain.monix - -import cats.data.ReaderT -import cats.~> -import monix.eval.Task - -/** - * The purpose of this interface is to be a bridge to abstract effect type in old code which - * uses monix Task directly. 
- */ -trait Monixable[F[_]] { - def toTask[A](t: F[A]): Task[A] - - def fromTask[A](t: Task[A]): F[A] -} - -object Monixable { - def apply[F[_]](implicit instance: Monixable[F]): Monixable[F] = instance - - // Default implementation is just the identity for monix Task - implicit object MonixableTask extends Monixable[Task] { - override def toTask[A](task: Task[A]): Task[A] = task - - override def fromTask[A](task: Task[A]): Task[A] = task - } - - // FunctorK (specific to ReaderT) - implicit class MonixMapKOps[F[_]](val m: Monixable[F]) extends AnyVal { - def mapK[S](nt: F ~> ReaderT[F, S, *], s: S): Monixable[ReaderT[F, S, *]] = - new Monixable[ReaderT[F, S, *]] { - override def toTask[A](t: ReaderT[F, S, A]): Task[A] = m.toTask(t.run(s)) - override def fromTask[A](t: Task[A]): ReaderT[F, S, A] = nt(m.fromTask(t)) - } - } -} diff --git a/shared/src/main/scala/coop/rchain/monix/MonixableSyntax.scala b/shared/src/main/scala/coop/rchain/monix/MonixableSyntax.scala deleted file mode 100644 index 29b372368b8..00000000000 --- a/shared/src/main/scala/coop/rchain/monix/MonixableSyntax.scala +++ /dev/null @@ -1,20 +0,0 @@ -package coop.rchain.monix - -import cats.~> -import monix.eval.Task - -trait MonixableSyntax { - implicit final def sharedSyntaxdMonixableToTask[F[_], A](fa: F[A]): MonixableToTaskOps[F, A] = - new MonixableToTaskOps[F, A](fa) - - implicit final def sharedSyntaxdMonixableFromTask[A](task: Task[A]): MonixableFromTaskOps[A] = - new MonixableFromTaskOps(task) -} - -final class MonixableToTaskOps[F[_], A](val fa: F[A]) extends AnyVal { - def toTask(implicit m: Monixable[F]): Task[A] = Monixable[F].toTask(fa) -} - -final class MonixableFromTaskOps[A](val task: Task[A]) extends AnyVal { - def fromTask[F[_]: Monixable]: F[A] = Monixable[F].fromTask(task) -} diff --git a/shared/src/main/scala/coop/rchain/shared/MVarMonadState.scala b/shared/src/main/scala/coop/rchain/shared/MVarMonadState.scala deleted file mode 100644 index 5924ebb8c22..00000000000 --- 
a/shared/src/main/scala/coop/rchain/shared/MVarMonadState.scala +++ /dev/null @@ -1,41 +0,0 @@ -package coop.rchain.shared - -import cats._ -import cats.syntax.all._ -import cats.mtl.MonadState - -import monix.catnap.MVar -import monix.eval.Task - -class MVarMonadState[S](state: Task[MVar[Task, S]])(implicit val monad: Monad[Task]) - extends MonadState[Task, S] { - /* - Removes a value from the state. - Blocks if the state is empty. - */ - def get: Task[S] = state >>= (_.take) - - /* - Sets a value in the state - Blocks if the state is non-empty - */ - def set(s: S): Task[Unit] = state >>= (_.put(s)) - - /* - Don't use inspect for a read & write access pattern. - Don't use inspect & set or inspect & modify - because the state may have changed in the meantime - */ - def inspect[A](f: S => A): Task[A] = state >>= (_.read.map(f)) - - /* - Use with care. You probably should use set instead. - Don't use get & modify together or it may block until someone calls set. - Don't use inspect & modify because the state may have changed in the meantime - */ - def modify(f: S => S): Task[Unit] = - for { - s <- get - _ <- set(f(s)) - } yield () -} diff --git a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala index 330e5d7cdcc..3e27c3e9305 100644 --- a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala +++ b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala @@ -1,16 +1,27 @@ package coop.rchain.shared -import monix.execution.Scheduler -import monix.execution.schedulers.SchedulerService +import cats.effect.{ContextShift, IO} + +import java.util.concurrent.{Executors, ThreadFactory} +import java.util.concurrent.atomic.AtomicLong object RChainScheduler { - val availableProcessors: Int = java.lang.Runtime.getRuntime.availableProcessors() - // TODO: make it configurable - // TODO: fine tune this - val interpreterScheduler: SchedulerService = Scheduler.forkJoin( - name = 
"interpreter-rspace", - parallelism = availableProcessors * 2, - maxThreads = availableProcessors * 2, - reporter = UncaughtExceptionLogger - ) + implicit val mainEC = scala.concurrent.ExecutionContext.Implicits.global + implicit val csIO: ContextShift[IO] = IO.contextShift(mainEC) + val rholangEC = mainEC + implicit val timer = IO.timer(mainEC) + + val ioScheduler = Executors.newCachedThreadPool(new ThreadFactory { + private val counter = new AtomicLong(0L) + + def newThread(r: Runnable) = { + val th = new Thread(r) + th.setName( + "io-thread-" + + counter.getAndIncrement.toString + ) + th.setDaemon(true) + th + } + }) } diff --git a/shared/src/main/scala/coop/rchain/shared/UncaughtExceptionLogger.scala b/shared/src/main/scala/coop/rchain/shared/UncaughtExceptionLogger.scala deleted file mode 100644 index fcd7b6b6741..00000000000 --- a/shared/src/main/scala/coop/rchain/shared/UncaughtExceptionLogger.scala +++ /dev/null @@ -1,13 +0,0 @@ -package coop.rchain.shared - -import cats.Id - -import monix.execution.UncaughtExceptionReporter - -object UncaughtExceptionLogger extends UncaughtExceptionReporter { - implicit private val logSource: LogSource = LogSource(this.getClass) - private val log: Log[Id] = Log.logId - - def reportFailure(ex: scala.Throwable): Unit = - log.error("Uncaught Exception", ex) -} diff --git a/shared/src/main/scala/coop/rchain/shared/package.scala b/shared/src/main/scala/coop/rchain/shared/package.scala index 08861be8d03..b14fdac419b 100644 --- a/shared/src/main/scala/coop/rchain/shared/package.scala +++ b/shared/src/main/scala/coop/rchain/shared/package.scala @@ -1,7 +1,6 @@ package coop.rchain import coop.rchain.fs2.Fs2StreamSyntax -import coop.rchain.monix.MonixableSyntax import coop.rchain.sdk.primitive.MapSyntax import coop.rchain.store.{KeyValueStoreManagerSyntax, KeyValueStoreSyntax, KeyValueTypedStoreSyntax} @@ -17,7 +16,6 @@ trait AllSyntaxShared extends KeyValueStoreSyntax with KeyValueTypedStoreSyntax with KeyValueStoreManagerSyntax - 
with MonixableSyntax with Fs2StreamSyntax with catscontrib.ToBooleanF with MapSyntax diff --git a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala index 51019e7d0ff..e404dc5d595 100644 --- a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala @@ -1,17 +1,17 @@ package coop.rchain.shared import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Timer} +import cats.effect.{Concurrent, IO, Timer} import cats.syntax.all._ import coop.rchain.shared.syntax.sharedSyntaxFs2Stream import fs2.Stream -import monix.eval.Task import monix.execution.schedulers.TestScheduler import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.util.Success +import RChainScheduler._ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { @@ -20,7 +20,7 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { */ def test[F[_]: Concurrent: Timer](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { st => val addOne = Stream.eval(st.updateAndGet(_ + "1")) - val pause = Stream.sleep(1.second).drain + val pause = Stream.sleep(1.second)(Timer[F]).drain val addZero = st.update(_ + "0") (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError @@ -29,11 +29,11 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { // Helper to construct success result def success[A](a: A): Option[Success[A]] = Success(a).some - // Instance of testing ExecutionContext (Scheduler) + // Instance of testing ContextShift (Scheduler) implicit val ec = TestScheduler() "evalOnIdle" should "NOT trigger timeout if element IS produced within timeout period" in { - val t = test[Task](1001.millis).runToFuture + val t = test[IO](1001.millis).unsafeToFuture() // Sanity check, value should be empty before start 
t.value shouldBe none @@ -47,7 +47,7 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { } it should "trigger timeout if element is NOT produced within timeout" in { - val t = test[Task](750.millis).runToFuture + val t = test[IO](750.millis).unsafeToFuture // Sanity check, value should be empty before start t.value shouldBe none @@ -61,7 +61,7 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { } it should "trigger two timeouts if element is NOT produced and timeout is double time shorter" in { - val t = test[Task](499.millis).runToFuture + val t = test[IO](499.millis).unsafeToFuture // Sanity check, value should be empty before start t.value shouldBe none diff --git a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala index 785845c4b58..2093db64b5c 100644 --- a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala +++ b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala @@ -1,8 +1,8 @@ package coop.rchain.shared import cats.Functor +import cats.effect.{ContextShift, IO} import cats.syntax.functor._ -import monix.eval.Task import monix.execution.Scheduler import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers @@ -19,5 +19,5 @@ object scalatestcontrib extends Matchers { ) } - def effectTest[T](f: Task[T])(implicit scheduler: Scheduler): T = f.runSyncUnsafe() + def effectTest[T](f: IO[T]): T = f.unsafeRunSync } diff --git a/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala b/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala index a28a7cfa7cd..7fdd7c4b217 100644 --- a/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala +++ b/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala @@ -1,9 +1,8 @@ package coop.rchain.store -import cats.effect.Sync +import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.shared.syntax._ -import 
monix.eval.Task import org.scalacheck.{Arbitrary, Gen} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -60,32 +59,32 @@ class InMemoryKeyValueStoreSpec it should "put and get data from the store" in { forAll(genData) { expected => - implicit val kvm = InMemoryStoreManager[Task] - val sut = new KeyValueStoreSut[Task] + implicit val kvm = InMemoryStoreManager[IO] + val sut = new KeyValueStoreSut[IO] val test = for { result <- sut.testPutGet(expected) } yield result shouldBe expected - test.runSyncUnsafe() + test.unsafeRunSync } } it should "put and get all data from the store" in { forAll(genData) { expected => - implicit val kvm = InMemoryStoreManager[Task] - val sut = new KeyValueStoreSut[Task] + implicit val kvm = InMemoryStoreManager[IO] + val sut = new KeyValueStoreSut[IO] val test = for { result <- sut.testPutIterate(expected) } yield result shouldBe expected - test.runSyncUnsafe() + test.unsafeRunSync } } it should "put and collect partial data from the store" in { forAll(genData) { expected => - implicit val kvm = InMemoryStoreManager[Task] - val sut = new KeyValueStoreSut[Task] + implicit val kvm = InMemoryStoreManager[IO] + val sut = new KeyValueStoreSut[IO] val keys = expected.toList.map(_._1) val kMin = keys.min @@ -103,14 +102,14 @@ class InMemoryKeyValueStoreSpec } } yield result shouldBe expectedFiltered - test.runSyncUnsafe() + test.unsafeRunSync } } it should "not have deleted keys in the store" in { forAll(genData) { input => - implicit val kvm = InMemoryStoreManager[Task] - val sut = new KeyValueStoreSut[Task] + implicit val kvm = InMemoryStoreManager[IO] + val sut = new KeyValueStoreSut[IO] val allKeys = input.keysIterator.toVector // Take some keys for deletion val (getKeys, deleteKeys) = allKeys.splitAt(allKeys.size / 2) @@ -122,7 +121,7 @@ class InMemoryKeyValueStoreSpec result <- sut.testPutDeleteGet(input, deleteKeys) } yield result shouldBe expected - test.runSyncUnsafe() + test.unsafeRunSync } } 
From 87c417e2f6d93a6760e4ccfadee76bffbfc67b1d Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 5 Apr 2023 22:29:43 +0400 Subject: [PATCH 10/17] First step --- build.sbt | 1 + project/plugins.sbt | 1 + 2 files changed, 2 insertions(+) diff --git a/build.sbt b/build.sbt index 19c08bebba6..808db5b4766 100644 --- a/build.sbt +++ b/build.sbt @@ -54,6 +54,7 @@ lazy val projectSettings = Seq( ), scalafmtOnCompile := !sys.env.contains("CI"), // disable in CI environments ThisBuild / scapegoatVersion := "1.4.11", + ThisBuild / scalacOptions += "semanticdb:synthetics:on", Test / testOptions += Tests.Argument("-oD"), //output test durations javacOptions ++= Seq("-source", "11", "-target", "11"), Test / fork := true, diff --git a/project/plugins.sbt b/project/plugins.sbt index 24aa6d476d6..9bfdd232364 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -21,3 +21,4 @@ addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.1") // TODO replace with addSbtPlugin("org.typelevel" % "sbt-fs2-grpc" % "") // when migrated top CE3 since latest fs2-grpc is not available for CE2 addSbtPlugin("org.lyranthe.fs2-grpc" % "sbt-java-gen" % "0.11.2") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") From 693b8f572eb334bd3574c0a67f192e28a86ab6de Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 5 Apr 2023 22:59:08 +0400 Subject: [PATCH 11/17] Second step. After scalafix run. 
--- .../blockstorage/dag/BlockMetadataStore.scala | 2 +- .../rchain/casper/BlockExecutionTracker.scala | 2 +- .../rchain/casper/MultiParentCasper.scala | 5 ++-- .../coop/rchain/casper/api/BlockApiImpl.scala | 2 +- .../rchain/casper/blocks/BlockProcessor.scala | 7 +++--- .../rchain/casper/blocks/BlockReceiver.scala | 2 +- .../rchain/casper/blocks/BlockRetriever.scala | 11 ++++---- .../casper/blocks/proposer/Proposer.scala | 6 ++--- .../casper/dag/BlockDagKeyValueStorage.scala | 3 ++- .../casper/engine/LfsBlockRequester.scala | 6 ++--- .../engine/LfsTupleSpaceRequester.scala | 6 ++--- .../rchain/casper/engine/NodeLaunch.scala | 8 +++--- .../rchain/casper/engine/NodeSyncing.scala | 8 +++--- .../rchain/casper/protocol/CommUtil.scala | 5 ++-- .../casper/reporting/ReportingCasper.scala | 4 +-- .../casper/rholang/InterpreterUtil.scala | 9 ++++--- .../rholang/syntax/RuntimeReplaySyntax.scala | 2 +- .../casper/rholang/syntax/RuntimeSyntax.scala | 2 +- .../state/instances/ProposerState.scala | 2 +- .../coop/rchain/casper/util/BondsParser.scala | 19 +++++++------- .../coop/rchain/casper/util/VaultParser.scala | 11 ++++---- .../rchain/casper/addblock/ProposerSpec.scala | 2 +- .../api/BlockQueryResponseAPITest.scala | 1 - .../casper/api/BlocksResponseAPITest.scala | 1 - .../batch2/BlockReceiverEffectsSpec.scala | 1 - .../engine/LfsBlockRequesterEffectsSpec.scala | 6 ++--- .../engine/LfsStateRequesterEffectsSpec.scala | 5 ++-- .../engine/RunningHandleHasBlockSpec.scala | 2 +- .../rchain/casper/genesis/GenesisTest.scala | 3 +-- .../helper/BlockDagStorageFixture.scala | 2 +- .../coop/rchain/casper/helper/TestNode.scala | 12 ++++----- .../casper/helper/TestResultCollector.scala | 2 +- .../rchain/casper/helper/TestRhoRuntime.scala | 2 +- .../merging/MergeNumberChannelSpec.scala | 2 +- .../rchain/casper/rholang/Resources.scala | 2 +- .../sync/BlockRetrieverRequesAllSpec.scala | 2 +- .../casper/sync/BlockRetrieverSpec.scala | 2 +- .../util/comm/TransportLayerTestImpl.scala | 2 +- 
.../util/scalatest/Fs2StreamMatchers.scala | 2 +- .../scala/coop/rchain/comm/rp/Connect.scala | 2 +- .../comm/transport/GrpcTransportClient.scala | 2 +- .../transport/GrpcTransportReceiver.scala | 6 ++--- .../comm/transport/GrpcTransportServer.scala | 8 +++--- .../comm/discovery/GrpcKademliaRPCSpec.scala | 2 +- .../comm/discovery/KademliaRPCRuntime.scala | 25 ++++++++++--------- .../comm/discovery/KademliaRPCSpec.scala | 5 ++-- .../rchain/comm/rp/ClearConnectionsSpec.scala | 2 +- .../coop/rchain/comm/rp/ConnectSpec.scala | 2 +- .../rchain/comm/rp/FindAndConnectSpec.scala | 2 +- .../comm/rp/HandleProtocolHandshakeSpec.scala | 2 +- .../comm/transport/GrpcTransportSpec.scala | 2 +- .../transport/PacketStoreRestoreSpec.scala | 2 +- .../transport/TcpTransportLayerSpec.scala | 5 ++-- .../transport/TransportLayerRuntime.scala | 22 ++++++++-------- .../comm/transport/TransportLayerSpec.scala | 4 +-- .../scala/coop/rchain/graphz/Graphz.scala | 2 +- .../scala/coop/rchain/graphz/GraphzSpec.scala | 2 +- .../main/scala/coop/rchain/node/Main.scala | 2 +- .../scala/coop/rchain/node/api/package.scala | 2 +- .../rchain/node/dag/RNodeStateSetup.scala | 2 +- .../NetworkBlockRequester.scala | 2 +- .../dag/implementation/RNodeDagManager.scala | 2 +- .../coop/rchain/node/effects/package.scala | 4 +-- .../node/instances/ProposerInstance.scala | 4 ++- .../node/revvaultexport/StateBalances.scala | 2 +- .../reporting/TransactionBalances.scala | 2 +- .../rchain/node/runtime/NetworkServers.scala | 13 +++++----- .../rchain/node/runtime/NodeCallCtx.scala | 2 +- .../coop/rchain/node/runtime/NodeMain.scala | 7 +++--- .../rchain/node/runtime/NodeRuntime.scala | 16 ++++++------ .../coop/rchain/node/runtime/Setup.scala | 8 +++--- .../coop/rchain/node/web/Transaction.scala | 2 +- .../scala/coop/rchain/node/web/package.scala | 7 +++--- .../node/mergeablity/ComputeMerge.scala | 3 +-- .../rchain/node/perf/HistoryGenKeySpec.scala | 2 +- .../rholang/interpreter/Interpreter.scala | 2 +- 
.../rchain/rholang/interpreter/Reduce.scala | 2 +- .../rholang/interpreter/RhoRuntime.scala | 2 +- .../rholang/interpreter/RholangCLI.scala | 2 +- .../rholang/interpreter/SystemProcesses.scala | 2 +- .../accounting/CostAccounting.scala | 1 + .../interpreter/accounting/package.scala | 2 +- .../rchain/rholang/interpreter/dispatch.scala | 2 +- .../rholang/interpreter/matcher/StreamT.scala | 2 +- .../merging/RholangMergingLogic.scala | 2 +- .../rchain/rholang/interpreter/package.scala | 4 +-- .../scala/coop/rchain/rholang/Resources.scala | 2 +- .../interpreter/BigIntNormalizerSpec.scala | 2 +- .../CostAccountingReducerTest.scala | 2 +- .../interpreter/RholangOnlyDispatcher.scala | 2 +- .../interpreter/accounting/package.scala | 2 +- .../coop/rchain/rspace/bench/BasicBench.scala | 2 +- .../scala/coop/rchain/rspace/HotStore.scala | 2 +- .../scala/coop/rchain/rspace/RSpaceOps.scala | 4 +-- .../coop/rchain/rspace/ReportingRspace.scala | 2 +- .../rchain/rspace/concurrent/MultiLock.scala | 2 +- .../rspace/examples/AddressBookExample.scala | 2 +- .../rchain/rspace/merger/EventLogIndex.scala | 2 +- .../rchain/rspace/merger/StateChange.scala | 2 +- .../instances/RSpaceStateManagerImpl.scala | 1 - .../rchain/rspace/ExportImportTests.scala | 2 +- .../coop/rchain/rspace/HotStoreSpec.scala | 2 +- .../rchain/rspace/ReplayRSpaceTests.scala | 2 +- .../rchain/rspace/StorageExamplesTests.scala | 2 +- .../coop/rchain/rspace/StorageTestsBase.scala | 2 +- .../rspace/concurrent/MultiLockTest.scala | 2 +- .../coop/rchain/fs2/Fs2StreamSyntax.scala | 10 ++++---- .../rchain/metrics/MetricsSemaphore.scala | 2 +- .../scala/coop/rchain/shared/MaybeCell.scala | 2 +- .../coop/rchain/shared/RChainScheduler.scala | 2 +- .../main/scala/coop/rchain/shared/Time.scala | 4 +-- .../rchain/store/LazyAdHocKeyValueCache.scala | 2 +- .../coop/rchain/store/LazyKeyValueCache.scala | 2 +- .../rchain/store/LmdbDirStoreManager.scala | 2 +- .../coop/rchain/store/LmdbStoreManager.scala | 2 +- 
.../rchain/shared/Fs2ExtensionsSpec.scala | 15 +++++------ .../coop/rchain/shared/scalatestcontrib.scala | 2 +- 117 files changed, 237 insertions(+), 224 deletions(-) diff --git a/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockMetadataStore.scala b/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockMetadataStore.scala index 9a067f21eca..4f103e8f1b2 100644 --- a/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockMetadataStore.scala +++ b/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockMetadataStore.scala @@ -2,7 +2,6 @@ package coop.rchain.blockstorage.dag import cats.Monad import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.models.BlockHash.BlockHash @@ -12,6 +11,7 @@ import coop.rchain.shared.syntax._ import coop.rchain.store.KeyValueTypedStore import scala.collection.immutable.SortedMap +import cats.effect.Ref object BlockMetadataStore { def apply[F[_]: Sync: Log]( diff --git a/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala b/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala index f5d1e879162..97ef71d22cf 100644 --- a/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala +++ b/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala @@ -1,11 +1,11 @@ package coop.rchain.casper import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.blockstorage.dag.BlockDagStorage.DeployId import coop.rchain.rholang.interpreter.EvaluateResult import coop.rchain.sdk.syntax.all._ +import cats.effect.Ref trait BlockExecutionTracker[F[_]] { def execStarted(d: DeployId): F[Unit] diff --git a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala index 39141048f55..18df68d69a9 100644 --- a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala 
+++ b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala @@ -1,7 +1,7 @@ package coop.rchain.casper import cats.data.EitherT -import cats.effect.{Concurrent, Sync, Timer} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -19,6 +19,7 @@ import coop.rchain.models.syntax._ import coop.rchain.models.{BlockHash => _, _} import coop.rchain.sdk.error.FatalError import coop.rchain.shared._ +import cats.effect.Temporal final case class ParsingError(details: String) @@ -168,7 +169,7 @@ object MultiParentCasper { rejectedDeploys = csRejectedDeploys ) - def validate[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validate[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: Long diff --git a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala index 1c6d2b4a35f..bd607f93c17 100644 --- a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala +++ b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.api import cats.data.OptionT -import cats.effect.concurrent.Ref import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString @@ -45,6 +44,7 @@ import coop.rchain.shared.syntax._ import fs2.Stream import scala.collection.immutable.SortedMap +import cats.effect.Ref object BlockApiImpl { def apply[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala index f01429e66a6..3d4701ac31c 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala +++ 
b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.blocks -import cats.effect.{Concurrent, Timer} +import cats.effect.Concurrent import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.BlockDagStorage @@ -13,6 +13,7 @@ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream import fs2.concurrent.Queue +import cats.effect.Temporal object BlockProcessor { @@ -21,7 +22,7 @@ object BlockProcessor { * - input block must have all dependencies in the DAG * - blocks created by node itself are not processed here, but in Proposer */ - def apply[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def apply[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( inputBlocks: Stream[F, BlockMessage], validatedQueue: Queue[F, BlockMessage], shardId: String, @@ -40,7 +41,7 @@ object BlockProcessor { } yield (block, result) } - def validateAndAddToDag[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def validateAndAddToDag[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: Long diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala index abebd8d4f3e..81ea06b64eb 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.blocks import cats.Show -import cats.effect.concurrent.Ref import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore @@ -15,6 +14,7 @@ import coop.rchain.shared.Log import 
coop.rchain.shared.syntax._ import fs2.Stream import fs2.concurrent.Queue +import cats.effect.Ref sealed trait RecvStatus // Begin checking and storing block diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala index 05fa93db390..d4df4472992 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala @@ -1,8 +1,6 @@ package coop.rchain.casper.blocks import cats.Monad -import cats.effect.Timer -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.tagless.autoFunctorK import coop.rchain.casper.PrettyPrinter @@ -17,6 +15,7 @@ import coop.rchain.shared.Log import java.util.concurrent.TimeUnit import scala.concurrent.duration.FiniteDuration +import cats.effect.{Ref, Temporal} /** * BlockRetriever makes sure block is received once Casper request it. @@ -92,7 +91,7 @@ object BlockRetriever { def apply[F[_]](implicit ev: BlockRetriever[F]): BlockRetriever[F] = ev - def of[F[_]: Monad: RequestedBlocks: Log: Timer: RPConfAsk: TransportLayer: CommUtil: Metrics] + def of[F[_]: Monad: RequestedBlocks: Log: Temporal: RPConfAsk: TransportLayer: CommUtil: Metrics] : BlockRetriever[F] = new BlockRetriever[F] { @@ -137,7 +136,7 @@ object BlockRetriever { admitHashReason: AdmitHashReason ): F[AdmitHashResult] = for { - now <- Timer[F].clock.realTime(TimeUnit.MILLISECONDS) + now <- Temporal[F].clock.realTime(TimeUnit.MILLISECONDS) result <- RequestedBlocks[F] .modify[AdmitHashResult] { state => val unknownHash = !state.contains(hash) @@ -230,7 +229,7 @@ object BlockRetriever { s"Remain waiting: ${waitingListTail.map(_.endpoint.host).mkString(", ")}." 
) _ <- CommUtil[F].requestForBlock(nextPeer, hash) - ts <- Timer[F].clock.realTime(TimeUnit.MILLISECONDS) + ts <- Temporal[F].clock.realTime(TimeUnit.MILLISECONDS) _ <- RequestedBlocks.put( hash, requested.copy( @@ -261,7 +260,7 @@ object BlockRetriever { _ <- state.keySet.toList.traverse(hash => { val requested = state(hash) for { - expired <- Timer[F].clock + expired <- Temporal[F].clock .realTime(TimeUnit.MILLISECONDS) .map(_ - requested.timestamp > ageThreshold.toMillis) _ <- Log[F] diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala index 8b570183f77..b48a7728adb 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala @@ -1,8 +1,7 @@ package coop.rchain.casper.blocks.proposer import cats.data.OptionT -import cats.effect.concurrent.Deferred -import cats.effect.{Concurrent, Timer} +import cats.effect.Concurrent import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -24,6 +23,7 @@ import coop.rchain.sdk.consensus.Stake import coop.rchain.sdk.error.FatalError import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, Time} +import cats.effect.{Deferred, Temporal} sealed abstract class ProposerResult object ProposerEmpty extends ProposerResult @@ -114,7 +114,7 @@ class Proposer[F[_]: Concurrent: Log: Span]( object Proposer { // format: off def apply[F[_] - /* Execution */ : Concurrent: Timer: Time + /* Execution */ : Concurrent: Temporal: Time /* Storage */ : BlockStore: BlockDagStorage /* Rholang */ : RuntimeManager /* Comm */ : CommUtil diff --git a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala index 36aa2f397a0..21456db15ef 100644 --- 
a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala +++ b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.dag import cats.effect.Concurrent -import cats.effect.concurrent.{Ref, Semaphore} import cats.syntax.all._ import cats.{Monad, Show} import coop.rchain.blockstorage._ @@ -29,6 +28,8 @@ import coop.rchain.store.{KeyValueStoreManager, KeyValueTypedStore} import fs2.Stream import scala.collection.concurrent.TrieMap +import cats.effect.Ref +import cats.effect.std.Semaphore final class BlockDagKeyValueStorage[F[_]: Concurrent: Log] private ( representationState: Ref[F, DagRepresentation], diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala index 148f976677d..f706b7c165c 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Sync, Timer} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.casper.protocol.{BlockMessage, FinalizedFringe} @@ -14,6 +13,7 @@ import fs2.concurrent.Queue import scala.collection.immutable.SortedMap import scala.concurrent.duration._ +import cats.effect.{Ref, Temporal} /** * Last Finalized State processor for receiving blocks. 
@@ -148,7 +148,7 @@ object LfsBlockRequester { * @param validateBlock Check if received block is valid * @return fs2.Stream processing all blocks */ - def stream[F[_]: Concurrent: Timer: Log]( + def stream[F[_]: Concurrent: Temporal: Log]( fringe: FinalizedFringe, incomingBlocks: Stream[F, BlockMessage], blockHeightsBeforeFringe: Int, diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala index f70457f2409..298c6961262 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Sync, Timer} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol._ import coop.rchain.models.syntax._ @@ -16,6 +15,7 @@ import fs2.{Pure, Stream} import scodec.bits.ByteVector import scala.concurrent.duration._ +import cats.effect.{Ref, Temporal} /** * Last Finalized State processor for receiving Rholang state. 
@@ -94,7 +94,7 @@ object LfsTupleSpaceRequester { * @param validateTupleSpaceItems Check if received statet chunk is valid * @return fs2.Stream processing all tuple space state */ - def stream[F[_]: Concurrent: Timer: Log]( + def stream[F[_]: Concurrent: Temporal: Log]( fringe: FinalizedFringe, tupleSpaceMessageQueue: Queue[F, StoreItemsMessage], requestForStoreItem: (StatePartPath, Int) => F[Unit], diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala index e1c3a8e52ea..c5dbda7dbcc 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala @@ -1,8 +1,7 @@ package coop.rchain.casper.engine import cats.Parallel -import cats.effect.concurrent.Deferred -import cats.effect.{Concurrent, ContextShift, Timer} +import cats.effect.Concurrent import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -28,6 +27,7 @@ import fs2.Stream import fs2.concurrent.Queue import scala.concurrent.duration.DurationInt +import cats.effect.{Deferred, Temporal} final case class PeerMessage(peer: PeerNode, message: CasperMessage) @@ -35,7 +35,7 @@ object NodeLaunch { // format: off def apply[F[_] - /* Execution */ : Concurrent: Parallel: ContextShift: Time: Timer + /* Execution */ : Concurrent: Parallel: ContextShift: Time: Temporal /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -118,7 +118,7 @@ object NodeLaunch { def waitForFirstConnection: F[Unit] = for { isEmpty <- ConnectionsCell[F].get.map(_.isEmpty) - _ <- (Timer[F].sleep(250.millis) *> waitForFirstConnection).whenA(isEmpty) + _ <- (Temporal[F].sleep(250.millis) *> waitForFirstConnection).whenA(isEmpty) } yield () for { diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala 
b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala index 478cee6984f..2754109e43e 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, Timer} +import cats.effect.Concurrent import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -24,6 +23,7 @@ import fs2.concurrent.Queue import scala.collection.immutable.SortedMap import scala.concurrent.duration._ +import cats.effect.{Deferred, Ref, Temporal} object NodeSyncing { @@ -32,7 +32,7 @@ object NodeSyncing { */ // format: off def apply[F[_] - /* Execution */ : Concurrent: Time: Timer + /* Execution */ : Concurrent: Time: Temporal /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -62,7 +62,7 @@ object NodeSyncing { */ // format: off class NodeSyncing[F[_] - /* Execution */ : Concurrent: Time: Timer + /* Execution */ : Concurrent: Time: Temporal /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala index 9065118617a..2dc937c6d63 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala @@ -18,6 +18,7 @@ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.shared._ import scala.concurrent.duration._ +import cats.effect.Temporal // TODO: remove CommUtil completely and move to extensions (syntax) on TransportLayer @autoFunctorK @@ -49,7 +50,7 @@ object CommUtil { def apply[F[_]](implicit ev: CommUtil[F]): CommUtil[F] = ev - def of[F[_]: 
Concurrent: Timer: TransportLayer: RPConfAsk: ConnectionsCell: Log]: CommUtil[F] = + def of[F[_]: Concurrent: Temporal: TransportLayer: RPConfAsk: ConnectionsCell: Log]: CommUtil[F] = new CommUtil[F] { def sendToPeers(message: Packet, scopeSize: Option[Int]): F[Unit] = @@ -86,7 +87,7 @@ object CommUtil { Log[F].warn( s"Failed to send ${msgTypeName} to $peer because of ${CommError .errorMessage(error)}. Retrying in $retryAfter..." - ) >> Timer[F].sleep(retryAfter) >> keepOnRequestingTillRunning(peer, msg) + ) >> Temporal[F].sleep(retryAfter) >> keepOnRequestingTillRunning(peer, msg) } RPConfAsk[F].ask >>= { conf => diff --git a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala index 0f931f9e0bf..06871d6fca8 100644 --- a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala @@ -1,8 +1,7 @@ package coop.rchain.casper.reporting import cats.Parallel -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.models.syntax._ @@ -33,6 +32,7 @@ import coop.rchain.rspace.{ReportingRspace, Match => RSpaceMatch} import coop.rchain.shared.Log import scala.concurrent.ExecutionContext +import cats.effect.Ref /** * @param processedDeploy Deploy details diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala index 167221300de..eaa14319c78 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.{Concurrent, Sync, Timer} +import cats.effect.{Concurrent, Sync} import 
cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -30,6 +30,7 @@ import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.rholang.interpreter.errors.InterpreterError import coop.rchain.shared.{Log, LogSource} import retry.{retryingOnFailures, RetryPolicies} +import cats.effect.Temporal object InterpreterUtil { @@ -47,7 +48,7 @@ object InterpreterUtil { // TODO: most of this function is legacy code, it should be refactored with separation of errors that are // handled (with included data e.g. hash not equal) and fatal errors which should NOT be handled - def validateBlockCheckpoint[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpoint[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[(BlockMetadata, BlockProcessing[Boolean])] = for { @@ -129,11 +130,11 @@ object InterpreterUtil { (bmd, result) } - def validateBlockCheckpointLegacy[F[_]: Concurrent: Timer: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpointLegacy[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[BlockProcessing[Boolean]] = validateBlockCheckpoint(block).map(_._2) - private def replayBlock[F[_]: Sync: Timer: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( + private def replayBlock[F[_]: Sync: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( initialStateHash: StateHash, block: BlockMessage, rand: Blake2b512Random diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala index e071d68a7d7..25ed91e91ea 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala +++ 
b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala @@ -2,7 +2,6 @@ package coop.rchain.casper.rholang.syntax import cats.data.EitherT import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.casper.CasperMetricsSource import coop.rchain.casper.protocol.{ @@ -43,6 +42,7 @@ import RuntimeReplaySyntax._ import coop.rchain.casper.rholang.BlockRandomSeed import coop.rchain.casper.syntax._ import coop.rchain.crypto.hash.Blake2b512Random +import cats.effect.Ref trait RuntimeReplaySyntax { implicit final def casperSyntaxRholangRuntimeReplay[F[_]]( diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala index b7c314e08a0..59357880313 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala @@ -2,7 +2,6 @@ package coop.rchain.casper.rholang.syntax import cats.data.{EitherT, OptionT} import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.{Functor, Monad} import com.google.protobuf.ByteString @@ -48,6 +47,7 @@ import coop.rchain.rspace.hashing.{Blake2b256Hash, StableHashProvider} import coop.rchain.rspace.history.History.emptyRootHash import coop.rchain.rspace.merger.EventLogMergingLogic.NumberChannelsEndVal import coop.rchain.shared.{Base16, Log} +import cats.effect.Ref trait RuntimeSyntax { implicit final def casperSyntaxRholangRuntime[F[_]]( diff --git a/casper/src/main/scala/coop/rchain/casper/state/instances/ProposerState.scala b/casper/src/main/scala/coop/rchain/casper/state/instances/ProposerState.scala index 9de9419a082..fd0aeeb2297 100644 --- a/casper/src/main/scala/coop/rchain/casper/state/instances/ProposerState.scala +++ b/casper/src/main/scala/coop/rchain/casper/state/instances/ProposerState.scala @@ -1,8 +1,8 @@ package 
coop.rchain.casper.state.instances -import cats.effect.concurrent.Deferred import coop.rchain.casper.blocks.proposer.ProposeResult import coop.rchain.casper.protocol.BlockMessage +import cats.effect.Deferred final case class ProposerState[F[_]]( latestProposeResult: Option[(ProposeResult, Option[BlockMessage])] = None, diff --git a/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala b/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala index 7c3ba2f8d1d..07fae97855f 100644 --- a/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala +++ b/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.util -import cats.effect.{Blocker, ContextShift, Sync} +import cats.effect.Sync import cats.syntax.all._ import coop.rchain.crypto.PublicKey import coop.rchain.crypto.signatures.Secp256k1 @@ -9,6 +9,7 @@ import coop.rchain.models.syntax._ import fs2.{io, text, Pipe, Stream} import java.nio.file.Path +import cats.effect.Resource object BondsParser { @@ -20,7 +21,7 @@ object BondsParser { * - https://typelevel.org/cats-effect/docs/migration-guide#blocker */ def parse[F[_]: Sync: ContextShift: Log](bondsPath: Path): F[Map[PublicKey, Long]] = { - def readLines(blocker: Blocker) = + def readLines = io.file .readAll[F](bondsPath, blocker, chunkSize = 4096) .through(text.utf8Decode) @@ -58,7 +59,7 @@ object BondsParser { case ex: Throwable => new Exception(s"FAILED PARSING BONDS FILE: $bondsPath\n$ex") } - Blocker[F].use(readLines) + Resource.unit[F].use(readLines) } def parse[F[_]: Sync: ContextShift: Log]( @@ -67,7 +68,7 @@ object BondsParser { ): F[Map[PublicKey, Long]] = { val bondsPath = Path.of(bondsPathStr) - def readLines(blocker: Blocker) = + def readLines = io.file .exists(blocker, bondsPath) .ifM( @@ -75,7 +76,7 @@ object BondsParser { Log[F].warn(s"BONDS FILE NOT FOUND: $bondsPath. 
Creating file with random bonds.") >> newValidators[F](autogenShardSize, bondsPath.toAbsolutePath) ) - Blocker[F].use(readLines) + Resource.unit[F].use(readLines) } private def newValidators[F[_]: Sync: ContextShift: Log]( @@ -89,11 +90,11 @@ object BondsParser { val (_, pubKeys) = keys.unzip val bonds = pubKeys.iterator.zipWithIndex.toMap.mapValues(_.toLong + 1L) - def toFile(filePath: Path, blocker: Blocker): Pipe[F, String, Unit] = + def toFile(filePath: Path): Pipe[F, String, Unit] = _.through(text.utf8Encode).through(io.file.writeAll(filePath, blocker)) // Write generated `.sk` files with private key as content - def writeSkFiles(blocker: Blocker) = + def writeSkFiles = Stream .fromIterator(keys.iterator) .flatMap { @@ -107,7 +108,7 @@ object BondsParser { .drain // Create bonds file with generated public keys - def writeBondsFile(blocker: Blocker) = { + def writeBondsFile = { val br = System.lineSeparator() val bondsStream = Stream .fromIterator(bonds.iterator) @@ -120,7 +121,7 @@ object BondsParser { } // Write .sk files and bonds file - Blocker[F].use { blocker => + Resource.unit[F].use { blocker => io.file.createDirectories(blocker, genesisFolder) *> writeSkFiles(blocker) *> writeBondsFile(blocker) *> bonds.pure[F] } diff --git a/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala b/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala index 2b6429ad10a..65dc4e58523 100644 --- a/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala +++ b/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.util -import cats.effect.{Blocker, ContextShift, Sync} +import cats.effect.Sync import cats.syntax.all._ import coop.rchain.casper.genesis.contracts.Vault import coop.rchain.rholang.interpreter.util.RevAddress @@ -8,6 +8,7 @@ import coop.rchain.shared.Log import fs2.{io, text} import java.nio.file.Path +import cats.effect.Resource object VaultParser { @@ -19,7 +20,7 @@ object 
VaultParser { * - https://typelevel.org/cats-effect/docs/migration-guide#blocker */ def parse[F[_]: Sync: ContextShift: Log](vaultsPath: Path): F[Seq[Vault]] = { - def readLines(blocker: Blocker) = + def readLines = io.file .readAll[F](vaultsPath, blocker, chunkSize = 4096) .through(text.utf8Decode) @@ -61,13 +62,13 @@ object VaultParser { case ex: Throwable => new Exception(s"FAILED PARSING WALLETS FILE: $vaultsPath\n$ex") } - Blocker[F].use(readLines) + Resource.unit[F].use(readLines) } def parse[F[_]: Sync: ContextShift: Log](vaultsPathStr: String): F[Seq[Vault]] = { val vaultsPath = Path.of(vaultsPathStr) - def readLines(blocker: Blocker) = + def readLines = io.file .exists(blocker, vaultsPath) .ifM( @@ -76,7 +77,7 @@ object VaultParser { .warn(s"WALLETS FILE NOT FOUND: $vaultsPath. No vaults will be put in genesis block.") .as(Seq.empty[Vault]) ) - Blocker[F].use(readLines) + Resource.unit[F].use(readLines) } private def tryWithMsg[F[_]: Sync, A](f: => A)(failMsg: => String) = diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala index 86567718964..909550ced4c 100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala @@ -2,7 +2,6 @@ package coop.rchain.casper.addblock import cats.Applicative import cats.effect.IO -import cats.effect.concurrent.Deferred import cats.syntax.all._ import coop.rchain.casper._ import coop.rchain.casper.blocks.proposer._ @@ -18,6 +17,7 @@ import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import coop.rchain.shared.RChainScheduler._ +import cats.effect.Deferred class ProposerSpec extends AnyFlatSpec with Matchers with BlockDagStorageFixture { diff --git a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala 
b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala index 54e2304798e..bc341e7b325 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.api import cats.effect.{IO, Sync} -import cats.effect.concurrent.Ref import cats.effect.testing.scalatest.AsyncIOSpec import cats.syntax.all._ import com.google.protobuf.ByteString diff --git a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala index b6333d9d9b6..bc8d8d06801 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.api import cats.effect.{IO, Sync} -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag._ diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala index 0fc2c27d62e..523f1607552 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.batch2 import cats.Applicative -import cats.effect.concurrent.Ref import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala index c6d1079af17..d674b4c69da 100644 --- 
a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, IO, Timer} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsBlockRequester.ST @@ -16,6 +15,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ +import cats.effect.{Ref, Temporal} class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2StreamMatchers { @@ -81,7 +81,7 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Concurrent: Timer: Log]( + def createMock[F[_]: Concurrent: Temporal: Log]( startBlock: BlockMessage, requestTimeout: FiniteDuration )(test: Mock[F] => F[Unit]): F[Unit] = { diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala index e5252d33839..53eac20f3ad 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, IO, Timer} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsTupleSpaceRequester.{ST, StatePartPath} @@ -18,6 +18,7 @@ import scodec.bits.ByteVector import java.nio.ByteBuffer import scala.concurrent.duration.{DurationInt, FiniteDuration} +import cats.effect.Temporal class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2StreamMatchers { @@ -69,7 
+70,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Concurrent: Timer: Log](requestTimeout: FiniteDuration)( + def createMock[F[_]: Concurrent: Temporal: Log](requestTimeout: FiniteDuration)( test: Mock[F] => F[Unit] ): F[Unit] = { diff --git a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala index fef6943a2b2..b4fcfba62f5 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.engine import cats.effect.IO -import cats.effect.concurrent.Ref import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever @@ -26,6 +25,7 @@ import coop.rchain.shared.Log import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers +import cats.effect.Ref class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { import coop.rchain.shared.RChainScheduler._ diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala index 0f6c0fe7858..fb968b536b0 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.genesis import cats.Parallel -import cats.effect.{Concurrent, ContextShift, IO, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.syntax._ @@ -238,7 +238,6 @@ object GenesisTest { )( implicit genesisPath: Path, runtimeManager: RuntimeManager[IO], - c: Concurrent[IO], log: 
LogStub[IO] ): IO[BlockMessage] = for { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala index 591645eb831..ddc549cdece 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.{Concurrent, IO, Resource, Timer} +import cats.effect.{Concurrent, IO, Resource} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index 6e9f0aa3b0f..47ce01eae23 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -1,8 +1,7 @@ package coop.rchain.casper.helper import cats.Parallel -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ContextShift, IO, Resource, Sync, Timer} +import cats.effect.{Concurrent, IO, Resource, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -40,8 +39,9 @@ import monix.execution.Scheduler import java.nio.file.Path import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} +import cats.effect.{Deferred, Ref, Temporal} -case class TestNode[F[_]: Concurrent: Timer]( +case class TestNode[F[_]: Concurrent: Temporal]( name: String, local: PeerNode, tle: TransportLayerTestImpl[F], @@ -285,7 +285,7 @@ object TestNode { import scala.concurrent.ExecutionContext.Implicits.global implicit val cs: ContextShift[IO] = IO.contextShift(global) - implicit val t: Timer[IO] = IO.timer(global) + implicit val t: Temporal[IO] = IO.timer(global) def standaloneEff(genesis: 
GenesisContext): Resource[Effect, TestNode[Effect]] = networkEff( @@ -315,7 +315,7 @@ object TestNode { ) } - private def networkF[F[_]: Concurrent: Parallel: ContextShift: Timer: TestNetwork]( + private def networkF[F[_]: Concurrent: Parallel: ContextShift: Temporal: TestNetwork]( sks: IndexedSeq[PrivateKey], genesis: BlockMessage, storageMatrixPath: Path, @@ -373,7 +373,7 @@ object TestNode { } } - private def createNode[F[_]: Concurrent: Timer: Parallel: ContextShift: TestNetwork]( + private def createNode[F[_]: Concurrent: Temporal: Parallel: ContextShift: TestNetwork]( name: String, currentPeerNode: PeerNode, genesis: BlockMessage, diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala index 0faf5ac82b9..03d017ba109 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala @@ -1,6 +1,5 @@ package coop.rchain.casper.helper import cats.effect.Concurrent -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.Span @@ -10,6 +9,7 @@ import coop.rchain.models.rholang.implicits._ import coop.rchain.models.{ETuple, Expr, ListParWithRandom, Par} import coop.rchain.rholang.interpreter.ContractCall import coop.rchain.rholang.interpreter.SystemProcesses.ProcessContext +import cats.effect.Ref object IsAssert { def unapply( diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala index b22f50b3327..f7cf9824d4f 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Resource} 
+import cats.effect.{Concurrent, Resource} import coop.rchain.metrics.{Metrics, Span} import coop.rchain.rholang.Resources.mkRuntimes import coop.rchain.rholang.interpreter.RhoRuntime.RhoHistoryRepository diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index 8e0bf4b2951..0f028e15195 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.merging import cats.Parallel -import cats.effect.{Concurrent, ContextShift, IO} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.rholang.Resources diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala index 36e6b895d4a..67ff59545bf 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.rholang import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Resource, Sync} +import cats.effect.{Concurrent, Resource, Sync} import cats.syntax.all._ import coop.rchain.casper.storage.RNodeKeyValueStoreManager.rnodeDbMapping import coop.rchain.metrics diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala index 39041a6b89d..6eff7edb39a 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.sync import cats.effect.IO -import cats.effect.concurrent.Ref import 
com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever import coop.rchain.casper.blocks.BlockRetriever.{RequestState, RequestedBlocks} @@ -23,6 +22,7 @@ import org.scalatest.matchers.should.Matchers import java.util.concurrent.TimeUnit import scala.concurrent.duration._ +import cats.effect.Ref class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala index f56f6af8ce5..8a0948754cd 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.casper.sync import cats.effect.IO -import cats.effect.concurrent.Ref import com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever import coop.rchain.casper.blocks.BlockRetriever.{RequestState, RequestedBlocks} @@ -16,6 +15,7 @@ import coop.rchain.shared.{Log, Time} import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers +import cats.effect.Ref class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { diff --git a/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala b/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala index 482fbf97b60..ffaafde1b25 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/comm/TransportLayerTestImpl.scala @@ -2,7 +2,6 @@ package coop.rchain.casper.util.comm import cats.Monad import cats.effect._ -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.casper.util.comm.TestNetwork.TestNetwork import coop.rchain.comm.CommError.CommErr @@ -14,6 +13,7 @@ import 
io.grpc.Server import scala.collection.immutable.Queue import scala.concurrent.duration.FiniteDuration +import cats.effect.Ref object TestNetwork { type NodeMessageQueues = Map[PeerNode, Queue[Protocol]] diff --git a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala index e022c3f9075..bfe083b379a 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.util.scalatest -import cats.effect.{ContextShift, IO} +import cats.effect.IO import fs2.Stream import org.scalatest.matchers.{MatchResult, Matcher} diff --git a/comm/src/main/scala/coop/rchain/comm/rp/Connect.scala b/comm/src/main/scala/coop/rchain/comm/rp/Connect.scala index a495e185c08..6f447710e30 100644 --- a/comm/src/main/scala/coop/rchain/comm/rp/Connect.scala +++ b/comm/src/main/scala/coop/rchain/comm/rp/Connect.scala @@ -2,7 +2,6 @@ package coop.rchain.comm.rp import cats._ import cats.effect._ -import cats.effect.concurrent.Ref import cats.mtl._ import cats.syntax.all._ import coop.rchain.comm.CommError._ @@ -15,6 +14,7 @@ import coop.rchain.metrics.implicits._ import coop.rchain.shared._ import scala.util.Random +import cats.effect.Ref object Connect { diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 70473951bd7..7826907f905 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -1,7 +1,6 @@ package coop.rchain.comm.transport import cats.Applicative -import cats.effect.concurrent.{Deferred, Ref} import cats.effect.syntax.all._ import cats.effect.{Concurrent, ConcurrentEffect, Sync} import cats.syntax.all._ @@ 
-23,6 +22,7 @@ import scala.collection.concurrent.TrieMap import scala.concurrent.ExecutionContext import scala.concurrent.duration.{FiniteDuration, _} import scala.util._ +import cats.effect.{Deferred, Ref} /** * GRPC channel with a message buffer protecting it from resource exhaustion diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index 6a5fefa0c5e..0ed77425eb3 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -1,7 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync, Timer} +import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import cats.effect.syntax.all._ import coop.rchain.comm.protocol.routing._ @@ -21,6 +20,7 @@ import io.netty.internal.tcnative.AsyncTask import scala.collection.concurrent.TrieMap import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt +import cats.effect.{Deferred, Ref, Temporal} object GrpcTransportReceiver { @@ -30,7 +30,7 @@ object GrpcTransportReceiver { type MessageBuffers[F[_]] = (Send => F[Boolean], StreamMessage => F[Boolean], Stream[F, Unit]) type MessageHandlers[F[_]] = (Send => F[Unit], StreamMessage => F[Unit]) - def create[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( + def create[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: Int, serverSslContext: SslContext, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala index 21d041da894..162bfcb41c7 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala +++ 
b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala @@ -1,7 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync, Timer} +import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.comm.protocol.routing.Protocol @@ -19,6 +18,7 @@ import java.nio.file.Path import scala.collection.concurrent.TrieMap import scala.io.Source import scala.util.{Left, Right, Using} +import cats.effect.{Deferred, Ref, Temporal} trait TransportLayerServer[F[_]] { def resource( @@ -38,7 +38,7 @@ object TransportLayerServer { } } -class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( +class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: Int, cert: String, @@ -108,7 +108,7 @@ class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Me object GrpcTransportServer { - def acquireServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Timer]( + def acquireServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: Int, certPath: Path, diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala index 1681a9c1f70..49a6c777f3b 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.discovery import cats.Applicative -import cats.effect.{ContextShift, IO, Resource, Sync, Timer} +import cats.effect.{IO, Resource, Sync} import cats.mtl.DefaultApplicativeAsk import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk diff --git 
a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala index a36f787f871..97ff0d9aca3 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.discovery import cats._ -import cats.effect.{Resource, Sync, Timer} +import cats.effect.{Resource, Sync} import cats.syntax.all._ import coop.rchain.comm._ import io.grpc @@ -10,8 +10,9 @@ import java.net.ServerSocket import scala.collection.mutable import scala.concurrent.duration._ import scala.util.{Try, Using} +import cats.effect.Temporal -abstract class KademliaRPCRuntime[F[_]: Sync: Timer, E <: Environment] { +abstract class KademliaRPCRuntime[F[_]: Sync: Temporal, E <: Environment] { def createEnvironment(port: Int): F[E] @@ -117,24 +118,24 @@ trait Environment { def port: Int } -abstract class Handler[F[_]: Monad: Timer, R] { +abstract class Handler[F[_]: Monad: Temporal, R] { def received: Seq[(PeerNode, R)] = receivedMessages protected val receivedMessages: mutable.MutableList[(PeerNode, R)] = mutable.MutableList.empty[(PeerNode, R)] } -final class PingHandler[F[_]: Monad: Timer]( +final class PingHandler[F[_]: Monad: Temporal]( delay: Option[FiniteDuration] = None ) extends Handler[F, PeerNode] { def handle(peer: PeerNode): PeerNode => F[Unit] = p => for { _ <- receivedMessages.synchronized(receivedMessages += ((peer, p))).pure[F] - _ <- delay.fold(().pure[F])(implicitly[Timer[F]].sleep) + _ <- delay.fold(().pure[F])(implicitly[Temporal[F]].sleep) } yield () } -final class LookupHandler[F[_]: Monad: Timer]( +final class LookupHandler[F[_]: Monad: Temporal]( response: Seq[PeerNode], delay: Option[FiniteDuration] = None ) extends Handler[F, (PeerNode, Array[Byte])] { @@ -142,21 +143,21 @@ final class LookupHandler[F[_]: Monad: Timer]( (p, a) => for { _ <- 
receivedMessages.synchronized(receivedMessages += ((peer, (p, a)))).pure[F] - _ <- delay.fold(().pure[F])(implicitly[Timer[F]].sleep) + _ <- delay.fold(().pure[F])(implicitly[Temporal[F]].sleep) } yield response } object Handler { - def pingHandler[F[_]: Monad: Timer]: PingHandler[F] = new PingHandler[F] + def pingHandler[F[_]: Monad: Temporal]: PingHandler[F] = new PingHandler[F] - def pingHandlerWithDelay[F[_]: Monad: Timer](delay: FiniteDuration): PingHandler[F] = + def pingHandlerWithDelay[F[_]: Monad: Temporal](delay: FiniteDuration): PingHandler[F] = new PingHandler[F](Some(delay)) - def lookupHandlerNil[F[_]: Monad: Timer]: LookupHandler[F] = new LookupHandler[F](Nil) + def lookupHandlerNil[F[_]: Monad: Temporal]: LookupHandler[F] = new LookupHandler[F](Nil) - def lookupHandlerWithDelay[F[_]: Monad: Timer](delay: FiniteDuration): LookupHandler[F] = + def lookupHandlerWithDelay[F[_]: Monad: Temporal](delay: FiniteDuration): LookupHandler[F] = new LookupHandler[F](Nil, Some(delay)) - def lookupHandler[F[_]: Monad: Timer](result: Seq[PeerNode]): LookupHandler[F] = + def lookupHandler[F[_]: Monad: Temporal](result: Seq[PeerNode]): LookupHandler[F] = new LookupHandler[F](result) } diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala index b1d6703ff02..b66e3d0e88b 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.discovery -import cats.effect.{Sync, Timer} +import cats.effect.Sync import cats.syntax.all._ import coop.rchain.comm.{NodeIdentifier, PeerNode} import org.scalatest.matchers.should.Matchers @@ -8,8 +8,9 @@ import org.scalatest.wordspec.AnyWordSpecLike import scala.concurrent.duration._ import scala.util.Random +import cats.effect.Temporal -abstract class KademliaRPCSpec[F[_]: Sync: Timer, E <: Environment] 
+abstract class KademliaRPCSpec[F[_]: Sync: Temporal, E <: Environment] extends KademliaRPCRuntime[F, E] with AnyWordSpecLike with Matchers { diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala index 9feb9eb6915..76a8173072e 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala @@ -1,6 +1,5 @@ package coop.rchain.comm.rp -import cats.effect.concurrent.Ref import cats.{catsInstancesForId => _, _} import coop.rchain.catscontrib.effect.implicits._ import coop.rchain.catscontrib.ski._ @@ -16,6 +15,7 @@ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ +import cats.effect.Ref class ClearConnectionsSpec extends AnyFunSpec diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala index 5c9db474c52..fea7aa68022 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala @@ -1,6 +1,5 @@ package coop.rchain.comm.rp -import cats.effect.concurrent.Ref import cats.{catsInstancesForId => _, _} import coop.rchain.catscontrib.effect.implicits._ import coop.rchain.catscontrib.ski._ @@ -16,6 +15,7 @@ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} +import cats.effect.Ref class ConnectSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with AppendedClues { diff --git a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala index e9fe0cd872b..d2e1ced1a22 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala +++ 
b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala @@ -1,6 +1,5 @@ package coop.rchain.comm.rp -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.{catsInstancesForId => _, _} import org.scalatest.funspec.AnyFunSpec @@ -16,6 +15,7 @@ import coop.rchain.shared._ import org.scalatest._ import scala.concurrent.duration._ +import cats.effect.Ref class FindAndConnectSpec extends AnyFunSpec diff --git a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala index 401c340b22f..faec523ba9b 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.comm.rp import cats.effect.{Concurrent, IO} -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.comm._ import coop.rchain.comm.rp.Connect._ @@ -13,6 +12,7 @@ import fs2.concurrent.Queue import org.scalatest.flatspec.AnyFlatSpec import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import RChainScheduler._ +import cats.effect.Ref class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChecks { diff --git a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala index 92618c2ddde..55e71ac3d3b 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{ContextShift, IO} +import cats.effect.IO import com.google.protobuf.ByteString import coop.rchain.comm.CommError._ import coop.rchain.comm._ diff --git a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala 
b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala index 42870770c72..4ea5c74d731 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{ContextShift, IO} +import cats.effect.IO import com.google.protobuf.ByteString import coop.rchain.comm.protocol.routing._ import org.scalacheck.Gen diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala index dbd9669332e..a783a6a42df 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.transport -import cats.effect.{IO, Sync, Timer} -import cats.effect.concurrent.{Deferred, MVar, Ref} +import cats.effect.{IO, Sync} +import cats.effect.concurrent.MVar import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.crypto.util.{CertificateHelper, CertificatePrinter} @@ -9,6 +9,7 @@ import coop.rchain.metrics.Metrics import coop.rchain.p2p.EffectsTestInstances._ import coop.rchain.shared.RChainScheduler._ import coop.rchain.shared.{Base16, Log} +import cats.effect.{Deferred, Ref} class TcpTransportLayerSpec extends TransportLayerSpec[IO, TcpTlsEnvironment] { diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala index 2d8564f73e2..81d26b74752 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala @@ -2,7 +2,7 @@ package coop.rchain.comm.transport import cats._ import cats.effect.concurrent.MVar2 -import 
cats.effect.{Sync, Timer} +import cats.effect.Sync import cats.syntax.all._ import coop.rchain.catscontrib.ski._ import coop.rchain.comm.CommError.CommErr @@ -14,8 +14,9 @@ import java.net.ServerSocket import scala.collection.mutable import scala.concurrent.duration._ import scala.util.{Try, Using} +import cats.effect.Temporal -abstract class TransportLayerRuntime[F[_]: Sync: Timer, E <: Environment] { +abstract class TransportLayerRuntime[F[_]: Sync: Temporal, E <: Environment] { val networkId = "test" @@ -91,7 +92,7 @@ abstract class TransportLayerRuntime[F[_]: Sync: Timer, E <: Environment] { for { r <- execute(localTl, local, remote) _ <- if (blockUntilDispatched) cb.waitUntilDispatched() - else implicitly[Timer[F]].sleep(1.second) + else implicitly[Temporal[F]].sleep(1.second) } yield r } } yield new TwoNodesResult { @@ -179,9 +180,9 @@ abstract class TransportLayerRuntime[F[_]: Sync: Timer, E <: Environment] { for { r <- execute(localTl, local, remote1, remote2) _ <- if (blockUntilDispatched) cb1.waitUntilDispatched() - else implicitly[Timer[F]].sleep(1.second) + else implicitly[Temporal[F]].sleep(1.second) _ <- if (blockUntilDispatched) cb2.waitUntilDispatched() - else implicitly[Timer[F]].sleep(1.second) + else implicitly[Temporal[F]].sleep(1.second) } yield r } } yield new ThreeNodesResult { @@ -233,7 +234,7 @@ final class DispatcherCallback[F[_]: Functor](state: MVar2[F, Unit]) { def waitUntilDispatched(): F[Unit] = state.take } -final class Dispatcher[F[_]: Monad: Timer, R, S]( +final class Dispatcher[F[_]: Monad: Temporal, R, S]( response: PeerNode => S, delay: Option[FiniteDuration] = None, ignore: R => Boolean = kp(false) @@ -241,7 +242,7 @@ final class Dispatcher[F[_]: Monad: Timer, R, S]( def dispatch(peer: PeerNode, callback: DispatcherCallback[F]): R => F[S] = p => for { - _ <- delay.fold(().pure[F])(implicitly[Timer[F]].sleep) + _ <- delay.fold(().pure[F])(implicitly[Temporal[F]].sleep) _ = if (!ignore(p)) 
receivedMessages.synchronized(receivedMessages += ((peer, p))) r = response(peer) _ <- callback.notifyThatDispatched() @@ -253,19 +254,20 @@ final class Dispatcher[F[_]: Monad: Timer, R, S]( object Dispatcher { - def withoutMessageDispatcher[F[_]: Monad: Timer]: Dispatcher[F, Protocol, CommunicationResponse] = + def withoutMessageDispatcher[F[_]: Monad: Temporal] + : Dispatcher[F, Protocol, CommunicationResponse] = new Dispatcher[F, Protocol, CommunicationResponse]( _ => CommunicationResponse.handledWithoutMessage, ignore = _.message.isDisconnect ) - def internalCommunicationErrorDispatcher[F[_]: Monad: Timer] + def internalCommunicationErrorDispatcher[F[_]: Monad: Temporal] : Dispatcher[F, Protocol, CommunicationResponse] = new Dispatcher[F, Protocol, CommunicationResponse]( _ => CommunicationResponse.notHandled(InternalCommunicationError("Test")), ignore = _.message.isDisconnect ) - def devNullPacketDispatcher[F[_]: Monad: Timer]: Dispatcher[F, Blob, Unit] = + def devNullPacketDispatcher[F[_]: Monad: Temporal]: Dispatcher[F, Blob, Unit] = new Dispatcher[F, Blob, Unit](response = kp(())) } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala index f26f22b1de5..51ce38dfc6f 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala @@ -1,6 +1,5 @@ package coop.rchain.comm.transport -import cats.effect.Timer import cats.effect.Sync import com.google.protobuf.ByteString import coop.rchain.comm.CommError.CommErr @@ -11,8 +10,9 @@ import coop.rchain.comm.syntax._ import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike +import cats.effect.Temporal -abstract class TransportLayerSpec[F[_]: Sync: Timer, E <: Environment] +abstract class TransportLayerSpec[F[_]: Sync: Temporal, E <: Environment] extends 
TransportLayerRuntime[F, E] with AnyWordSpecLike with Matchers diff --git a/graphz/src/main/scala/coop/rchain/graphz/Graphz.scala b/graphz/src/main/scala/coop/rchain/graphz/Graphz.scala index 3ef45bf3f14..c29a914c126 100644 --- a/graphz/src/main/scala/coop/rchain/graphz/Graphz.scala +++ b/graphz/src/main/scala/coop/rchain/graphz/Graphz.scala @@ -1,8 +1,8 @@ package coop.rchain.graphz import cats._ -import cats.effect.concurrent.Ref import cats.syntax.all._ +import cats.effect.Ref trait GraphSerializer[F[_]] { def push(str: String, suffix: String = "\n"): F[Unit] diff --git a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala index d9d7d4eb119..2515a9729ee 100644 --- a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala +++ b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala @@ -1,11 +1,11 @@ package coop.rchain.graphz import cats.effect.IO -import cats.effect.concurrent.Ref import cats.syntax.all._ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers +import cats.effect.Ref class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with AppendedClues { diff --git a/node/src/main/scala/coop/rchain/node/Main.scala b/node/src/main/scala/coop/rchain/node/Main.scala index faf1953bde6..7c9962230c7 100644 --- a/node/src/main/scala/coop/rchain/node/Main.scala +++ b/node/src/main/scala/coop/rchain/node/Main.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{ContextShift, IO, Timer} +import cats.effect.IO import coop.rchain.node.configuration._ import coop.rchain.node.effects._ import coop.rchain.node.runtime.NodeMain diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index 91c556fb642..580587b62f3 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -1,6 
+1,6 @@ package coop.rchain.node -import cats.effect.{Concurrent, ConcurrentEffect, ContextShift, Resource, Sync} +import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.node.model.ReplFs2Grpc diff --git a/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala b/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala index cadec92ab95..9cf61a708f9 100644 --- a/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala +++ b/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala @@ -1,9 +1,9 @@ package coop.rchain.node.dag import cats.effect.{Concurrent, Sync} -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.node.dag.implementation.{BlockStatus, NetworkBlockRequester, RNodeDagManager} +import cats.effect.Ref object RNodeStateSetup { diff --git a/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala b/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala index 3a4877e8de7..83bd9fd3631 100644 --- a/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala +++ b/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala @@ -1,9 +1,9 @@ package coop.rchain.node.dag.implementation -import cats.effect.concurrent.Ref import cats.effect.{Concurrent, Sync} import coop.rchain.sdk.block.BlockRequester import fs2.Stream +import cats.effect.Ref object NetworkBlockRequester { def apply[F[_]: Concurrent, B, BId]( diff --git a/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala b/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala index 20590de65cc..e1139501a07 100644 --- a/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala +++ 
b/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala @@ -1,9 +1,9 @@ package coop.rchain.node.dag.implementation -import cats.effect.concurrent.Ref import cats.effect.{Concurrent, Sync} import coop.rchain.sdk.dag.data.{DagManager, DagView} import fs2.Stream +import cats.effect.Ref object RNodeDagManager { def apply[F[_]: Concurrent, M, MId, S, SId]( diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index 7a412407fc0..b7b67615c03 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -1,7 +1,6 @@ package coop.rchain.node -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Blocker, Concurrent, ConcurrentEffect, ContextShift, IO, Sync} +import cats.effect.{Concurrent, ConcurrentEffect, IO, Sync} import cats.mtl._ import cats.syntax.all._ import cats.{Applicative, Monad, Parallel} @@ -21,6 +20,7 @@ import scala.concurrent.duration._ import scala.io.Source import scala.tools.jline.console._ import scala.util.Using +import cats.effect.{Deferred, Ref} package object effects { diff --git a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala index 63efc9809aa..69704f4fa8e 100644 --- a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala +++ b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala @@ -1,7 +1,7 @@ package coop.rchain.node.instances import cats.effect.Concurrent -import cats.effect.concurrent.{Deferred, MVar, Ref, Semaphore} +import cats.effect.concurrent.MVar import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.casper.blocks.proposer._ @@ -10,6 +10,8 @@ import coop.rchain.casper.state.instances.ProposerState import coop.rchain.shared.Log import fs2.Stream import fs2.concurrent.Queue +import 
cats.effect.{Deferred, Ref} +import cats.effect.std.Semaphore object ProposerInstance { def create[F[_]: Concurrent: Log]( diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala index 26631cc7ee3..ffe0701fb5d 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala @@ -1,7 +1,7 @@ package coop.rchain.node.revvaultexport import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala index a4c93614f90..7ef305cbe1e 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala @@ -1,7 +1,7 @@ package coop.rchain.node.revvaultexport.reporting import cats.Parallel -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.dag.DagRepresentation diff --git a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index 4a2b54d8fee..70525705e3a 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.{Async, Concurrent, ConcurrentEffect, ContextShift, IO, Resource, Sync, Timer} +import cats.effect.{Async, Concurrent, ConcurrentEffect, 
IO, Resource, Sync} import cats.syntax.all._ import com.typesafe.config.Config import coop.rchain.casper.protocol.deploy.v1 @@ -36,6 +36,7 @@ import coop.rchain.shared.RChainScheduler._ import scala.concurrent.ExecutionContext import scala.util.{Failure, Success} +import cats.effect.Temporal object NetworkServers { @@ -44,7 +45,7 @@ object NetworkServers { */ // format: off def create[F[_] - /* Execution */ : ConcurrentEffect: Timer: ContextShift + /* Execution */ : ConcurrentEffect: Temporal: ContextShift /* Comm */ : TransportLayer: NodeDiscovery: KademliaStore: RPConfAsk: ConnectionsCell /* Diagnostics */ : Log: Metrics] // format: on ( @@ -131,7 +132,7 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def protocolServer[F[_]: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Timer]( + def protocolServer[F[_]: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Temporal]( nodeConf: NodeConf, routingMessageQueue: Queue[F, RoutingMessage] ): Resource[F, Unit] = { @@ -163,7 +164,7 @@ object NetworkServers { grpcEC ) - def webApiServer[F[_]: ContextShift: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def webApiServer[F[_]: ContextShift: ConcurrentEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], reportingRoutes: ReportingHttpRoutes[F], @@ -179,7 +180,7 @@ object NetworkServers { reportingRoutes ) - def adminWebApiServer[F[_]: ContextShift: ConcurrentEffect: Timer: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def adminWebApiServer[F[_]: ContextShift: ConcurrentEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], adminWebApi: AdminWebApi[F], @@ -209,7 +210,7 @@ object NetworkServers { } // TODO: check new version of Kamon if supports custom effect - def stop: F[Unit] = Async[F].async { cb => + def stop: F[Unit] = Async[F].async_ { 
cb => Kamon.stopAllReporters().onComplete { case Success(value) => cb(Right(value)) case Failure(error) => cb(Left(error)) diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala index a0efd1b969b..2e10a8c26c9 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala @@ -51,7 +51,7 @@ object NodeCallCtx { t.runAsync(envToEff(fa))(cb) // Async override def async[A](k: (Either[Throwable, A] => Unit) => Unit): ReaderNodeCallCtx[A] = - c.async(k) + c.async_(k) override def asyncF[A]( k: (Either[Throwable, A] => Unit) => ReaderNodeCallCtx[Unit] ): ReaderNodeCallCtx[A] = diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index 801e5d94890..f3b8bd1ffbe 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -1,7 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{ConcurrentEffect, ContextShift, Resource, Sync, Timer} +import cats.effect.{ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol.client.{DeployRuntime, GrpcDeployService, GrpcProposeService} import coop.rchain.crypto.PrivateKey @@ -23,6 +23,7 @@ import java.nio.file.Path import scala.collection.JavaConverters._ import scala.tools.jline.console.ConsoleReader import scala.tools.jline.console.completer.StringsCompleter +import cats.effect.Temporal object NodeMain { @@ -33,7 +34,7 @@ object NodeMain { * * @param options command line options */ - def startNode[F[_]: ConcurrentEffect: Parallel: ContextShift: Timer: ConsoleIO: Log]( + def startNode[F[_]: ConcurrentEffect: Parallel: ContextShift: Temporal: ConsoleIO: Log]( options: commandline.Options ): F[Unit] = Sync[F].defer { // Create merged configuration from 
CLI options and config file @@ -86,7 +87,7 @@ object NodeMain { * @param options command line options * @param console console */ - def runCLI[F[_]: Sync: ConcurrentEffect: ConsoleIO: Timer]( + def runCLI[F[_]: Sync: ConcurrentEffect: ConsoleIO: Temporal]( options: commandline.Options ): F[Unit] = { val grpcPort = diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index 04b23e9ccd6..49752c90a85 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -1,8 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{ConcurrentEffect, ContextShift, Resource, Sync, Timer} -import cats.effect.concurrent.Ref +import cats.effect.{ConcurrentEffect, Resource, Sync} import cats.mtl._ import cats.syntax.all._ import com.typesafe.config.Config @@ -27,11 +26,12 @@ import java.util.concurrent.{Executors, ThreadFactory} import java.util.concurrent.atomic.AtomicLong import scala.concurrent.ExecutionContext import scala.concurrent.duration._ +import cats.effect.{Ref, Temporal} object NodeRuntime { type LocalEnvironment[F[_]] = ApplicativeLocal[F, NodeCallCtx] - def start[F[_]: ConcurrentEffect: Parallel: ContextShift: Timer: Log]( + def start[F[_]: ConcurrentEffect: Parallel: ContextShift: Temporal: Log]( nodeConf: NodeConf, kamonConf: Config )(implicit mainEC: ExecutionContext): F[Unit] = { @@ -44,8 +44,8 @@ object NodeRuntime { * although they can be generated with cats.tagless @autoFunctorK macros but support is missing for IntelliJ. * https://github.com/typelevel/cats-tagless/issues/60 (Cheers, Marcin!!) 
*/ - implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) - implicit val tm: Timer[ReaderNodeCallCtx] = Timer[F].mapK(effToEnv) + implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) + implicit val tm: Temporal[ReaderNodeCallCtx] = Temporal[F].mapK(effToEnv) for { id <- NodeEnvironment.create[F](nodeConf) @@ -75,7 +75,7 @@ object NodeRuntime { } yield () } -class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Timer: ContextShift: LocalEnvironment: Log] private[node] ( +class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Temporal: ContextShift: LocalEnvironment: Log] private[node] ( nodeConf: NodeConf, kamonConf: Config, id: NodeIdentifier @@ -178,7 +178,7 @@ class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Timer: ContextShift: LocalEn for { _ <- NodeDiscovery[F].discover _ <- Connect.findAndConnect[F](Connect.connect[F]) - _ <- Timer[F].sleep(20.seconds) + _ <- Temporal[F].sleep(20.seconds) } yield () } @@ -190,7 +190,7 @@ class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Timer: ContextShift: LocalEn for { _ <- dynamicIpCheck(nodeConf).whenA(nodeConf.protocolServer.dynamicIp) _ <- Connect.clearConnections[F] - _ <- Timer[F].sleep(10.minutes) + _ <- Temporal[F].sleep(10.minutes) } yield () } diff --git a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index efda1cd327a..33d1072df2c 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -1,7 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.concurrent.{Deferred, Ref} -import cats.effect.{Concurrent, ContextShift, Timer} +import cats.effect.Concurrent import cats.mtl.ApplicativeAsk import cats.syntax.all._ import cats.{Parallel, Show} @@ -46,9 +45,10 @@ import coop.rchain.store.KeyValueStoreManager import fs2.Stream import fs2.concurrent.Queue import monix.execution.Scheduler +import cats.effect.{Deferred, Ref, Temporal} object Setup { 
- def setupNodeProgram[F[_]: Concurrent: Parallel: ContextShift: Timer: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( + def setupNodeProgram[F[_]: Concurrent: Parallel: ContextShift: Temporal: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( storeManager: KeyValueStoreManager[F], rpConnections: ConnectionsCell[F], rpConfAsk: ApplicativeAsk[F, RPConf], @@ -67,7 +67,7 @@ object Setup { ] = { // TODO: temporary until Time is removed completely // https://github.com/rchain/rchain/issues/3730 - implicit val time = Time.fromTimer(Timer[F]) + implicit val time = Time.fromTimer(Temporal[F]) for { // Block execution tracker diff --git a/node/src/main/scala/coop/rchain/node/web/Transaction.scala b/node/src/main/scala/coop/rchain/node/web/Transaction.scala index ab19a0ef7ff..fd50c08527b 100644 --- a/node/src/main/scala/coop/rchain/node/web/Transaction.scala +++ b/node/src/main/scala/coop/rchain/node/web/Transaction.scala @@ -1,6 +1,5 @@ package coop.rchain.node.web -import cats.effect.concurrent.Deferred import cats.effect.Concurrent import cats.syntax.all._ import coop.rchain.casper.api.BlockReportApi @@ -22,6 +21,7 @@ import scodec.codecs.utf8 import scala.collection.concurrent.TrieMap import scala.language.higherKinds +import cats.effect.Deferred final case class Transaction( fromAddr: String, diff --git a/node/src/main/scala/coop/rchain/node/web/package.scala b/node/src/main/scala/coop/rchain/node/web/package.scala index fe00c67a814..0c4df641b96 100644 --- a/node/src/main/scala/coop/rchain/node/web/package.scala +++ b/node/src/main/scala/coop/rchain/node/web/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{ConcurrentEffect, ContextShift, Resource, Sync, Timer} +import cats.effect.{ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.comm.discovery.NodeDiscovery import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} @@ -17,6 +17,7 @@ import 
org.http4s.server.blaze.BlazeServerBuilder import org.http4s.server.middleware.CORS import scala.concurrent.duration.{DurationInt, FiniteDuration} +import cats.effect.Temporal package object web { // https://github.com/http4s/http4s/security/advisories/GHSA-52cf-226f-rhr6 @@ -24,7 +25,7 @@ package object web { def corsPolicy[F[_]: Sync](routes: HttpRoutes[F]) = CORS(routes, CORS.DefaultCORSConfig.copy(allowCredentials = false)) - def acquireHttpServer[F[_]: ContextShift: ConcurrentEffect: Timer: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( + def acquireHttpServer[F[_]: ContextShift: ConcurrentEffect: Temporal: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( reporting: Boolean, host: String = "0.0.0.0", httpPort: Int, @@ -59,7 +60,7 @@ package object web { .resource } - def acquireAdminHttpServer[F[_]: ContextShift: ConcurrentEffect: Timer: Log]( + def acquireAdminHttpServer[F[_]: ContextShift: ConcurrentEffect: Temporal: Log]( host: String = "0.0.0.0", httpPort: Int, connectionIdleTimeout: FiniteDuration, diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala index bd9256f7631..d2981ad2125 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala @@ -1,8 +1,7 @@ package coop.rchain.node.mergeablity import cats.Parallel -import cats.effect.concurrent.Deferred -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.dag.BlockDagKeyValueStorage diff --git a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala index 9db3a139a83..f89131ef09a 100644 --- a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala +++ 
b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala @@ -1,7 +1,7 @@ package coop.rchain.node.perf import cats.Parallel -import cats.effect.{Concurrent, ContextShift, IO, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.hashing.Blake2b256Hash diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/Interpreter.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/Interpreter.scala index 03c6c0ea2bf..0375e84744f 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/Interpreter.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/Interpreter.scala @@ -1,7 +1,6 @@ package coop.rchain.rholang.interpreter import cats.effect._ -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.implicits._ @@ -15,6 +14,7 @@ import coop.rchain.rholang.interpreter.errors.{ InterpreterError, OutOfPhlogistonsError } +import cats.effect.Ref final case class EvaluateResult( cost: Cost, diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/Reduce.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/Reduce.scala index 5d857e17bdd..19378731690 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/Reduce.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/Reduce.scala @@ -1,7 +1,6 @@ package coop.rchain.rholang.interpreter import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.{Parallel, Eval => _} import com.google.protobuf.ByteString @@ -29,6 +28,7 @@ import scalapb.GeneratedMessage import scala.collection.SortedSet import scala.collection.immutable.BitSet import scala.util.Try +import cats.effect.Ref /** Reduce is the interface for evaluating Rholang expressions. 
* diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala index ef4f0a51dad..1b7ff8d004b 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala @@ -2,7 +2,6 @@ package coop.rchain.rholang.interpreter import cats.data.Chain import cats.effect._ -import cats.effect.concurrent.Ref import cats.mtl.FunctorTell import cats.syntax.all._ import cats.{Monad, Parallel} @@ -30,6 +29,7 @@ import coop.rchain.shared.Log import monix.execution.Scheduler import scala.concurrent.ExecutionContext +import cats.effect.Ref trait RhoRuntime[F[_]] extends HasCost[F] { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala index aa34578f28f..a79f0248cba 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter import cats._ -import cats.effect.{Blocker, Concurrent, ContextShift, IO, Sync} +import cats.effect.{Concurrent, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models._ diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala index c236d760e04..db316514214 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala @@ -1,7 +1,6 @@ package coop.rchain.rholang.interpreter import cats.effect.Concurrent -import cats.effect.concurrent.Ref import cats.syntax.all._ import com.google.protobuf.ByteString import com.typesafe.scalalogging.Logger 
@@ -21,6 +20,7 @@ import coop.rchain.rspace.{ContResult, Result} import coop.rchain.shared.Base16 import scala.util.Try +import cats.effect.Ref //TODO: Make each of the system processes into a case class, // so that implementation is not repetitive. diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala index 4b71e6a8103..168b08f551b 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala @@ -8,6 +8,7 @@ import cats.mtl._ import cats.Monad import coop.rchain.metrics.{Metrics, MetricsSemaphore} +import cats.effect.Ref object CostAccounting { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala index 0cfc48f21cb..e3032ac58d2 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala @@ -3,12 +3,12 @@ package coop.rchain.rholang.interpreter import cats._ import cats.data._ import cats.effect.Sync -import cats.effect.concurrent.Semaphore import cats.syntax.all._ import cats.mtl._ import coop.rchain.catscontrib.ski.kp import coop.rchain.rholang.interpreter.errors.OutOfPhlogistonsError +import cats.effect.std.Semaphore package object accounting extends Costs { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/dispatch.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/dispatch.scala index 962f484daf6..0921fdd3d6d 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/dispatch.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/dispatch.scala @@ -2,12 +2,12 @@ package coop.rchain.rholang.interpreter import 
cats.Parallel import cats.effect.Sync -import cats.effect.concurrent.Ref import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.models.TaggedContinuation.TaggedCont.{Empty, ParBody, ScalaBodyRef} import coop.rchain.models._ import coop.rchain.rholang.interpreter.RhoRuntime.RhoTuplespace import coop.rchain.rholang.interpreter.accounting._ +import cats.effect.Ref trait Dispatch[M[_], A, K] { def dispatch(continuation: K, dataList: Seq[A]): M[Unit] diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala index 2e9bef73901..b38d02505b6 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala @@ -3,13 +3,13 @@ import cats.mtl.lifting.MonadLayerControl import cats.{~>, Alternative, Applicative, Functor, FunctorFilter, Monad, MonadError, MonoidK} import cats.data.OptionT import cats.effect.Sync -import cats.effect.concurrent.Ref import coop.rchain.catscontrib.MonadTrans import coop.rchain.rholang.interpreter.matcher.StreamT.{SCons, SNil, Step} import scala.collection.immutable.Stream import scala.collection.immutable.Stream.Cons import scala.util.{Left, Right} +import cats.effect.Ref /** * Shamelessly transcribed minimal version of Gabriel Gonzalez's beginner-friendly ListT diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala index 2093d1b13c3..8b204c53783 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala @@ -2,7 +2,6 @@ package coop.rchain.rholang.interpreter.merging import cats.Monad import cats.effect.Concurrent -import cats.effect.concurrent.Ref 
import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random @@ -20,6 +19,7 @@ import coop.rchain.scodec.codecs import scodec.Codec import scodec.bits.ByteVector import scodec.codecs.{bytes, int64, uint16, variableSizeBytes, vlong} +import cats.effect.Ref object RholangMergingLogic { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/package.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/package.scala index a43282531c0..c71277393d1 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/package.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/package.scala @@ -1,10 +1,10 @@ package coop.rchain.rholang -import cats.effect.Bracket +import cats.effect.MonadCancel package object interpreter { - type _error[F[_]] = Bracket[F, Throwable] + type _error[F[_]] = MonadCancel[F, Throwable] def _error[F[_]](implicit ev: _error[F]): _error[F] = ev diff --git a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala index 8b71b4ef9e9..05f1177c80e 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala @@ -2,7 +2,7 @@ package coop.rchain.rholang import cats.Parallel import cats.effect.ExitCase.Error -import cats.effect.{Concurrent, ContextShift, Resource, Sync} +import cats.effect.{Concurrent, Resource, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.metrics.{Metrics, Span} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala index dddfe461f9a..6a1770869e7 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala @@ -2,7 +2,7 @@ package 
coop.rchain.rholang.interpreter import cats.Parallel import cats.effect.testing.scalatest.AsyncIOSpec -import cats.effect.{Concurrent, ContextShift, IO} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala index eea3fcf6dca..902cd13e014 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala @@ -2,7 +2,6 @@ package coop.rchain.rholang.interpreter import cats.Parallel import cats.effect.{IO, Sync} -import cats.effect.concurrent.Ref import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.{Metrics, Span} import coop.rchain.models.Expr.ExprInstance.{EVarBody, GString} @@ -24,6 +23,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ +import cats.effect.Ref class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqualsSupport { diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RholangOnlyDispatcher.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RholangOnlyDispatcher.scala index 3c07d0597fc..849366864e7 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RholangOnlyDispatcher.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RholangOnlyDispatcher.scala @@ -2,13 +2,13 @@ package coop.rchain.rholang.interpreter import cats.Parallel import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.models.TaggedContinuation.TaggedCont.{Empty, ParBody, ScalaBodyRef} import coop.rchain.models._ import 
coop.rchain.rholang.interpreter.RhoRuntime.RhoTuplespace import coop.rchain.rholang.interpreter.accounting._ +import cats.effect.Ref object RholangOnlyDispatcher { diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala index 87b69e0d2ca..0d55f9ead94 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala @@ -3,10 +3,10 @@ package coop.rchain.rholang.interpreter.accounting import cats._ import cats.data._ import cats.effect._ -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.mtl._ import cats.mtl.implicits._ +import cats.effect.Ref package object utils { diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala index 5ac392770ec..055a807a195 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.bench -import cats.effect.{ContextShift, Sync} +import cats.effect.Sync import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} diff --git a/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala b/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala index 395ca843d8b..92bbeba86ed 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala @@ -2,11 +2,11 @@ package coop.rchain.rspace import cats.syntax.all._ import cats.effect._ -import cats.effect.concurrent.{Deferred, Ref} import coop.rchain.rspace.history.HistoryReaderBase import coop.rchain.rspace.internal._ import scala.collection.immutable.Map +import cats.effect.{Deferred, 
Ref} trait HotStore[F[_], C, P, A, K] { def getContinuations(channels: Seq[C]): F[Seq[WaitingContinuation[P, K]]] diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala index 473ba8e9af2..2ceaf64ecf7 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala @@ -1,8 +1,7 @@ package coop.rchain.rspace import cats.Applicative -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, ContextShift, Sync} +import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.catscontrib._ @@ -20,6 +19,7 @@ import monix.execution.atomic.AtomicAny import scala.collection.SortedSet import scala.concurrent.{ExecutionContext, SyncVar} import scala.util.Random +import cats.effect.Ref abstract class RSpaceOps[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala index 5c68b076bff..edb1493380f 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala @@ -2,7 +2,6 @@ package coop.rchain.rspace import cats.Parallel import cats.effect._ -import cats.effect.concurrent.Ref import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.metrics.{Metrics, Span} @@ -23,6 +22,7 @@ import monix.execution.atomic.AtomicAny import scala.collection.SortedSet import scala.concurrent.ExecutionContext +import cats.effect.Ref /** * ReportingRspace works exactly like how ReplayRspace works. 
It can replay the deploy and try to find if the diff --git a/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala b/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala index d56215fb2af..e027f1ed88c 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala @@ -1,6 +1,5 @@ package coop.rchain.rspace.concurrent -import cats.effect.concurrent.Semaphore import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.catscontrib.ski.kp @@ -8,6 +7,7 @@ import coop.rchain.metrics.Metrics import coop.rchain.metrics.Metrics.Source import scala.collection.concurrent.TrieMap +import cats.effect.std.Semaphore class MultiLock[F[_]: Concurrent: Metrics, K](metricSource: Metrics.Source) { diff --git a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala index 1d5184cb334..3c7c376f96a 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.examples -import cats.effect.{Concurrent, ContextShift, IO} +import cats.effect.{Concurrent, IO} import cats.{Applicative, Id} import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala index 78bac400c21..adc67498b46 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala @@ -1,7 +1,6 @@ package coop.rchain.rspace.merger import cats.effect.Concurrent -import cats.effect.concurrent.Ref import cats.kernel.Monoid import cats.syntax.all._ import 
coop.rchain.rspace.merger.EventLogMergingLogic.{ @@ -12,6 +11,7 @@ import coop.rchain.rspace.trace.{COMM, Consume, Event, Produce} import coop.rchain.shared.syntax._ import scala.collection.immutable.Set +import cats.effect.Ref final case class EventLogIndex( producesLinear: Set[Produce], diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala index 85c0136d927..1ffb6a2ba9d 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala @@ -2,7 +2,6 @@ package coop.rchain.rspace.merger import cats.Monoid import cats.effect.Concurrent -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.rspace.hashing.{Blake2b256Hash, StableHashProvider} import coop.rchain.rspace.history.{ColdStoreInstances, DataLeaf, HistoryReaderBinary} @@ -15,6 +14,7 @@ import coop.rchain.shared.Serialize import coop.rchain.shared.syntax._ import fs2.Stream import scodec.bits.ByteVector +import cats.effect.Ref /** * Datum changes are referenced by channel, continuation changes are references by consume. 
diff --git a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceStateManagerImpl.scala b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceStateManagerImpl.scala index 0407b7f4d36..7fa4f2478d0 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceStateManagerImpl.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceStateManagerImpl.scala @@ -1,7 +1,6 @@ package coop.rchain.rspace.state.instances import cats.effect.Sync -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.catscontrib.Catscontrib._ import coop.rchain.catscontrib.ski._ diff --git a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala index eeeaaf82379..5252b087f44 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala @@ -1,7 +1,6 @@ package coop.rchain.rspace import cats.effect.IO -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.examples.StringExamples.implicits._ @@ -19,6 +18,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scodec.bits.ByteVector import coop.rchain.shared.RChainScheduler._ +import cats.effect.Ref class ExportImportTests extends AnyFlatSpec diff --git a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala index 1e5bd57991c..461a9e0ca1c 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala @@ -2,7 +2,6 @@ package coop.rchain.rspace import cats.Parallel import cats.effect.{Concurrent, IO, Sync} -import cats.effect.concurrent.Ref import cats.syntax.all._ import coop.rchain.rspace.examples.StringExamples.{StringsCaptor, _} import 
coop.rchain.rspace.examples.StringExamples.implicits._ @@ -20,6 +19,7 @@ import scodec.bits.ByteVector import scala.collection.SortedSet import scala.concurrent.duration._ import scala.util.Random +import cats.effect.Ref trait HotStoreSpec[F[_]] extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks { diff --git a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala index 906d3265760..f59da743f09 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala @@ -2,7 +2,6 @@ package coop.rchain.rspace import cats.Functor import cats.effect.IO -import cats.effect.concurrent.Ref import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.catscontrib.ski._ @@ -26,6 +25,7 @@ import org.scalatestplus.scalacheck._ import scala.collection.SortedSet import scala.util.Random import scala.util.Random.shuffle +import cats.effect.Ref object SchedulerPools { implicit val global = Scheduler.fixedPool("GlobalPool", 20) diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala index 3ee00536d8c..a691d240b36 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala @@ -2,7 +2,7 @@ package coop.rchain.rspace import cats.Parallel.Aux import cats._ -import cats.effect.{Concurrent, ContextShift, IO} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import coop.rchain.rspace.examples.AddressBookExample import coop.rchain.rspace.examples.AddressBookExample._ diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala index ab9668d0843..16313b4f04c 100644 --- 
a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala @@ -1,7 +1,6 @@ package coop.rchain.rspace import cats.effect._ -import cats.effect.concurrent.Ref import cats.syntax.all._ import cats.{Parallel, _} import com.typesafe.scalalogging.Logger @@ -20,6 +19,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.ExecutionContext.Implicits.global +import cats.effect.Ref trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with OptionValues { type T = ISpace[F, C, P, A, K] diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala index 10a3384b97c..af6545fffca 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala @@ -87,7 +87,7 @@ class MultiLockTest extends AnyFlatSpec with Matchers { } "FunctionalMultiLock" should "not allow concurrent modifications of same keys" in { - import cats.effect.{Concurrent, ContextShift, IO} + import cats.effect.{Concurrent, IO} import cats.implicits._ implicit val metrics: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO] diff --git a/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala b/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala index f5ace49096a..a2e5f8fdc6c 100644 --- a/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala +++ b/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala @@ -1,13 +1,13 @@ package coop.rchain.fs2 -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, Timer} +import cats.effect.Concurrent import cats.syntax.all._ import fs2.Stream import fs2.Stream._ import java.util.concurrent.TimeUnit import scala.concurrent.duration.{FiniteDuration, NANOSECONDS} +import cats.effect.{Ref, Temporal} trait 
Fs2StreamSyntax { implicit final def sharedSyntaxFs2Stream[F[_], A](stream: Stream[F, A]): Fs2StreamOps[F, A] = @@ -79,9 +79,9 @@ class Fs2StreamOps[F[_], A]( def evalOnIdle[B]( action: F[B], timeout: FiniteDuration - )(implicit c: Concurrent[F], t: Timer[F]): Stream[F, A] = { + )(implicit c: Concurrent[F], t: Temporal[F]): Stream[F, A] = { // Current time in nano seconds - val nanoTime = Timer[F].clock.monotonic(NANOSECONDS) + val nanoTime = Temporal[F].clock.monotonic(NANOSECONDS) // Timeout in nano seconds val timeoutNano = timeout.toNanos @@ -112,7 +112,7 @@ class Fs2StreamOps[F[_], A]( // Stream to execute action when timeout is reached, wait for next checking val nextStream = Stream.eval(elapsed) flatMap { case (sleep, isTimeout) => - Stream.eval(action).whenA(isTimeout) ++ Stream.eval(Timer[F].sleep(sleep)) + Stream.eval(action).whenA(isTimeout) ++ Stream.eval(Temporal[F].sleep(sleep)) } // On each element reset idle timer to current time | run next check recursively diff --git a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala index 2a5fbe6661a..a08cc554b4e 100644 --- a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala +++ b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala @@ -1,10 +1,10 @@ package coop.rchain.metrics import cats.effect._ -import cats.effect.concurrent.Semaphore import cats.syntax.all._ import coop.rchain.catscontrib.ski.kp +import cats.effect.std.Semaphore class MetricsSemaphore[F[_]: Sync: Metrics]( underlying: Semaphore[F] diff --git a/shared/src/main/scala/coop/rchain/shared/MaybeCell.scala b/shared/src/main/scala/coop/rchain/shared/MaybeCell.scala index 36f8ec0d4ab..432daffcd67 100644 --- a/shared/src/main/scala/coop/rchain/shared/MaybeCell.scala +++ b/shared/src/main/scala/coop/rchain/shared/MaybeCell.scala @@ -1,7 +1,7 @@ package coop.rchain.shared import cats.effect.Sync -import cats.effect.concurrent.Ref import 
cats.syntax.all._ +import cats.effect.Ref trait MaybeCell[F[_], A] { def get: F[Option[A]] diff --git a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala index 3e27c3e9305..f44c6cd0233 100644 --- a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala +++ b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala @@ -1,6 +1,6 @@ package coop.rchain.shared -import cats.effect.{ContextShift, IO} +import cats.effect.IO import java.util.concurrent.{Executors, ThreadFactory} import java.util.concurrent.atomic.AtomicLong diff --git a/shared/src/main/scala/coop/rchain/shared/Time.scala b/shared/src/main/scala/coop/rchain/shared/Time.scala index ddb234f227f..681e59e184f 100644 --- a/shared/src/main/scala/coop/rchain/shared/Time.scala +++ b/shared/src/main/scala/coop/rchain/shared/Time.scala @@ -2,12 +2,12 @@ package coop.rchain.shared import cats.Monad import cats.data.EitherT -import cats.effect.Timer import cats.tagless._ import coop.rchain.catscontrib.Catscontrib._ import coop.rchain.catscontrib._ import scala.concurrent.duration.{FiniteDuration, MILLISECONDS, NANOSECONDS} +import cats.effect.Temporal // TODO: there is no reason for custom Timer definition, remove it // - for testing TestScheduler (monix) ot TestContext (cats-laws) (TestControl cats.effect 3) should be used @@ -33,7 +33,7 @@ object Time extends TimeInstances { /** * Default implementation from cats [[Timer]] */ - def fromTimer[F[_]](implicit timer: Timer[F]): Time[F] = + def fromTimer[F[_]](implicit timer: Temporal[F]): Time[F] = new Time[F] { def currentMillis: F[Long] = timer.clock.realTime(MILLISECONDS) def nanoTime: F[Long] = timer.clock.monotonic(NANOSECONDS) diff --git a/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala b/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala index 31737f7f3b8..5864c06a38d 100644 --- 
a/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala +++ b/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala @@ -2,8 +2,8 @@ package coop.rchain.store import cats.Applicative import cats.effect.Concurrent -import cats.effect.concurrent.{Deferred, Ref} import cats.syntax.all._ +import cats.effect.{Deferred, Ref} trait KeyValueCache[F[_], K, V] { def get(key: K, fallback: F[V]): F[V] diff --git a/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala b/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala index 5612f3b5895..4c60682a208 100644 --- a/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala +++ b/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala @@ -1,8 +1,8 @@ package coop.rchain.store import cats.effect.Concurrent -import cats.effect.concurrent.{Deferred, Ref} import cats.syntax.all._ +import cats.effect.{Deferred, Ref} class LazyKeyValueCache[F[_]: Concurrent, K, V] private[LazyKeyValueCache] ( cache: Ref[F, Map[K, Deferred[F, V]]], diff --git a/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala b/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala index 44bce42100c..a2f0c044524 100644 --- a/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala +++ b/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala @@ -1,12 +1,12 @@ package coop.rchain.store -import cats.effect.concurrent.{Deferred, Ref} import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.shared.Log import coop.rchain.store.LmdbDirStoreManager.{Db, LmdbEnvConfig} import java.nio.file.Path +import cats.effect.{Deferred, Ref} object LmdbDirStoreManager { // TODO: Return instance as Resource with the call to _shutdown_. 
diff --git a/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala b/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala index 6e1f62fcd8e..6f75740c966 100644 --- a/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala +++ b/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala @@ -3,13 +3,13 @@ package coop.rchain.store import java.nio.ByteBuffer import java.nio.file.{Files, Path} -import cats.effect.concurrent.{Deferred, Ref} import cats.effect.{Concurrent, Sync} import cats.syntax.all._ import coop.rchain.shared.{Log, LogSource} import enumeratum.{Enum, EnumEntry} import org.lmdbjava.ByteBufferProxy.PROXY_SAFE import org.lmdbjava.{DbiFlags, Env, EnvFlags} +import cats.effect.{Deferred, Ref} object LmdbStoreManager { def apply[F[_]: Concurrent: Log](dirPath: Path, maxEnvSize: Long): F[KeyValueStoreManager[F]] = diff --git a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala index e404dc5d595..df8fe12b6b2 100644 --- a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala @@ -1,7 +1,6 @@ package coop.rchain.shared -import cats.effect.concurrent.Ref -import cats.effect.{Concurrent, IO, Timer} +import cats.effect.{Concurrent, IO} import cats.syntax.all._ import coop.rchain.shared.syntax.sharedSyntaxFs2Stream import fs2.Stream @@ -12,18 +11,20 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.util.Success import RChainScheduler._ +import cats.effect.{Ref, Temporal} class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { /** * Creates a Stream of 2 elements creating String "11", if timeout occurs it will insert zeroes e.g. 
"101" */ - def test[F[_]: Concurrent: Timer](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { st => - val addOne = Stream.eval(st.updateAndGet(_ + "1")) - val pause = Stream.sleep(1.second)(Timer[F]).drain - val addZero = st.update(_ + "0") + def test[F[_]: Concurrent: Temporal](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { + st => + val addOne = Stream.eval(st.updateAndGet(_ + "1")) + val pause = Stream.sleep(1.second)(Temporal[F]).drain + val addZero = st.update(_ + "0") - (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError + (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError } // Helper to construct success result diff --git a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala index 2093db64b5c..a694953dd3d 100644 --- a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala +++ b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala @@ -1,7 +1,7 @@ package coop.rchain.shared import cats.Functor -import cats.effect.{ContextShift, IO} +import cats.effect.IO import cats.syntax.functor._ import monix.execution.Scheduler import org.scalatest.Assertion From d22a958339513a697586fd97bd0e46400410e5eb Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 5 Apr 2023 22:59:47 +0400 Subject: [PATCH 12/17] Update CE --- project/Dependencies.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 992ca0c88a1..de9d60f5aca 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -4,10 +4,10 @@ object Dependencies { val osClassifier: String = Detector.detect(Seq("fedora")).osClassifier - val catsVersion = "2.7.0" - val catsEffectVersion = "2.5.4" + val catsVersion = "2.9.0" + val catsEffectVersion = "3.3.14" val catsMtlVersion = "0.7.1" - val fs2Version = "2.5.10" + val 
fs2Version = "3.6.1" val monixVersion = "3.4.0" val http4sVersion = "0.21.24" val endpointsVersion = "1.4.0" From d3621254aef28b788ae761a9b08cbf1cb84ad232 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Wed, 5 Apr 2023 23:44:42 +0400 Subject: [PATCH 13/17] Concurrent => Async --- .../blockstorage/BlockStoreSyntax.scala | 4 ++-- .../dag/BlockDagStorageSyntax.scala | 4 ++-- .../rchain/casper/MultiParentCasper.scala | 8 ++++---- .../scala/coop/rchain/casper/Validate.scala | 6 +++--- .../coop/rchain/casper/api/BlockApiImpl.scala | 6 +++--- .../rchain/casper/api/BlockReportApi.scala | 6 +++--- .../rchain/casper/blocks/BlockProcessor.scala | 6 +++--- .../rchain/casper/blocks/BlockReceiver.scala | 6 +++--- .../casper/blocks/proposer/BlockCreator.scala | 4 ++-- .../casper/blocks/proposer/Proposer.scala | 9 ++++----- .../casper/dag/BlockDagKeyValueStorage.scala | 8 ++++---- .../casper/engine/LfsBlockRequester.scala | 4 ++-- .../engine/LfsTupleSpaceRequester.scala | 4 ++-- .../rchain/casper/engine/NodeLaunch.scala | 8 ++++---- .../rchain/casper/engine/NodeRunning.scala | 6 +++--- .../rchain/casper/engine/NodeSyncing.scala | 6 +++--- .../coop/rchain/casper/genesis/Genesis.scala | 4 ++-- .../rchain/casper/merging/BlockIndex.scala | 8 ++++---- .../casper/merging/DeployChainIndex.scala | 4 ++-- .../rchain/casper/merging/DeployIndex.scala | 4 ++-- .../rchain/casper/merging/MergeScope.scala | 6 +++--- .../rchain/casper/protocol/CommUtil.scala | 2 +- .../protocol/client/DeployService.scala | 4 ++-- .../protocol/client/ProposeService.scala | 4 ++-- .../casper/reporting/ReportingCasper.scala | 8 ++++---- .../casper/rholang/InterpreterUtil.scala | 8 ++++---- .../casper/rholang/RuntimeManager.scala | 8 ++++---- .../rholang/syntax/RuntimeManagerSyntax.scala | 6 +++--- .../storage/RNodeKeyValueStoreManager.scala | 4 ++-- .../casper/api/BondedStatusAPITest.scala | 6 +++--- .../casper/api/ExploratoryDeployAPITest.scala | 4 ++-- 
.../batch2/BlockReceiverEffectsSpec.scala | 4 ++-- .../casper/batch2/LmdbKeyValueStoreSpec.scala | 4 ++-- .../engine/LfsBlockRequesterEffectsSpec.scala | 4 ++-- .../engine/LfsStateRequesterEffectsSpec.scala | 4 ++-- .../rchain/casper/genesis/GenesisTest.scala | 16 ++++++++-------- .../casper/helper/BlockApiFixture.scala | 6 +++--- .../helper/BlockDagStorageFixture.scala | 4 ++-- .../casper/helper/BlockDataContract.scala | 4 ++-- .../rchain/casper/helper/BlockGenerator.scala | 6 +++--- .../casper/helper/DeployerIdContract.scala | 4 ++-- .../casper/helper/RhoLoggerContract.scala | 4 ++-- .../coop/rchain/casper/helper/RhoSpec.scala | 4 ++-- .../casper/helper/Secp256k1SignContract.scala | 4 ++-- .../casper/helper/SysAuthTokenContract.scala | 4 ++-- .../coop/rchain/casper/helper/TestNode.scala | 10 +++++----- .../casper/helper/TestResultCollector.scala | 6 +++--- .../rchain/casper/helper/TestRhoRuntime.scala | 4 ++-- .../merging/MergeNumberChannelSpec.scala | 4 ++-- .../casper/rholang/InterpreterUtilTest.scala | 4 ++-- .../rchain/casper/rholang/Resources.scala | 8 ++++---- .../comm/discovery/GrpcKademliaRPC.scala | 4 ++-- .../coop/rchain/comm/discovery/package.scala | 4 ++-- .../comm/transport/GrpcTransportClient.scala | 6 +++--- .../transport/GrpcTransportReceiver.scala | 6 +++--- .../comm/transport/GrpcTransportServer.scala | 6 +++--- .../comm/transport/StreamObservable.scala | 6 +++--- .../comm/rp/HandleProtocolHandshakeSpec.scala | 4 ++-- .../rchain/node/api/DeployGrpcServiceV1.scala | 4 ++-- .../scala/coop/rchain/node/api/package.scala | 6 +++--- .../rchain/node/dag/RNodeStateSetup.scala | 4 ++-- .../NetworkBlockRequester.scala | 6 +++--- .../dag/implementation/RNodeDagManager.scala | 6 +++--- .../coop/rchain/node/effects/ReplClient.scala | 4 ++-- .../coop/rchain/node/effects/package.scala | 8 ++++---- .../node/instances/ProposerInstance.scala | 4 ++-- .../node/revvaultexport/StateBalances.scala | 4 ++-- .../mainnet1/StateBalanceMain.scala | 2 +- 
.../reporting/TransactionBalanceMain.scala | 2 +- .../reporting/TransactionBalances.scala | 4 ++-- .../rchain/node/runtime/GrpcServices.scala | 4 ++-- .../rchain/node/runtime/NetworkServers.scala | 16 ++++++++-------- .../rchain/node/runtime/NodeCallCtx.scala | 7 ++++--- .../coop/rchain/node/runtime/NodeMain.scala | 6 +++--- .../rchain/node/runtime/NodeRuntime.scala | 6 +++--- .../coop/rchain/node/runtime/Setup.scala | 4 ++-- .../rchain/node/web/ReportingRoutes.scala | 4 ++-- .../coop/rchain/node/web/Transaction.scala | 12 ++++++------ .../coop/rchain/node/web/WebApiRoutesV1.scala | 8 ++++---- .../scala/coop/rchain/node/web/package.scala | 6 +++--- .../node/mergeablity/ComputeMerge.scala | 4 ++-- .../rchain/node/perf/HistoryGenKeySpec.scala | 10 +++++----- .../revvaultexport/RhoTrieTraverserTest.scala | 4 ++-- .../rholang/interpreter/ContractCall.scala | 4 ++-- .../rholang/interpreter/RhoRuntime.scala | 16 ++++++++-------- .../rholang/interpreter/RholangCLI.scala | 4 ++-- .../rholang/interpreter/SystemProcesses.scala | 6 +++--- .../accounting/CostAccounting.scala | 12 ++++++------ .../merging/RholangMergingLogic.scala | 4 ++-- .../interpreter/storage/StoragePrinter.scala | 6 +++--- .../scala/coop/rchain/rholang/Resources.scala | 10 +++++----- .../interpreter/BigIntNormalizerSpec.scala | 4 ++-- .../accounting/CostAccountingSpec.scala | 2 +- .../coop/rchain/roscala/util/LockedMap.scala | 2 +- .../coop/rchain/roscala/util/syntax.scala | 2 +- .../scala/coop/rchain/rspace/HotStore.scala | 8 ++++---- .../scala/coop/rchain/rspace/RSpace.scala | 10 +++++----- .../scala/coop/rchain/rspace/RSpaceOps.scala | 4 ++-- .../coop/rchain/rspace/ReplayRSpace.scala | 4 ++-- .../coop/rchain/rspace/ReportingRspace.scala | 6 +++--- .../rchain/rspace/concurrent/MultiLock.scala | 6 +++--- .../rspace/concurrent/TwoStepLock.scala | 4 ++-- .../rspace/examples/AddressBookExample.scala | 4 ++-- .../coop/rchain/rspace/history/History.scala | 4 ++-- .../rspace/history/HistoryRepository.scala | 4 
++-- .../history/HistoryRepositoryImpl.scala | 6 +++--- .../instances/RSpaceHistoryReaderImpl.scala | 4 ++-- .../rchain/rspace/merger/EventLogIndex.scala | 4 ++-- .../rchain/rspace/merger/StateChange.scala | 6 +++--- .../rspace/merger/StateChangeMerger.scala | 4 ++-- .../rspace/state/RSpaceExporterSyntax.scala | 4 ++-- .../rchain/rspace/state/RSpaceImporter.scala | 2 +- .../state/exporters/RSpaceExporterDisk.scala | 4 ++-- .../state/instances/RSpaceExporterStore.scala | 6 +++--- .../state/instances/RSpaceImporterStore.scala | 4 ++-- .../coop/rchain/rspace/HotStoreSpec.scala | 4 ++-- .../rchain/rspace/StorageExamplesTests.scala | 2 +- .../coop/rchain/rspace/StorageTestsBase.scala | 4 ++-- .../rspace/concurrent/MultiLockTest.scala | 2 +- .../effect/implicits/package.scala | 2 +- .../coop/rchain/fs2/Fs2StreamSyntax.scala | 19 +++++++++---------- .../rchain/metrics/MetricsSemaphore.scala | 4 ++-- .../coop/rchain/shared/RChainScheduler.scala | 9 +-------- .../rchain/store/LazyAdHocKeyValueCache.scala | 6 +++--- .../coop/rchain/store/LazyKeyValueCache.scala | 9 ++++----- .../rchain/store/LmdbDirStoreManager.scala | 7 +++---- .../coop/rchain/store/LmdbStoreManager.scala | 8 +++----- .../rchain/shared/Fs2ExtensionsSpec.scala | 13 ++++++------- 128 files changed, 355 insertions(+), 368 deletions(-) diff --git a/block-storage/src/main/scala/coop/rchain/blockstorage/BlockStoreSyntax.scala b/block-storage/src/main/scala/coop/rchain/blockstorage/BlockStoreSyntax.scala index 8deb803629e..8d439a3f03e 100644 --- a/block-storage/src/main/scala/coop/rchain/blockstorage/BlockStoreSyntax.scala +++ b/block-storage/src/main/scala/coop/rchain/blockstorage/BlockStoreSyntax.scala @@ -1,6 +1,6 @@ package coop.rchain.blockstorage -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.casper.PrettyPrinter @@ -37,7 +37,7 @@ final class BlockStoreOps[F[_]]( def getUnsafe( hashes: 
Seq[BlockHash] - )(implicit concurrent: Concurrent[F]): fs2.Stream[F, BlockMessage] = { + )(implicit concurrent: Async[F]): fs2.Stream[F, BlockMessage] = { val streams = hashes.map(h => fs2.Stream.eval(getUnsafe(h))) fs2.Stream .emits(streams) diff --git a/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockDagStorageSyntax.scala b/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockDagStorageSyntax.scala index 2b55d6a186a..ec782a299c7 100644 --- a/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockDagStorageSyntax.scala +++ b/block-storage/src/main/scala/coop/rchain/blockstorage/dag/BlockDagStorageSyntax.scala @@ -1,6 +1,6 @@ package coop.rchain.blockstorage.dag -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.casper.protocol.BlockMessage @@ -26,7 +26,7 @@ final class BlockDagStorageOps[F[_]]( def lookupUnsafe( hashes: Seq[BlockHash] - )(implicit concurrent: Concurrent[F]): F[List[BlockMetadata]] = { + )(implicit concurrent: Async[F]): F[List[BlockMetadata]] = { val streams = hashes.map(h => fs2.Stream.eval(lookupUnsafe(h))) fs2.Stream.emits(streams).parJoinUnbounded.compile.toList } diff --git a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala index 18df68d69a9..914fc5bbcb4 100644 --- a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala @@ -1,7 +1,7 @@ package coop.rchain.casper import cats.data.EitherT -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -35,7 +35,7 @@ object MultiParentCasper { // Required to enable protection from re-submitting duplicate deploys val deployLifespan = 50 - def getPreStateForNewBlock[F[_]: 
Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log] + def getPreStateForNewBlock[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log] : F[ParentsMergedState] = for { dag <- BlockDagStorage[F].getRepresentation @@ -48,7 +48,7 @@ object MultiParentCasper { preState <- getPreStateForParents(parentHashes) } yield preState - def getPreStateForParents[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log]( + def getPreStateForParents[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log]( parentHashes: Set[BlockHash] ): F[ParentsMergedState] = for { @@ -169,7 +169,7 @@ object MultiParentCasper { rejectedDeploys = csRejectedDeploys ) - def validate[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validate[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: Long diff --git a/casper/src/main/scala/coop/rchain/casper/Validate.scala b/casper/src/main/scala/coop/rchain/casper/Validate.scala index 89f2ec5638a..3552d55e59d 100644 --- a/casper/src/main/scala/coop/rchain/casper/Validate.scala +++ b/casper/src/main/scala/coop/rchain/casper/Validate.scala @@ -1,7 +1,7 @@ package coop.rchain.casper import cats.data.EitherT -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import cats.{Applicative, Monad} import com.google.protobuf.ByteString @@ -359,7 +359,7 @@ object Validate { } } yield result - def bondsCache[F[_]: Concurrent: RuntimeManager: Log]( + def bondsCache[F[_]: Async: RuntimeManager: Log]( b: BlockMessage ): F[ValidBlockProcessing] = { val bonds = b.bonds @@ -379,7 +379,7 @@ object Validate { /** * All of deploys must have greater or equal phloPrice then minPhloPrice */ - def phloPrice[F[_]: Log: Concurrent]( + def phloPrice[F[_]: Log: Async]( b: BlockMessage, minPhloPrice: Long ): F[ValidBlockProcessing] = diff --git 
a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala index bd607f93c17..a771f605e8f 100644 --- a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala +++ b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.api import cats.data.OptionT -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -47,7 +47,7 @@ import scala.collection.immutable.SortedMap import cats.effect.Ref object BlockApiImpl { - def apply[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( + def apply[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( validatorOpt: Option[ValidatorIdentity], networkId: String, shardId: String, @@ -89,7 +89,7 @@ object BlockApiImpl { final case class BlockRetrievalError(message: String) extends Exception } -class BlockApiImpl[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( +class BlockApiImpl[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( validatorOpt: Option[ValidatorIdentity], networkId: String, shardId: String, diff --git a/casper/src/main/scala/coop/rchain/casper/api/BlockReportApi.scala b/casper/src/main/scala/coop/rchain/casper/api/BlockReportApi.scala index 4a2f1f6d3e2..39f643f06a4 100644 --- a/casper/src/main/scala/coop/rchain/casper/api/BlockReportApi.scala +++ b/casper/src/main/scala/coop/rchain/casper/api/BlockReportApi.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.api import cats.data.EitherT -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -22,7 +22,7 @@ import coop.rchain.shared.syntax._ import scala.collection.concurrent.TrieMap -class BlockReportApi[F[_]: 
Concurrent: BlockStore: Metrics: Log]( +class BlockReportApi[F[_]: Async: BlockStore: Metrics: Log]( reportingCasper: ReportingCasper[F], reportStore: ReportStore[F], validatorIdentityOpt: Option[ValidatorIdentity] @@ -106,7 +106,7 @@ class BlockReportApi[F[_]: Concurrent: BlockStore: Metrics: Log]( } object BlockReportApi { - def apply[F[_]: Concurrent: BlockStore: Metrics: Log]( + def apply[F[_]: Async: BlockStore: Metrics: Log]( reportingCasper: ReportingCasper[F], reportStore: ReportStore[F], validatorIdentityOpt: Option[ValidatorIdentity] diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala index 3d4701ac31c..c0cd39e5989 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.blocks -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.BlockDagStorage @@ -22,7 +22,7 @@ object BlockProcessor { * - input block must have all dependencies in the DAG * - blocks created by node itself are not processed here, but in Proposer */ - def apply[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def apply[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( inputBlocks: Stream[F, BlockMessage], validatedQueue: Queue[F, BlockMessage], shardId: String, @@ -41,7 +41,7 @@ object BlockProcessor { } yield (block, result) } - def validateAndAddToDag[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def validateAndAddToDag[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: 
Long diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala index 81ea06b64eb..d9ae1dde5ac 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.blocks import cats.Show -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -176,7 +176,7 @@ final case class BlockReceiverState[MId: Show] private ( } object BlockReceiver { - def apply[F[_]: Concurrent: BlockStore: BlockDagStorage: BlockRetriever: Log]( + def apply[F[_]: Async: BlockStore: BlockDagStorage: BlockRetriever: Log]( state: Ref[F, BlockReceiverState[BlockHash]], incomingBlocksStream: Stream[F, BlockMessage], finishedProcessingStream: Stream[F, BlockMessage], @@ -300,6 +300,6 @@ object BlockReceiver { } } - def notValidated[F[_]: Concurrent: BlockStore: BlockDagStorage](hash: BlockHash): F[Boolean] = + def notValidated[F[_]: Async: BlockStore: BlockDagStorage](hash: BlockHash): F[Boolean] = BlockStore[F].contains(hash) &&^ BlockDagStorage[F].getRepresentation.map(!_.contains(hash)) } diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/BlockCreator.scala b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/BlockCreator.scala index baf48770be3..0a1cf9941b1 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/BlockCreator.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/BlockCreator.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.blocks.proposer -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -23,7 +23,7 @@ import 
coop.rchain.shared.Log final case class BlockCreator(id: ValidatorIdentity, shardId: String) { type StateTransitionResult = (StateHash, Seq[ProcessedDeploy], Seq[ProcessedSystemDeploy]) - def create[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def create[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( preState: ParentsMergedState, deploys: Seq[DeployId], toSlash: Set[Validator] = Set.empty, diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala index b48a7728adb..d31d7676a5d 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.blocks.proposer import cats.data.OptionT -import cats.effect.Concurrent +import cats.effect.{Async, Deferred, Sync, Temporal} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -23,7 +23,6 @@ import coop.rchain.sdk.consensus.Stake import coop.rchain.sdk.error.FatalError import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, Time} -import cats.effect.{Deferred, Temporal} sealed abstract class ProposerResult object ProposerEmpty extends ProposerResult @@ -40,7 +39,7 @@ object ProposerResult { def started(seqNumber: Long): ProposerResult = ProposerStarted(seqNumber) } -class Proposer[F[_]: Concurrent: Log: Span]( +class Proposer[F[_]: Async: Log: Span]( getLatestSeqNumber: Validator => F[Long], // propose constraint checkers checkActiveValidator: ValidatorIdentity => F[Boolean], @@ -67,7 +66,7 @@ class Proposer[F[_]: Concurrent: Log: Span]( proposeEffect(b) >> (ProposeResult.success(v), b.some).pure[F] case Left(v) => - Concurrent[F].raiseError[(ProposeResult, Option[BlockMessage])]( + Sync[F].raiseError[(ProposeResult, Option[BlockMessage])]( new 
Exception( s"Validation of self created block failed with reason: $v, cancelling propose." ) @@ -114,7 +113,7 @@ class Proposer[F[_]: Concurrent: Log: Span]( object Proposer { // format: off def apply[F[_] - /* Execution */ : Concurrent: Temporal: Time + /* Execution */ : Async: Temporal: Time /* Storage */ : BlockStore: BlockDagStorage /* Rholang */ : RuntimeManager /* Comm */ : CommUtil diff --git a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala index 21456db15ef..124ea00b9af 100644 --- a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala +++ b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.dag -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import cats.{Monad, Show} import coop.rchain.blockstorage._ @@ -31,7 +31,7 @@ import scala.collection.concurrent.TrieMap import cats.effect.Ref import cats.effect.std.Semaphore -final class BlockDagKeyValueStorage[F[_]: Concurrent: Log] private ( +final class BlockDagKeyValueStorage[F[_]: Async: Log] private ( representationState: Ref[F, DagRepresentation], lock: Semaphore[F], blockMetadataIndex: BlockMetadataStore[F], @@ -190,7 +190,7 @@ object BlockDagKeyValueStorage { deployPool: KeyValueTypedStore[F, DeployId, Signed[DeployData]] ) - private def createStores[F[_]: Concurrent: Log: Metrics](kvm: KeyValueStoreManager[F]) = { + private def createStores[F[_]: Async: Log: Metrics](kvm: KeyValueStoreManager[F]) = { implicit val kvm_ = kvm for { // Block metadata map @@ -230,7 +230,7 @@ object BlockDagKeyValueStorage { ) } - def create[F[_]: Concurrent: Log: Metrics]( + def create[F[_]: Async: Log: Metrics]( kvm: KeyValueStoreManager[F] ): F[BlockDagKeyValueStorage[F]] = for { diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala 
b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala index f706b7c165c..173534feedb 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.casper.protocol.{BlockMessage, FinalizedFringe} @@ -148,7 +148,7 @@ object LfsBlockRequester { * @param validateBlock Check if received block is valid * @return fs2.Stream processing all blocks */ - def stream[F[_]: Concurrent: Temporal: Log]( + def stream[F[_]: Async: Temporal: Log]( fringe: FinalizedFringe, incomingBlocks: Stream[F, BlockMessage], blockHeightsBeforeFringe: Int, diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala index 298c6961262..f93808bce16 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol._ import coop.rchain.models.syntax._ @@ -94,7 +94,7 @@ object LfsTupleSpaceRequester { * @param validateTupleSpaceItems Check if received statet chunk is valid * @return fs2.Stream processing all tuple space state */ - def stream[F[_]: Concurrent: Temporal: Log]( + def stream[F[_]: Async: Temporal: Log]( fringe: FinalizedFringe, tupleSpaceMessageQueue: Queue[F, StoreItemsMessage], requestForStoreItem: (StatePartPath, Int) => F[Unit], diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala 
index c5dbda7dbcc..455c73fae7f 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.engine import cats.Parallel -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -35,7 +35,7 @@ object NodeLaunch { // format: off def apply[F[_] - /* Execution */ : Concurrent: Parallel: ContextShift: Time: Temporal + /* Execution */ : Async: Parallel: ContextShift: Time: Temporal /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -140,7 +140,7 @@ object NodeLaunch { } yield () } - def createGenesisBlockFromConfig[F[_]: Concurrent: ContextShift: RuntimeManager: Log]( + def createGenesisBlockFromConfig[F[_]: Async: ContextShift: RuntimeManager: Log]( validator: ValidatorIdentity, conf: CasperConf ): F[BlockMessage] = @@ -162,7 +162,7 @@ object NodeLaunch { conf.genesisBlockData.systemContractPubKey ) - def createGenesisBlock[F[_]: Concurrent: ContextShift: RuntimeManager: Log]( + def createGenesisBlock[F[_]: Async: ContextShift: RuntimeManager: Log]( validator: ValidatorIdentity, shardId: String, blockNumber: Long, diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala index e5184947a92..5eb9e1a456e 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.engine import cats.Monad -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -26,7 +26,7 @@ object NodeRunning { // format: off def 
apply[F[_] - /* Execution */ : Concurrent: Time + /* Execution */ : Async: Time /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Storage */ : BlockStore: BlockDagStorage: RSpaceStateManager @@ -212,7 +212,7 @@ object NodeRunning { // format: off class NodeRunning[F[_] - /* Execution */ : Concurrent: Time + /* Execution */ : Async: Time /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Storage */ : BlockStore: BlockDagStorage: RSpaceStateManager diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala index 2754109e43e..17293bbd214 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -32,7 +32,7 @@ object NodeSyncing { */ // format: off def apply[F[_] - /* Execution */ : Concurrent: Time: Temporal + /* Execution */ : Async: Time: Temporal /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -62,7 +62,7 @@ object NodeSyncing { */ // format: off class NodeSyncing[F[_] - /* Execution */ : Concurrent: Time: Temporal + /* Execution */ : Async: Time: Temporal /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager diff --git a/casper/src/main/scala/coop/rchain/casper/genesis/Genesis.scala b/casper/src/main/scala/coop/rchain/casper/genesis/Genesis.scala index b8065e06f06..27a114efd52 100644 --- a/casper/src/main/scala/coop/rchain/casper/genesis/Genesis.scala +++ b/casper/src/main/scala/coop/rchain/casper/genesis/Genesis.scala @@ -1,6 
+1,6 @@ package coop.rchain.casper.genesis -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.genesis.contracts._ @@ -57,7 +57,7 @@ object Genesis { StandardDeploys.poSGenerator(posParams, shardId) } - def createGenesisBlock[F[_]: Concurrent: RuntimeManager]( + def createGenesisBlock[F[_]: Async: RuntimeManager]( validator: ValidatorIdentity, genesis: Genesis ): F[BlockMessage] = { diff --git a/casper/src/main/scala/coop/rchain/casper/merging/BlockIndex.scala b/casper/src/main/scala/coop/rchain/casper/merging/BlockIndex.scala index adb5c25ccae..650c7650271 100644 --- a/casper/src/main/scala/coop/rchain/casper/merging/BlockIndex.scala +++ b/casper/src/main/scala/coop/rchain/casper/merging/BlockIndex.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.merging -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -28,7 +28,7 @@ object BlockIndex { // TODO make proper storage for block indices val cache = TrieMap.empty[BlockHash, BlockIndex] - def getBlockIndex[F[_]: Concurrent: RuntimeManager: BlockStore]( + def getBlockIndex[F[_]: Async: RuntimeManager: BlockStore]( blockHash: BlockHash ): F[BlockIndex] = { val cached = BlockIndex.cache.get(blockHash).map(_.pure) @@ -55,7 +55,7 @@ object BlockIndex { } } - def createEventLogIndex[F[_]: Concurrent, C, P, A, K]( + def createEventLogIndex[F[_]: Async, C, P, A, K]( events: List[Event], historyRepository: HistoryRepository[F, C, P, A, K], preStateHash: Blake2b256Hash, @@ -75,7 +75,7 @@ object BlockIndex { ) } yield eventLogIndex - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( blockHash: BlockHash, usrProcessedDeploys: List[ProcessedDeploy], sysProcessedDeploys: List[ProcessedSystemDeploy], diff --git a/casper/src/main/scala/coop/rchain/casper/merging/DeployChainIndex.scala 
b/casper/src/main/scala/coop/rchain/casper/merging/DeployChainIndex.scala index 666e918fc90..0b0b4f60dcc 100644 --- a/casper/src/main/scala/coop/rchain/casper/merging/DeployChainIndex.scala +++ b/casper/src/main/scala/coop/rchain/casper/merging/DeployChainIndex.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.merging -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.rspace.hashing.Blake2b256Hash @@ -36,7 +36,7 @@ object DeployChainIndex { implicit val ord = Ordering.by((x: DeployChainIndex) => (x.hostBlock, x.postStateHash)) - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( hostBlock: Blake2b256Hash, deploys: Set[DeployIndex], preStateHash: Blake2b256Hash, diff --git a/casper/src/main/scala/coop/rchain/casper/merging/DeployIndex.scala b/casper/src/main/scala/coop/rchain/casper/merging/DeployIndex.scala index 4af0686cc83..37600059994 100644 --- a/casper/src/main/scala/coop/rchain/casper/merging/DeployIndex.scala +++ b/casper/src/main/scala/coop/rchain/casper/merging/DeployIndex.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.merging -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.protocol.Event @@ -27,7 +27,7 @@ object DeployIndex { val SYS_CLOSE_BLOCK_DEPLOY_ID = ByteString.copyFrom(Array(2.toByte)) val SYS_EMPTY_DEPLOY_ID = ByteString.copyFrom(Array(3.toByte)) - def apply[F[_]: Concurrent]( + def apply[F[_]: Async]( sig: ByteString, cost: Long, events: List[Event], diff --git a/casper/src/main/scala/coop/rchain/casper/merging/MergeScope.scala b/casper/src/main/scala/coop/rchain/casper/merging/MergeScope.scala index 2f0a175eadc..fa1f5fb890d 100644 --- a/casper/src/main/scala/coop/rchain/casper/merging/MergeScope.scala +++ b/casper/src/main/scala/coop/rchain/casper/merging/MergeScope.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.merging -import 
cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.dag.{DagRepresentation, Message} @@ -78,7 +78,7 @@ object MergeScope { (MergeScope(fScopeIds, cScopeIds -- baseMsg.toSet), baseMsg) } - def merge[F[_]: Concurrent: Log]( + def merge[F[_]: Async: Log]( mergeScope: MergeScope, baseState: Blake2b256Hash, fringeStates: Map[Set[BlockHash], FringeData], @@ -142,7 +142,7 @@ object MergeScope { } /** Merge set of indices into base state and produce new state. */ - def computeMergedState[F[_]: Concurrent: Log]( + def computeMergedState[F[_]: Async: Log]( toMerge: Set[DeployChainIndex], baseState: Blake2b256Hash, historyRepository: RhoHistoryRepository[F] diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala index 2dc937c6d63..dd6f4dd3aae 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala @@ -50,7 +50,7 @@ object CommUtil { def apply[F[_]](implicit ev: CommUtil[F]): CommUtil[F] = ev - def of[F[_]: Concurrent: Temporal: TransportLayer: RPConfAsk: ConnectionsCell: Log]: CommUtil[F] = + def of[F[_]: Async: Temporal: TransportLayer: RPConfAsk: ConnectionsCell: Log]: CommUtil[F] = new CommUtil[F] { def sendToPeers(message: Packet, scopeSize: Option[Int]): F[Unit] = diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala index 04b92c85501..15ce62e7fcb 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.protocol.client -import cats.effect.{ConcurrentEffect, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import 
coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.deploy.v1.{DeployExecStatus, DeployServiceFs2Grpc} @@ -39,7 +39,7 @@ object DeployService { def apply[F[_]](implicit ev: DeployService[F]): DeployService[F] = ev } -class GrpcDeployService[F[_]: Sync: ConcurrentEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcDeployService[F[_]: Sync: Async](host: String, port: Int, maxMessageSize: Int) extends DeployService[F] with Closeable { diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala index c3958a6fcce..9d1a67be19a 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.protocol.client -import cats.effect.{ConcurrentEffect, Sync} +import cats.effect.{Async, Sync} import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.propose.v1._ import coop.rchain.models.either.implicits._ @@ -19,7 +19,7 @@ object ProposeService { def apply[F[_]](implicit ev: ProposeService[F]): ProposeService[F] = ev } -class GrpcProposeService[F[_]: Sync: ConcurrentEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcProposeService[F[_]: Sync: Async](host: String, port: Int, maxMessageSize: Int) extends ProposeService[F] with Closeable { diff --git a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala index 06871d6fca8..856607ed60e 100644 --- a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.reporting import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import 
cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.models.syntax._ @@ -83,7 +83,7 @@ object ReportingCasper { type RhoReportingRspace[F[_]] = ReportingRspace[F, Par, BindPattern, ListParWithRandom, TaggedContinuation] - def rhoReporter[F[_]: Concurrent: ContextShift: Parallel: BlockDagStorage: Log: Metrics: Span]( + def rhoReporter[F[_]: Async: ContextShift: Parallel: BlockDagStorage: Log: Metrics: Span]( rspaceStore: RSpaceStore[F], shardId: String ): ReportingCasper[F] = @@ -169,7 +169,7 @@ object ReportingRuntime { implicit val RuntimeMetricsSource: Source = Metrics.Source(RholangMetricsSource, "reportingRuntime") - def createReportingRSpace[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( + def createReportingRSpace[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( store: RSpaceStore[F] ): F[RhoReportingRspace[F]] = { import coop.rchain.rholang.interpreter.storage._ @@ -178,7 +178,7 @@ object ReportingRuntime { ReportingRspace.create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation](store) } - def createReportingRuntime[F[_]: Concurrent: Log: Metrics: Span: Parallel]( + def createReportingRuntime[F[_]: Async: Log: Metrics: Span: Parallel]( reporting: RhoReportingRspace[F], shardId: String, extraSystemProcesses: Seq[Definition[F]] = Seq.empty diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala index eaa14319c78..b2b65b79c54 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -48,7 +48,7 @@ object InterpreterUtil { // TODO: most of this function is legacy 
code, it should be refactored with separation of errors that are // handled (with included data e.g. hash not equal) and fatal errors which should NOT be handled - def validateBlockCheckpoint[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpoint[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[(BlockMetadata, BlockProcessing[Boolean])] = for { @@ -130,7 +130,7 @@ object InterpreterUtil { (bmd, result) } - def validateBlockCheckpointLegacy[F[_]: Concurrent: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpointLegacy[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[BlockProcessing[Boolean]] = validateBlockCheckpoint(block).map(_._2) @@ -244,7 +244,7 @@ object InterpreterUtil { Log[F].info(s"Deploy ($deployInfo) errors: ${errors.mkString(", ")}") } - def computeDeploysCheckpoint[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def computeDeploysCheckpoint[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( deploys: Seq[Signed[DeployData]], systemDeploys: Seq[SystemDeploy], rand: Blake2b512Random, diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala index 96d037e024e..4cb326cbcf8 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala @@ -71,7 +71,7 @@ trait RuntimeManager[F[_]] { def getMergeableStore: MergeableStore[F] } -final case class RuntimeManagerImpl[F[_]: Concurrent: Metrics: Span: Log: ContextShift: Parallel]( +final case class RuntimeManagerImpl[F[_]: Async: Metrics: Span: Log: ContextShift: Parallel]( space: RhoISpace[F], replaySpace: 
RhoReplayISpace[F], historyRepo: RhoHistoryRepository[F], @@ -258,7 +258,7 @@ object RuntimeManager { def apply[F[_]](implicit F: RuntimeManager[F]): F.type = F - def apply[F[_]: Concurrent: ContextShift: Parallel: Metrics: Span: Log]( + def apply[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( rSpace: RhoISpace[F], replayRSpace: RhoReplayISpace[F], historyRepo: RhoHistoryRepository[F], @@ -277,7 +277,7 @@ object RuntimeManager { ) ) - def apply[F[_]: Concurrent: ContextShift: Parallel: Metrics: Span: Log]( + def apply[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, @@ -288,7 +288,7 @@ object RuntimeManager { _._1 ) - def createWithHistory[F[_]: Concurrent: ContextShift: Parallel: Metrics: Span: Log]( + def createWithHistory[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeManagerSyntax.scala b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeManagerSyntax.scala index bbf09d92622..efc63008359 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeManagerSyntax.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeManagerSyntax.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang.syntax -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.rholang.RuntimeManager import coop.rchain.casper.rholang.RuntimeManager.StateHash @@ -64,7 +64,7 @@ final class RuntimeManagerOps[F[_]](private val rm: RuntimeManager[F]) extends A channelsData: Seq[NumberChannelsEndVal], // Used to calculate value difference from final values preStateHash: Blake2b256Hash - )(implicit s: Concurrent[F]): F[Unit] = + )(implicit s: Async[F]): F[Unit] = for { // Calculate difference values from final values on 
number channels diffs <- convertNumberChannelsToDiff(channelsData, preStateHash) @@ -94,7 +94,7 @@ final class RuntimeManagerOps[F[_]](private val rm: RuntimeManager[F]) extends A channelsData: Seq[NumberChannelsEndVal], // Used to calculate value difference from final values preStateHash: Blake2b256Hash - )(implicit s: Concurrent[F]): F[List[NumberChannelsDiff]] = Sync[F].defer { + )(implicit s: Async[F]): F[List[NumberChannelsDiff]] = Sync[F].defer { // Get number channel value for pre-state val getDataFunc = (ch: Blake2b256Hash) => diff --git a/casper/src/main/scala/coop/rchain/casper/storage/RNodeKeyValueStoreManager.scala b/casper/src/main/scala/coop/rchain/casper/storage/RNodeKeyValueStoreManager.scala index 4e9c1b91668..ee53d19866e 100644 --- a/casper/src/main/scala/coop/rchain/casper/storage/RNodeKeyValueStoreManager.scala +++ b/casper/src/main/scala/coop/rchain/casper/storage/RNodeKeyValueStoreManager.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.storage -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.shared.Log import coop.rchain.store.LmdbDirStoreManager.{gb, tb, Db, LmdbEnvConfig} import coop.rchain.store.{KeyValueStoreManager, LmdbDirStoreManager} @@ -8,7 +8,7 @@ import coop.rchain.store.{KeyValueStoreManager, LmdbDirStoreManager} import java.nio.file.Path object RNodeKeyValueStoreManager { - def apply[F[_]: Concurrent: Log](dirPath: Path): F[KeyValueStoreManager[F]] = + def apply[F[_]: Async: Log](dirPath: Path): F[KeyValueStoreManager[F]] = LmdbDirStoreManager[F](dirPath, rnodeDbMapping.toMap) // Config name is used as a sub-folder for LMDB files diff --git a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala index a5ec9536466..cf3fb0a775e 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala @@ -1,7 +1,7 @@ package 
coop.rchain.casper.api import cats.effect.testing.scalatest.AsyncIOSpec -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -118,7 +118,7 @@ import scala.collection.immutable.SortedMap // } // } // -// private def createMocks[F[_]: Concurrent: Sync] +// private def createMocks[F[_]: Async: Sync] // : (Concurrent[F], Log[F], BlockDagStorage[F], BlockStore[F], RuntimeManager[F], Span[F]) = { // val c = Concurrent[F] // val sp = mock[Span[F]] @@ -169,7 +169,7 @@ import scala.collection.immutable.SortedMap // Set(m.blockHash) // ) // -// private def bondedStatus[F[_]: Concurrent: BlockDagStorage: BlockStore: Log: RuntimeManager: Span]( +// private def bondedStatus[F[_]: Async: BlockDagStorage: BlockStore: Log: RuntimeManager: Span]( // validatorIdOpt: ValidatorIdentity, // publicKey: PublicKey, // block: BlockMessage diff --git a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala index 0374a6596d1..cd9da149663 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.api import cats.effect.testing.scalatest.AsyncIOSpec -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -177,7 +177,7 @@ import scala.collection.immutable.SortedMap // } // } // -// private def exploratoryDeploy[F[_]: Concurrent: BlockStore: BlockDagStorage: RuntimeManager: Log: Span]( +// private def exploratoryDeploy[F[_]: Async: BlockStore: BlockDagStorage: RuntimeManager: Log: Span]( // term: String, // block: BlockHash, // validatorIdOpt: Option[ValidatorIdentity] = 
none diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala index 523f1607552..1c09060b23f 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.batch2 import cats.Applicative -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.{BlockDagStorage, DagMessageState, DagRepresentation} @@ -164,7 +164,7 @@ import scala.collection.immutable.SortedMap // // import fs2._ // -// private def withEnv[F[_]: Concurrent: Log](shardId: String)( +// private def withEnv[F[_]: Async: Log](shardId: String)( // f: ( // Queue[F, BlockMessage], // Queue[F, BlockMessage], diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala index e764b43a551..6e212420689 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.batch2 import java.nio.file.Files -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import coop.rchain.shared.Log import coop.rchain.store.{KeyValueStoreSut, LmdbStoreManager} @@ -28,7 +28,7 @@ class LmdbKeyValueStoreSpec override def afterAll: Unit = tempDir.deleteRecursively - def withSut[F[_]: Concurrent: Log](f: KeyValueStoreSut[F] => F[Unit]) = + def withSut[F[_]: Async: Log](f: KeyValueStoreSut[F] => F[Unit]) = for { kvm <- LmdbStoreManager[F](tempPath.resolve(Random.nextString(32)), 1024 * 1024 * 1024) sut = { diff --git 
a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala index d674b4c69da..c6ad51f5b69 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsBlockRequester.ST @@ -81,7 +81,7 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Concurrent: Temporal: Log]( + def createMock[F[_]: Async: Temporal: Log]( startBlock: BlockMessage, requestTimeout: FiniteDuration )(test: Mock[F] => F[Unit]): F[Unit] = { diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala index 53eac20f3ad..d9a1c73fd10 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.engine -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.engine.LfsTupleSpaceRequester.{ST, StatePartPath} @@ -70,7 +70,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Concurrent: Temporal: Log](requestTimeout: FiniteDuration)( + def createMock[F[_]: Async: Temporal: Log](requestTimeout: FiniteDuration)( test: Mock[F] => F[Unit] ): F[Unit] = { diff --git 
a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala index fb968b536b0..843a6bf8f26 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.genesis import cats.Parallel -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.syntax._ @@ -79,7 +79,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block _ <- fromInputFiles()( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ) _ = log.warns.count( @@ -103,7 +103,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesisAttempt <- fromInputFiles(maybeBondsPath = Some(nonExistingPath))( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ).attempt } yield log.warns.exists(_.contains("BONDS FILE NOT FOUND")) @@ -127,7 +127,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesisAttempt <- fromInputFiles(maybeBondsPath = Some(badBondsFile))( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ).attempt } yield genesisAttempt.left.value.getMessage should include( @@ -150,7 +150,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles(maybeBondsPath = Some(bondsFile))( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ) bonds = genesis.bonds.toList @@ -177,7 +177,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles()( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ) _ <- 
BlockStore[IO].put(genesis.blockHash, genesis) @@ -201,7 +201,7 @@ class GenesisTest extends AnyFlatSpec with Matchers with EitherValues with Block genesis <- fromInputFiles()( genesisPath, runtimeManager, - implicitly[Concurrent[IO]], + implicitly[Async[IO]], log ) bonds = genesis.bonds.toList @@ -274,7 +274,7 @@ object GenesisTest { ) } yield genesisBlock - def withGenResources[F[_]: Concurrent: ContextShift: Parallel]( + def withGenResources[F[_]: Async: ContextShift: Parallel]( body: (RuntimeManager[F], Path, LogStub[F]) => F[Unit] ): F[Unit] = { val storePath = storageLocation diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockApiFixture.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockApiFixture.scala index 05b6d2e7e38..3d33f04de74 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockApiFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockApiFixture.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.BlockDagStorage @@ -14,7 +14,7 @@ import coop.rchain.shared.Log trait BlockApiFixture { - def createBlockApi[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( + def createBlockApi[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( shardId: String, maxDepthLimit: Int, validatorIdOpt: Option[ValidatorIdentity] = none @@ -40,7 +40,7 @@ trait BlockApiFixture { } yield blockApi } - def createBlockApi[F[_]: Concurrent](node: TestNode[F]): F[BlockApiImpl[F]] = { + def createBlockApi[F[_]: Async](node: TestNode[F]): F[BlockApiImpl[F]] = { import node.{blockDagStorage, blockStore, logEff, runtimeManager, sp} val thisNode = node.local diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala 
b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala index ddc549cdece..cdc29efdde8 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.{Concurrent, IO, Resource} +import cats.effect.{Async, IO, Resource} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore import coop.rchain.blockstorage.BlockStore.BlockStore @@ -56,7 +56,7 @@ trait BlockDagStorageFixture extends BeforeAndAfter { self: Suite => object BlockDagStorageTestFixture { - def withStorageF[F[_]: Concurrent: Metrics: Log] + def withStorageF[F[_]: Async: Metrics: Log] : Resource[F, (BlockStore[F], BlockDagStorage[F])] = { def create(dir: Path) = for { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockDataContract.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockDataContract.scala index c0fabe14605..fd6cbf40280 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDataContract.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDataContract.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.crypto.PublicKey import coop.rchain.metrics.Span import coop.rchain.models.rholang.RhoType @@ -11,7 +11,7 @@ import coop.rchain.rholang.interpreter.SystemProcesses.ProcessContext object BlockDataContract { import cats.syntax.all._ - def set[F[_]: Concurrent: Span]( + def set[F[_]: Async: Span]( ctx: ProcessContext[F] )(message: Seq[ListParWithRandom]): F[Unit] = { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala index aa1b7ff95b7..79a11dde77b 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala +++ 
b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.helper import cats.Applicative -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -48,7 +48,7 @@ object BlockGenerator { rejectedDeploys = Set() ) - def step[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def step[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[Unit] = for { @@ -57,7 +57,7 @@ object BlockGenerator { result <- injectPostStateHash[F](block, postB1StateHash, postB1ProcessedDeploys) } yield result - private def computeBlockCheckpoint[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + private def computeBlockCheckpoint[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage, preState: ParentsMergedState ): F[(StateHash, Seq[ProcessedDeploy])] = Span[F].trace(GenerateBlockMetricsSource) { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/DeployerIdContract.scala b/casper/src/test/scala/coop/rchain/casper/helper/DeployerIdContract.scala index faa2b899f96..d44a4b283e9 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/DeployerIdContract.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/DeployerIdContract.scala @@ -1,5 +1,5 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.metrics.Span import coop.rchain.models.ListParWithRandom import coop.rchain.models.rholang.RhoType @@ -12,7 +12,7 @@ import coop.rchain.rholang.interpreter.SystemProcesses.ProcessContext object DeployerIdContract { import cats.syntax.all._ - def get[F[_]: Concurrent: Span]( + def get[F[_]: Async: Span]( ctx: ProcessContext[F] )(message: Seq[ListParWithRandom]): F[Unit] = { diff 
--git a/casper/src/test/scala/coop/rchain/casper/helper/RhoLoggerContract.scala b/casper/src/test/scala/coop/rchain/casper/helper/RhoLoggerContract.scala index 9f76e0e627d..73e3f8a4158 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/RhoLoggerContract.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/RhoLoggerContract.scala @@ -1,5 +1,5 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.metrics.Span import coop.rchain.models.ListParWithRandom import coop.rchain.models.rholang.RhoType @@ -11,7 +11,7 @@ object RhoLoggerContract { val prettyPrinter = PrettyPrinter() //TODO extract a `RhoPatterns[F]` algebra that will move passing the Span, the Dispatcher, and the Space parameters closer to the edge of the world - def handleMessage[F[_]: Log: Concurrent: Span]( + def handleMessage[F[_]: Log: Async: Span]( ctx: ProcessContext[F] )(message: Seq[ListParWithRandom]): F[Unit] = { val isContractCall = new ContractCall(ctx.space, ctx.dispatcher) diff --git a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala index 942272808f1..5347843d754 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.casper.genesis.Genesis import coop.rchain.casper.genesis.contracts.TestUtil @@ -74,7 +74,7 @@ class RhoSpec( def hasFailures(assertions: List[RhoTestAssertion]) = assertions.find(_.isSuccess).isDefined - private def testFrameworkContracts[F[_]: Log: Concurrent: Span]( + private def testFrameworkContracts[F[_]: Log: Async: Span]( testResultCollector: TestResultCollector[F] ): Seq[SystemProcesses.Definition[F]] = { val testResultCollectorService = diff --git 
a/casper/src/test/scala/coop/rchain/casper/helper/Secp256k1SignContract.scala b/casper/src/test/scala/coop/rchain/casper/helper/Secp256k1SignContract.scala index 5b2f9ca6c5c..e55368902b8 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/Secp256k1SignContract.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/Secp256k1SignContract.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.crypto.signatures.Secp256k1 import coop.rchain.metrics.Span import coop.rchain.models.ListParWithRandom @@ -9,7 +9,7 @@ import coop.rchain.rholang.interpreter.{ContractCall, SystemProcesses} object Secp256k1SignContract { - def get[F[_]: Concurrent: Span]( + def get[F[_]: Async: Span]( ctx: SystemProcesses.ProcessContext[F] )(message: Seq[ListParWithRandom]): F[Unit] = { val isContractCall = new ContractCall(ctx.space, ctx.dispatcher) diff --git a/casper/src/test/scala/coop/rchain/casper/helper/SysAuthTokenContract.scala b/casper/src/test/scala/coop/rchain/casper/helper/SysAuthTokenContract.scala index 401642f6530..693aaa4ba83 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/SysAuthTokenContract.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/SysAuthTokenContract.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.metrics.Span import coop.rchain.models.rholang.RhoType import coop.rchain.models.{GSysAuthToken, ListParWithRandom} @@ -13,7 +13,7 @@ import coop.rchain.rholang.interpreter.SystemProcesses.ProcessContext object SysAuthTokenContract { import cats.syntax.all._ - def get[F[_]: Concurrent: Span]( + def get[F[_]: Async: Span]( ctx: ProcessContext[F] )(message: Seq[ListParWithRandom]): F[Unit] = { diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index 47ce01eae23..96754ef9c7b 100644 --- 
a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.helper import cats.Parallel -import cats.effect.{Concurrent, IO, Resource, Sync} +import cats.effect.{Async, IO, Resource, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -41,7 +41,7 @@ import java.nio.file.Path import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} import cats.effect.{Deferred, Ref, Temporal} -case class TestNode[F[_]: Concurrent: Temporal]( +case class TestNode[F[_]: Async: Temporal]( name: String, local: PeerNode, tle: TransportLayerTestImpl[F], @@ -301,7 +301,7 @@ object TestNode { maxParentDepth: Option[Int] = None, withReadOnlySize: Int = 0 ): Resource[Effect, IndexedSeq[TestNode[Effect]]] = { - implicit val c = Concurrent[Effect] + implicit val c = Async[Effect] implicit val n = TestNetwork.empty[Effect] networkF[Effect]( @@ -315,7 +315,7 @@ object TestNode { ) } - private def networkF[F[_]: Concurrent: Parallel: ContextShift: Temporal: TestNetwork]( + private def networkF[F[_]: Async: Parallel: ContextShift: Temporal: TestNetwork]( sks: IndexedSeq[PrivateKey], genesis: BlockMessage, storageMatrixPath: Path, @@ -373,7 +373,7 @@ object TestNode { } } - private def createNode[F[_]: Concurrent: Temporal: Parallel: ContextShift: TestNetwork]( + private def createNode[F[_]: Async: Temporal: Parallel: ContextShift: TestNetwork]( name: String, currentPeerNode: PeerNode, genesis: BlockMessage, diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala index 03d017ba109..1f025907b8b 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestResultCollector.scala @@ -1,5 +1,5 @@ package 
coop.rchain.casper.helper -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.Span @@ -81,13 +81,13 @@ case class TestResult( case class AckedActionCtx(ackChannel: Par, rand: Blake2b512Random, sequenceNumber: Long) object TestResultCollector { - def apply[F[_]: Concurrent: Span]: F[TestResultCollector[F]] = + def apply[F[_]: Async: Span]: F[TestResultCollector[F]] = Ref .of(TestResult(Map.empty, hasFinished = false)) .map(new TestResultCollector(_)) } -class TestResultCollector[F[_]: Concurrent: Span](result: Ref[F, TestResult]) { +class TestResultCollector[F[_]: Async: Span](result: Ref[F, TestResult]) { def getResult: F[TestResult] = result.get diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala index f7cf9824d4f..5d5812b6146 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.helper import cats.Parallel -import cats.effect.{Concurrent, Resource} +import cats.effect.{Async, Resource} import coop.rchain.metrics.{Metrics, Span} import coop.rchain.rholang.Resources.mkRuntimes import coop.rchain.rholang.interpreter.RhoRuntime.RhoHistoryRepository @@ -8,7 +8,7 @@ import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime} import coop.rchain.shared.Log object TestRhoRuntime { - def rhoRuntimeEff[F[_]: Log: Metrics: Span: Concurrent: Parallel: ContextShift]( + def rhoRuntimeEff[F[_]: Log: Metrics: Span: Async: Parallel: ContextShift]( initRegistry: Boolean = true ): Resource[F, (RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = mkRuntimes[F]("hash-set-casper-test-genesis-", initRegistry = initRegistry) diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index 0f028e15195..187bc15720c 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.merging import cats.Parallel -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.rholang.Resources @@ -92,7 +92,7 @@ class MergeNumberChannelSpec extends AnyFlatSpec { RhoName(baseRhoSeed.next()) } - def testCase[F[_]: Concurrent: ContextShift: Parallel: Span: Log]( + def testCase[F[_]: Async: ContextShift: Parallel: Span: Log]( baseTerms: Seq[String], leftTerms: Seq[DeployTestInfo], rightTerms: Seq[DeployTestInfo], diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala index db4c1ac0e2e..a8cbbb01a91 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.rholang -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -47,7 +47,7 @@ class InterpreterUtilTest val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock - def computeDeploysCheckpoint[F[_]: Concurrent: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def computeDeploysCheckpoint[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( parents: Seq[BlockHash], deploys: Seq[Signed[DeployData]], blockNumber: Long = 0L, diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala 
b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala index 67ff59545bf..15b02f844b1 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.rholang import cats.Parallel -import cats.effect.{Concurrent, Resource, Sync} +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import coop.rchain.casper.storage.RNodeKeyValueStoreManager.rnodeDbMapping import coop.rchain.metrics @@ -19,7 +19,7 @@ import java.nio.file.{Files, Path} object Resources { - def mkTestRNodeStoreManager[F[_]: Concurrent: Log]( + def mkTestRNodeStoreManager[F[_]: Async: Log]( dirPath: Path ): F[KeyValueStoreManager[F]] = { // Limit maximum environment (file) size for LMDB in tests @@ -36,7 +36,7 @@ object Resources { dbMappings >>= (xs => LmdbDirStoreManager[F](dirPath, xs.toMap)) } - def mkRuntimeManager[F[_]: Concurrent: Parallel: ContextShift: Log]( + def mkRuntimeManager[F[_]: Async: Parallel: ContextShift: Log]( prefix: String, mergeableTagName: Par ): Resource[F, RuntimeManager[F]] = @@ -46,7 +46,7 @@ object Resources { // TODO: This is confusing to create another instances for Log, Metrics and Span. // Investigate if it can be removed or define it as parameters. Similar for [[mkRuntimeManagerWithHistoryAt]]. 
- def mkRuntimeManagerAt[F[_]: Concurrent: Parallel: ContextShift]( + def mkRuntimeManagerAt[F[_]: Async: Parallel: ContextShift]( kvm: KeyValueStoreManager[F], mergeableTagName: Par ): F[RuntimeManager[F]] = { diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala index 064189cabc2..3034e98d7c8 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.discovery -import cats.effect.{ConcurrentEffect, Sync} +import cats.effect.{AsyncEffect, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.catscontrib.ski._ @@ -14,7 +14,7 @@ import io.grpc.netty._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -class GrpcKademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( +class GrpcKademliaRPC[F[_]: Sync: AsyncEffect: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration, grpcEC: ExecutionContext diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala index 9ed230e31cd..5e40ecd0c00 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala @@ -1,6 +1,6 @@ package coop.rchain.comm -import cats.effect.{ConcurrentEffect, Resource, Sync} +import cats.effect.{AsyncEffect, Resource, Sync} import com.google.protobuf.ByteString import coop.rchain.metrics.Metrics import coop.rchain.sdk.syntax.all._ @@ -12,7 +12,7 @@ package object discovery { val DiscoveryMetricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "discovery.kademlia") - def acquireKademliaRPCServer[F[_]: Sync: ConcurrentEffect]( + def acquireKademliaRPCServer[F[_]: Sync: AsyncEffect]( networkId: String, port: Int, pingHandler: PeerNode 
=> F[Unit], diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 7826907f905..940f7eb667f 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -2,7 +2,7 @@ package coop.rchain.comm.transport import cats.Applicative import cats.effect.syntax.all._ -import cats.effect.{Concurrent, ConcurrentEffect, Sync} +import cats.effect.{Async, ConcurrentEffect, Sync} import cats.syntax.all._ import coop.rchain.comm.CommError.{protocolException, CommErr} import coop.rchain.comm._ @@ -35,7 +35,7 @@ final case class BufferedGrpcStreamChannel[F[_]]( buferSubscriber: Stream[F, Unit] ) -class GrpcTransportClient[F[_]: Concurrent: ConcurrentEffect: Log: Metrics]( +class GrpcTransportClient[F[_]: Async: AsyncEffect: Log: Metrics]( networkId: String, cert: String, key: String, @@ -120,7 +120,7 @@ class GrpcTransportClient[F[_]: Concurrent: ConcurrentEffect: Log: Metrics]( ) >> channelsMap.update(_ - peer) >> getChannel(peer) else c.pure[F] - _ <- Concurrent[F] + _ <- Sync[F] .start(r.buferSubscriber.compile.drain) .onError { case err => diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index 0ed77425eb3..2528627528a 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} +import cats.effect.{Async, ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import cats.effect.syntax.all._ import coop.rchain.comm.protocol.routing._ @@ -30,7 +30,7 @@ object GrpcTransportReceiver { type MessageBuffers[F[_]] = (Send => 
F[Boolean], StreamMessage => F[Boolean], Stream[F, Unit]) type MessageHandlers[F[_]] = (Send => F[Unit], StreamMessage => F[Unit]) - def create[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( + def create[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: Int, serverSslContext: SslContext, @@ -62,7 +62,7 @@ object GrpcTransportReceiver { blobBuffer.dequeueChunk(1).parEvalMapUnordered(parallelism)(messageHandlers._2(_)) // inbound queue lives for 10 minutes TODO synchronize with Kademlia table cleanup s = (Stream.fixedDelay(10.minutes) ++ Stream.eval(clear)) concurrently stream - _ <- Concurrent[F] + _ <- Sync[F] .start(s.compile.drain) .onError { case err => diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala index 162bfcb41c7..bcd77629d1b 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} +import cats.effect.{Async, ConcurrentEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.comm.protocol.routing.Protocol @@ -38,7 +38,7 @@ object TransportLayerServer { } } -class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( +class GrpcTransportServer[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: Int, cert: String, @@ -108,7 +108,7 @@ class GrpcTransportServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Me object GrpcTransportServer { - def acquireServer[F[_]: Concurrent: ConcurrentEffect: RPConfAsk: Log: Metrics: Temporal]( + def acquireServer[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( networkId: String, port: 
Int, certPath: Path, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala index 4273509d1f5..e710e348c27 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.comm.PeerNode import coop.rchain.comm.transport.PacketOps._ @@ -12,7 +12,7 @@ import scala.collection.concurrent.TrieMap final case class StreamMsgId(key: String, sender: PeerNode) -class StreamObservableClass[F[_]: Concurrent: Log]( +class StreamObservableClass[F[_]: Async: Log]( peer: PeerNode, bufferSize: Int, cache: TrieMap[String, Array[Byte]], @@ -46,7 +46,7 @@ class StreamObservableClass[F[_]: Concurrent: Log]( object StreamObservable { type StreamObservable[F[_]] = (Blob => F[Unit], Stream[F, StreamMsgId]) - def apply[F[_]: Concurrent: Log]( + def apply[F[_]: Async: Log]( peer: PeerNode, bufferSize: Int, cache: TrieMap[String, Array[Byte]] diff --git a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala index faec523ba9b..a26222793e5 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.rp -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import coop.rchain.comm._ import coop.rchain.comm.rp.Connect._ @@ -106,7 +106,7 @@ class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChe } } - private def tryToHandshake[F[_]: Concurrent: Log: Metrics]( + private def tryToHandshake[F[_]: Async: Log: Metrics]( srcPeer: PeerNode, 
remotePeer: PeerNode ): F[Connections] = { diff --git a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala index 463ab94030b..cbae7721873 100644 --- a/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala +++ b/node/src/main/scala/coop/rchain/node/api/DeployGrpcServiceV1.scala @@ -1,6 +1,6 @@ package coop.rchain.node.api -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import cats.{Applicative, Foldable} import com.google.protobuf.ByteString @@ -17,7 +17,7 @@ import fs2.Stream object DeployGrpcServiceV1 { - def apply[F[_]: Concurrent: Log]( + def apply[F[_]: Async: Log]( blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F] ): DeployServiceFs2Grpc[F, Metadata] = diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index 580587b62f3..c7d12e1eb67 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{Concurrent, ConcurrentEffect, Resource, Sync} +import cats.effect.{Async, ConcurrentEffect, Resource, Sync} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.node.model.ReplFs2Grpc @@ -17,7 +17,7 @@ import scala.concurrent.duration.FiniteDuration package object api { - def acquireInternalServer[F[_]: Sync: ConcurrentEffect]( + def acquireInternalServer[F[_]: Sync: AsyncEffect]( host: String, port: Int, grpcEC: ExecutionContext, @@ -52,7 +52,7 @@ package object api { Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) } - def acquireExternalServer[F[_]: Concurrent: ConcurrentEffect: Log]( + def acquireExternalServer[F[_]: Async: AsyncEffect: Log]( host: String, port: Int, 
grpcEC: ExecutionContext, diff --git a/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala b/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala index 9cf61a708f9..5e3c92e0582 100644 --- a/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala +++ b/node/src/main/scala/coop/rchain/node/dag/RNodeStateSetup.scala @@ -1,6 +1,6 @@ package coop.rchain.node.dag -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.node.dag.implementation.{BlockStatus, NetworkBlockRequester, RNodeDagManager} import cats.effect.Ref @@ -10,7 +10,7 @@ object RNodeStateSetup { /** * TODO: Should create the whole node state to be used for API, CLI, etc. */ - def setupRNodeState[F[_]: Concurrent, M, MId, S, SId] = + def setupRNodeState[F[_]: Async, M, MId, S, SId] = for { /* State */ diff --git a/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala b/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala index 83bd9fd3631..c285d498455 100644 --- a/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala +++ b/node/src/main/scala/coop/rchain/node/dag/implementation/NetworkBlockRequester.scala @@ -1,12 +1,12 @@ package coop.rchain.node.dag.implementation -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import coop.rchain.sdk.block.BlockRequester import fs2.Stream import cats.effect.Ref object NetworkBlockRequester { - def apply[F[_]: Concurrent, B, BId]( + def apply[F[_]: Async, B, BId]( st: Ref[F, Map[BId, BlockStatus[B, BId]]] ): F[NetworkBlockRequester[F, B, BId]] = Sync[F].delay(new NetworkBlockRequester(st)) @@ -19,7 +19,7 @@ final case class Received[B, BId](id: BId, b: B) extends BlockStatus[B, BId] /** * TODO: Should wrap existing BlockRequester exposing necessary block statuses. 
*/ -final case class NetworkBlockRequester[F[_]: Concurrent, B, BId] private ( +final case class NetworkBlockRequester[F[_]: Async, B, BId] private ( st: Ref[F, Map[BId, BlockStatus[B, BId]]] ) extends BlockRequester[F, B, BId] { override def requestBlock(id: BId): F[Unit] = ??? diff --git a/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala b/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala index e1139501a07..9d42a41c721 100644 --- a/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala +++ b/node/src/main/scala/coop/rchain/node/dag/implementation/RNodeDagManager.scala @@ -1,12 +1,12 @@ package coop.rchain.node.dag.implementation -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import coop.rchain.sdk.dag.data.{DagManager, DagView} import fs2.Stream import cats.effect.Ref object RNodeDagManager { - def apply[F[_]: Concurrent, M, MId, S, SId]( + def apply[F[_]: Async, M, MId, S, SId]( st: Ref[F, Map[MId, M]], requestMsg: MId => F[Unit], msgInput: Stream[F, M] @@ -14,7 +14,7 @@ object RNodeDagManager { Sync[F].delay(new RNodeDagManager(st, requestMsg, msgInput)) } -final case class RNodeDagManager[F[_]: Concurrent, M, MId, S, SId] private ( +final case class RNodeDagManager[F[_]: Async, M, MId, S, SId] private ( /** * DagManager internal in-memory state. 
*/ diff --git a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala index 420d278b832..0ae19c77850 100644 --- a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala +++ b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala @@ -1,6 +1,6 @@ package coop.rchain.node.effects -import cats.effect.{ConcurrentEffect, Sync} +import cats.effect.{AsyncEffect, Sync} import cats.syntax.all._ import coop.rchain.node.model._ import io.grpc.netty.NettyChannelBuilder @@ -24,7 +24,7 @@ object ReplClient { def apply[F[_]](implicit ev: ReplClient[F]): ReplClient[F] = ev } -class GrpcReplClient[F[_]: Sync: ConcurrentEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcReplClient[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessageSize: Int) extends ReplClient[F] with Closeable { diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index b7b67615c03..bd28960e86e 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{Concurrent, ConcurrentEffect, IO, Sync} +import cats.effect.{Async, ConcurrentEffect, IO, Sync} import cats.mtl._ import cats.syntax.all._ import cats.{Applicative, Monad, Parallel} @@ -32,14 +32,14 @@ package object effects { def nodeDiscovery[F[_]: Monad: KademliaStore: KademliaRPC](id: NodeIdentifier): NodeDiscovery[F] = NodeDiscovery.kademlia(id) - def kademliaRPC[F[_]: Sync: ConcurrentEffect: RPConfAsk: Metrics]( + def kademliaRPC[F[_]: Sync: AsyncEffect: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration, grpcEC: ExecutionContext ): KademliaRPC[F] = new GrpcKademliaRPC(networkId, timeout, grpcEC) - def transportClient[F[_]: Concurrent: ContextShift: ConcurrentEffect: Parallel: Log: Metrics]( + def transportClient[F[_]: Async: 
ContextShift: AsyncEffect: Parallel: Log: Metrics]( networkId: String, certPath: Path, keyPath: Path, @@ -63,7 +63,7 @@ package object effects { def consoleIO[F[_]: Sync](consoleReader: ConsoleReader): ConsoleIO[F] = new JLineConsoleIO(consoleReader) - def rpConnections[F[_]: Concurrent]: F[ConnectionsCell[F]] = + def rpConnections[F[_]: Async]: F[ConnectionsCell[F]] = Ref[F].of(Connections.empty) def rpConfState[F[_]: Sync](conf: RPConf): F[Ref[F, RPConf]] = Ref.of(conf) diff --git a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala index 69704f4fa8e..dff6db89699 100644 --- a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala +++ b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala @@ -1,6 +1,6 @@ package coop.rchain.node.instances -import cats.effect.Concurrent +import cats.effect.Async import cats.effect.concurrent.MVar import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter @@ -14,7 +14,7 @@ import cats.effect.{Deferred, Ref} import cats.effect.std.Semaphore object ProposerInstance { - def create[F[_]: Concurrent: Log]( + def create[F[_]: Async: Log]( proposeRequestsQueue: Queue[F, (Boolean, Deferred[F, ProposerResult])], proposer: Proposer[F], state: Ref[F, ProposerState[F]] diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala index ffe0701fb5d..29675e4a09b 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala @@ -1,7 +1,7 @@ package coop.rchain.node.revvaultexport import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -41,7 +41,7 @@ object StateBalances { } yield unf 
} - def read[F[_]: Concurrent: Parallel: ContextShift]( + def read[F[_]: Async: Parallel: ContextShift]( shardId: String, blockHash: String, vaultTreeHashMapDepth: Int, diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala index e85bd5c9f4f..c9a352ec4c6 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala @@ -75,7 +75,7 @@ object StateBalanceMain { val stateBalancesFile = outputDir.resolve("stateBalances.csv") import coop.rchain.shared.RChainScheduler._ - implicit val tc = Concurrent[IO] + implicit val tc = Async[IO] val task: IO[Unit] = for { stateBalances <- StateBalances.read( diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala index b45b75d01f9..f76553d31cc 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala @@ -74,7 +74,7 @@ object TransactionBalanceMain { val historyFile = outputDir.resolve("history.csv") import coop.rchain.shared.RChainScheduler._ - implicit val tc = Concurrent[IO] + implicit val tc = Async[IO] val task: IO[Unit] = for { result <- TransactionBalances.main( diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala index 7ef305cbe1e..2d996625345 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala +++ 
b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala @@ -1,7 +1,7 @@ package coop.rchain.node.revvaultexport.reporting import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.dag.DagRepresentation @@ -221,7 +221,7 @@ object TransactionBalances { } yield blockMes } - def main[F[_]: Concurrent: Parallel: ContextShift]( + def main[F[_]: Async: Parallel: ContextShift]( dataDir: Path, walletPath: Path, bondPath: Path, diff --git a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala index 1d5dce5cf28..04572b5854d 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/GrpcServices.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.Concurrent +import cats.effect.Async import coop.rchain.casper.api.{BlockApi, BlockReportApi} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc @@ -17,7 +17,7 @@ final case class GrpcServices[F[_]]( ) object GrpcServices { - def build[F[_]: Concurrent: Log]( + def build[F[_]: Async: Log]( blockApi: BlockApi[F], blockReportAPI: BlockReportApi[F], runtime: RhoRuntime[F] diff --git a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index 70525705e3a..fb665aa4ec2 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.{Async, Concurrent, ConcurrentEffect, IO, Resource, Sync} +import cats.effect.{Async, ConcurrentEffect, IO, Resource, Sync} import cats.syntax.all._ import 
com.typesafe.config.Config import coop.rchain.casper.protocol.deploy.v1 @@ -45,7 +45,7 @@ object NetworkServers { */ // format: off def create[F[_] - /* Execution */ : ConcurrentEffect: Temporal: ContextShift + /* Execution */ : AsyncEffect: Temporal: ContextShift /* Comm */ : TransportLayer: NodeDiscovery: KademliaStore: RPConfAsk: ConnectionsCell /* Diagnostics */ : Log: Metrics] // format: on ( @@ -90,7 +90,7 @@ object NetworkServers { } yield () } - def internalServer[F[_]: Concurrent: ConcurrentEffect: Log]( + def internalServer[F[_]: Async: AsyncEffect: Log]( nodeConf: NodeConf, replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], @@ -113,7 +113,7 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def externalServer[F[_]: Concurrent: ConcurrentEffect: Log]( + def externalServer[F[_]: Async: AsyncEffect: Log]( nodeConf: NodeConf, deployService: v1.DeployServiceFs2Grpc[F, Metadata], grpcEC: ExecutionContext @@ -132,7 +132,7 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def protocolServer[F[_]: Concurrent: ConcurrentEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Temporal]( + def protocolServer[F[_]: Async: AsyncEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Temporal]( nodeConf: NodeConf, routingMessageQueue: Queue[F, RoutingMessage] ): Resource[F, Unit] = { @@ -152,7 +152,7 @@ object NetworkServers { ) } - def discoveryServer[F[_]: Concurrent: ConcurrentEffect: KademliaStore: Log: Metrics]( + def discoveryServer[F[_]: Async: AsyncEffect: KademliaStore: Log: Metrics]( nodeConf: NodeConf, grpcEC: ExecutionContext ): Resource[F, Server] = @@ -164,7 +164,7 @@ object NetworkServers { grpcEC ) - def webApiServer[F[_]: ContextShift: ConcurrentEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def webApiServer[F[_]: ContextShift: AsyncEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: 
WebApi[F], reportingRoutes: ReportingHttpRoutes[F], @@ -180,7 +180,7 @@ object NetworkServers { reportingRoutes ) - def adminWebApiServer[F[_]: ContextShift: ConcurrentEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def adminWebApiServer[F[_]: ContextShift: AsyncEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], adminWebApi: AdminWebApi[F], diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala index 2e10a8c26c9..3f69d581c22 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala @@ -1,7 +1,8 @@ package coop.rchain.node.runtime import cats.data.ReaderT -import cats.effect.{CancelToken, Concurrent, ConcurrentEffect, ExitCase, Fiber, IO, SyncIO} +import cats.effect.kernel.Async +import cats.effect.{CancelToken, ConcurrentEffect, ExitCase, Fiber, IO, SyncIO} import cats.~> import coop.rchain.node.diagnostics.Trace import coop.rchain.node.diagnostics.Trace.TraceId @@ -13,7 +14,7 @@ final case class NodeCallCtx(trace: TraceId) { object NodeCallCtx { def init: NodeCallCtx = NodeCallCtx(Trace.next) - final case class NodeCallCtxReader[F[_]: ConcurrentEffect]() { + final case class NodeCallCtxReader[F[_]: AsyncEffect]() { /** * Current implementation of Span uses ReaderT layer to hold the local state for tracing. @@ -37,7 +38,7 @@ object NodeCallCtx { * `runCancelable` and `runAsync` are newly provided. 
*/ implicit val concurrentReaderNodeCallCtx = new ConcurrentEffect[ReaderNodeCallCtx] { - val c = Concurrent[ReaderNodeCallCtx] + val c = Async[ReaderNodeCallCtx] val t = ConcurrentEffect[F] // ConcurrentEffect diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index f3b8bd1ffbe..8e505aa70f1 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -1,7 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{ConcurrentEffect, Resource, Sync} +import cats.effect.{AsyncEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol.client.{DeployRuntime, GrpcDeployService, GrpcProposeService} import coop.rchain.crypto.PrivateKey @@ -34,7 +34,7 @@ object NodeMain { * * @param options command line options */ - def startNode[F[_]: ConcurrentEffect: Parallel: ContextShift: Temporal: ConsoleIO: Log]( + def startNode[F[_]: AsyncEffect: Parallel: ContextShift: Temporal: ConsoleIO: Log]( options: commandline.Options ): F[Unit] = Sync[F].defer { // Create merged configuration from CLI options and config file @@ -87,7 +87,7 @@ object NodeMain { * @param options command line options * @param console console */ - def runCLI[F[_]: Sync: ConcurrentEffect: ConsoleIO: Temporal]( + def runCLI[F[_]: Sync: AsyncEffect: ConsoleIO: Temporal]( options: commandline.Options ): F[Unit] = { val grpcPort = diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index 49752c90a85..d4c7bc9689e 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -1,7 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{ConcurrentEffect, Resource, Sync} +import cats.effect.{AsyncEffect, Resource, 
Sync} import cats.mtl._ import cats.syntax.all._ import com.typesafe.config.Config @@ -31,7 +31,7 @@ import cats.effect.{Ref, Temporal} object NodeRuntime { type LocalEnvironment[F[_]] = ApplicativeLocal[F, NodeCallCtx] - def start[F[_]: ConcurrentEffect: Parallel: ContextShift: Temporal: Log]( + def start[F[_]: AsyncEffect: Parallel: ContextShift: Temporal: Log]( nodeConf: NodeConf, kamonConf: Config )(implicit mainEC: ExecutionContext): F[Unit] = { @@ -75,7 +75,7 @@ object NodeRuntime { } yield () } -class NodeRuntime[F[_]: ConcurrentEffect: Parallel: Temporal: ContextShift: LocalEnvironment: Log] private[node] ( +class NodeRuntime[F[_]: AsyncEffect: Parallel: Temporal: ContextShift: LocalEnvironment: Log] private[node] ( nodeConf: NodeConf, kamonConf: Config, id: NodeIdentifier diff --git a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index 33d1072df2c..b42a7d40101 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.Concurrent +import cats.effect.Async import cats.mtl.ApplicativeAsk import cats.syntax.all._ import cats.{Parallel, Show} @@ -48,7 +48,7 @@ import monix.execution.Scheduler import cats.effect.{Deferred, Ref, Temporal} object Setup { - def setupNodeProgram[F[_]: Concurrent: Parallel: ContextShift: Temporal: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( + def setupNodeProgram[F[_]: Async: Parallel: ContextShift: Temporal: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( storeManager: KeyValueStoreManager[F], rpConnections: ConnectionsCell[F], rpConfAsk: ApplicativeAsk[F, RPConf], diff --git a/node/src/main/scala/coop/rchain/node/web/ReportingRoutes.scala b/node/src/main/scala/coop/rchain/node/web/ReportingRoutes.scala index bb096bc1d0f..9a225659dbb 100644 --- 
a/node/src/main/scala/coop/rchain/node/web/ReportingRoutes.scala +++ b/node/src/main/scala/coop/rchain/node/web/ReportingRoutes.scala @@ -1,6 +1,6 @@ package coop.rchain.node.web -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.api.BlockApi.ApiErr import coop.rchain.casper.api.BlockReportApi @@ -25,7 +25,7 @@ object ReportingRoutes { ): F[ReportResponse] = state.map(_.fold(BlockReportError, BlockTracesReport)) - def service[F[_]: Concurrent]( + def service[F[_]: Async]( blockReportAPI: BlockReportApi[F] ): ReportingHttpRoutes[F] = { val dsl = org.http4s.dsl.Http4sDsl[F] diff --git a/node/src/main/scala/coop/rchain/node/web/Transaction.scala b/node/src/main/scala/coop/rchain/node/web/Transaction.scala index fd50c08527b..2f2bb473228 100644 --- a/node/src/main/scala/coop/rchain/node/web/Transaction.scala +++ b/node/src/main/scala/coop/rchain/node/web/Transaction.scala @@ -1,6 +1,6 @@ package coop.rchain.node.web -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.casper.api.BlockReportApi import coop.rchain.casper.protocol.{ @@ -50,7 +50,7 @@ trait TransactionAPI[F[_]] { * This API is totally based on how RevVault.rho is written. If the `RevVault.rho` is re-written or changed, * this API might end up with useless. */ -final case class TransactionAPIImpl[F[_]: Concurrent]( +final case class TransactionAPIImpl[F[_]: Async]( blockReportAPI: BlockReportApi[F], // The transferUnforgeable can be retrieved based on the deployer and the timestamp of RevVault.rho // in the genesis ceremony. 
@@ -141,7 +141,7 @@ final case class TransactionAPIImpl[F[_]: Concurrent]( } -final case class CacheTransactionAPI[F[_]: Concurrent]( +final case class CacheTransactionAPI[F[_]: Async]( transactionAPI: TransactionAPI[F], store: TransactionStore[F] ) { @@ -229,19 +229,19 @@ object Transaction { .as[TransactionResponse] } - def apply[F[_]: Concurrent]( + def apply[F[_]: Async]( blockReportAPI: BlockReportApi[F], // The transferUnforgeable can be retrieved based on the deployer and the timestamp of RevVault.rho // in the genesis ceremony. transferUnforgeable: Par ): TransactionAPIImpl[F] = TransactionAPIImpl(blockReportAPI, transferUnforgeable) - def store[F[_]: Concurrent]( + def store[F[_]: Async]( kvm: KeyValueStoreManager[F] ): F[KeyValueTypedStore[F, String, TransactionResponse]] = kvm.database("transaction", utf8, SCodec.transactionResponseCodec) - def cacheTransactionAPI[F[_]: Concurrent]( + def cacheTransactionAPI[F[_]: Async]( transactionAPI: TransactionAPI[F], kvm: KeyValueStoreManager[F] ): F[CacheTransactionAPI[F]] = diff --git a/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala b/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala index 6936536e875..73005a0095d 100644 --- a/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala +++ b/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala @@ -1,6 +1,6 @@ package coop.rchain.node.web -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.node.api.json.JsonEntitiesCirceFromSchema import coop.rchain.node.api.v1.{WebApiAdminEndpoints, WebApiEndpoints} @@ -22,7 +22,7 @@ object WebApiRoutesV1 { * @param webApi Web API implementation * @return http4s routes (including OpenAPI schema _openapi.json_) */ - def create[F[_]: Concurrent: Log](webApi: WebApi[F]): HttpRoutes[F] = { + def create[F[_]: Async: Log](webApi: WebApi[F]): HttpRoutes[F] = { // RNode WebApi v1 routes val apiRoutes = 
HttpRoutes.of[F](WebApiRoutesV1(webApi).publicRoutes) // OpenAPI schema route @@ -37,7 +37,7 @@ object WebApiRoutesV1 { * @param adminWebApi Admin Web API implementation * @return http4s routes (including OpenAPI schema _openapi.json_) */ - def createAdmin[F[_]: Concurrent: Log]( + def createAdmin[F[_]: Async: Log]( webApi: WebApi[F], adminWebApi: AdminWebApi[F] ): HttpRoutes[F] = { @@ -55,7 +55,7 @@ object WebApiRoutesV1 { /** * Defines implementation (interpreter) for Web API endpoints. */ -final case class WebApiRoutesV1[F[_]: Concurrent: Log]( +final case class WebApiRoutesV1[F[_]: Async: Log]( webApi: WebApi[F] ) extends Endpoints[F] with JsonEntitiesCirceFromSchema diff --git a/node/src/main/scala/coop/rchain/node/web/package.scala b/node/src/main/scala/coop/rchain/node/web/package.scala index 0c4df641b96..f5c6acbf1f4 100644 --- a/node/src/main/scala/coop/rchain/node/web/package.scala +++ b/node/src/main/scala/coop/rchain/node/web/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{ConcurrentEffect, Resource, Sync} +import cats.effect.{AsyncEffect, Resource, Sync} import cats.syntax.all._ import coop.rchain.comm.discovery.NodeDiscovery import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} @@ -25,7 +25,7 @@ package object web { def corsPolicy[F[_]: Sync](routes: HttpRoutes[F]) = CORS(routes, CORS.DefaultCORSConfig.copy(allowCredentials = false)) - def acquireHttpServer[F[_]: ContextShift: ConcurrentEffect: Temporal: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( + def acquireHttpServer[F[_]: ContextShift: AsyncEffect: Temporal: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( reporting: Boolean, host: String = "0.0.0.0", httpPort: Int, @@ -60,7 +60,7 @@ package object web { .resource } - def acquireAdminHttpServer[F[_]: ContextShift: ConcurrentEffect: Temporal: Log]( + def acquireAdminHttpServer[F[_]: ContextShift: AsyncEffect: Temporal: Log]( host: String = "0.0.0.0", httpPort: Int, connectionIdleTimeout: FiniteDuration, 
diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala index d2981ad2125..d0367db53dd 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala @@ -1,7 +1,7 @@ package coop.rchain.node.mergeablity import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.dag.BlockDagKeyValueStorage @@ -46,7 +46,7 @@ trait ComputeMerge { * B1 "contract @0(0) = { 0 } | for (@1 <- @0) { 0 }" * */ - def computeMergeCase[F[_]: Concurrent: Span: Log: Metrics: Parallel: ContextShift]( + def computeMergeCase[F[_]: Async: Span: Log: Metrics: Parallel: ContextShift]( baseDeployRand: Blake2b512Random, baseDeploySources: Seq[Signed[DeployData]], leftDeploySources: Seq[Signed[DeployData]], diff --git a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala index f89131ef09a..23a2512b197 100644 --- a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala +++ b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala @@ -1,7 +1,7 @@ package coop.rchain.node.perf import cats.Parallel -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.hashing.Blake2b256Hash @@ -76,7 +76,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll override def afterAll: Unit = tempDir.deleteRecursively - def storeLMDB[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]() + def storeLMDB[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]() : F[KeyValueStore[F]] = for { lmdbHistoryManager <- LmdbStoreManager( @@ -106,7 +106,7 @@ class 
HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll def create(root: Blake2b256Hash): F[HistoryType[F]] } - case class CreateRadixHistory[F[_]: Sync: Concurrent: ContextShift: Parallel: Log: Metrics: Span]() + case class CreateRadixHistory[F[_]: Sync: Async: ContextShift: Parallel: Log: Metrics: Span]() extends CreateHistory[F] { def create(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeStore match { @@ -131,7 +131,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll } } - case class CreateDefaultHistory[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]() + case class CreateDefaultHistory[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]() extends CreateHistory[F] { def create(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeStore match { @@ -154,7 +154,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll } } - class Experiment[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span: Sync] { + class Experiment[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span: Sync] { def getHistory(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeHistory match { diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala index 9b24f59ff9e..3aaf9a8c67f 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala +++ b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala @@ -1,6 +1,6 @@ package coop.rchain.node.revvaultexport -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import coop.rchain.casper.genesis.contracts.{Registry, StandardDeploys} import coop.rchain.casper.helper.TestNode.Effect import coop.rchain.casper.helper.TestRhoRuntime.rhoRuntimeEff @@ -58,7 +58,7 @@ class RhoTrieTraverserTest extends AnyFlatSpec { | } |}""".stripMargin - implicit val 
concurrent = Concurrent[IO] + implicit val concurrent = Async[IO] implicit val metricsEff: Metrics[Effect] = new Metrics.MetricsNOP[IO] implicit val noopSpan: Span[Effect] = NoopSpan[IO]() implicit val logger: Log[Effect] = Log.log[IO] diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/ContractCall.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/ContractCall.scala index 12ccd032f7d..1506c724dc5 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/ContractCall.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/ContractCall.scala @@ -1,6 +1,6 @@ package coop.rchain.rholang.interpreter -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.Span @@ -25,7 +25,7 @@ import coop.rchain.rholang.interpreter.RhoRuntime.RhoTuplespace * @param space the rspace instance * @param dispatcher the dispatcher */ -class ContractCall[F[_]: Concurrent: Span]( +class ContractCall[F[_]: Async: Span]( space: RhoTuplespace[F], dispatcher: Dispatch[F, ListParWithRandom, TaggedContinuation] ) { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala index 1b7ff8d004b..bf85aa1f299 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala @@ -379,7 +379,7 @@ object RhoRuntime { ) ) - def dispatchTableCreator[F[_]: Concurrent: Span]( + def dispatchTableCreator[F[_]: Async: Span]( space: RhoTuplespace[F], dispatcher: RhoDispatch[F], blockData: Ref[F, BlockData], @@ -398,7 +398,7 @@ object RhoRuntime { ) ) - def setupReducer[F[_]: Concurrent: Parallel: _cost: Log: Metrics: Span]( + def setupReducer[F[_]: Async: Parallel: _cost: Log: Metrics: Span]( chargingRSpace: RhoTuplespace[F], blockDataRef: Ref[F, BlockData], 
extraSystemProcesses: Seq[Definition[F]], @@ -433,7 +433,7 @@ object RhoRuntime { .map(_.toProcDefs) } yield (blockDataRef, urnMap, procDefs) - def createRhoEnv[F[_]: Concurrent: Parallel: _cost: Log: Metrics: Span]( + def createRhoEnv[F[_]: Async: Parallel: _cost: Log: Metrics: Span]( rspace: RhoISpace[F], mergeChs: Ref[F, Set[Par]], mergeableTagName: Par, @@ -471,7 +471,7 @@ object RhoRuntime { } yield () } - private def createRuntime[F[_]: Concurrent: Log: Metrics: Span: Parallel]( + private def createRuntime[F[_]: Async: Log: Metrics: Span: Parallel]( rspace: RhoISpace[F], extraSystemProcesses: Seq[Definition[F]], initRegistry: Boolean, @@ -510,7 +510,7 @@ object RhoRuntime { * use [[coop.rchain.rholang.interpreter.accounting.noOpCostLog]] * @return */ - def createRhoRuntime[F[_]: Concurrent: Log: Metrics: Span: Parallel]( + def createRhoRuntime[F[_]: Async: Log: Metrics: Span: Parallel]( rspace: RhoISpace[F], mergeableTagName: Par, initRegistry: Boolean = true, @@ -526,7 +526,7 @@ object RhoRuntime { * @param costLog same as [[coop.rchain.rholang.interpreter.RhoRuntime.createRhoRuntime]] * @return */ - def createReplayRhoRuntime[F[_]: Concurrent: Log: Metrics: Span: Parallel]( + def createReplayRhoRuntime[F[_]: Async: Log: Metrics: Span: Parallel]( rspace: RhoReplayISpace[F], mergeableTagName: Par, extraSystemProcesses: Seq[Definition[F]] = Seq.empty, @@ -554,7 +554,7 @@ object RhoRuntime { } yield runtime } - def createRuntimes[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntimes[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( space: RhoISpace[F], replaySpace: RhoReplayISpace[F], initRegistry: Boolean, @@ -580,7 +580,7 @@ object RhoRuntime { * Create from KeyValueStore's */ - def createRuntime[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntime[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], mergeableTagName: Par, rholangEC: ExecutionContext, diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala index a79f0248cba..0675d8d3d93 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter import cats._ -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models._ @@ -110,7 +110,7 @@ object RholangCLI { } } - def mkRSpaceStoreManager[F[_]: Concurrent: Log]( + def mkRSpaceStoreManager[F[_]: Async: Log]( dirPath: Path, mapSize: Long = 100 * mb ): F[KeyValueStoreManager[F]] = { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala index db316514214..f1bcc137e24 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/SystemProcesses.scala @@ -1,6 +1,6 @@ package coop.rchain.rholang.interpreter -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import com.typesafe.scalalogging.Logger @@ -97,7 +97,7 @@ object SystemProcesses { val REG_OPS: Long = 15L val SYS_AUTHTOKEN_OPS: Long = 16L } - final case class ProcessContext[F[_]: Concurrent: Span]( + final case class ProcessContext[F[_]: Async: Span]( space: RhoTuplespace[F], dispatcher: RhoDispatch[F], blockData: Ref[F, BlockData] @@ -139,7 +139,7 @@ object SystemProcesses { def apply[F[_]]( dispatcher: Dispatch[F, ListParWithRandom, TaggedContinuation], space: RhoTuplespace[F] - )(implicit F: Concurrent[F], spanF: Span[F]): SystemProcesses[F] = + )(implicit F: Async[F], spanF: Span[F]): SystemProcesses[F] = new 
SystemProcesses[F] { type ContWithMetaData = ContResult[Par, BindPattern, TaggedContinuation] diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala index 168b08f551b..234eb74d51f 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter.accounting import cats.data._ -import cats.effect.Concurrent +import cats.effect.Async import cats.effect.concurrent._ import cats.syntax.all._ import cats.mtl._ @@ -12,17 +12,17 @@ import cats.effect.Ref object CostAccounting { - private[this] def of[F[_]: Concurrent](init: Cost): F[MonadState[F, Cost]] = + private[this] def of[F[_]: Async](init: Cost): F[MonadState[F, Cost]] = Ref[F] .of(init) .map(defaultMonadState) - private[this] def empty[F[_]: Concurrent]: F[MonadState[F, Cost]] = + private[this] def empty[F[_]: Async]: F[MonadState[F, Cost]] = Ref[F] .of(Cost(0, "init")) .map(defaultMonadState) - def emptyCost[F[_]: Concurrent: Metrics]( + def emptyCost[F[_]: Async: Metrics]( implicit L: FunctorTell[F, Chain[Cost]], ms: Metrics.Source ): F[_cost[F]] = @@ -31,7 +31,7 @@ object CostAccounting { c <- empty } yield (loggingCost(c, L, s)) - def initialCost[F[_]: Concurrent: Metrics]( + def initialCost[F[_]: Async: Metrics]( init: Cost )(implicit L: FunctorTell[F, Chain[Cost]], ms: Metrics.Source): F[_cost[F]] = for { @@ -39,7 +39,7 @@ object CostAccounting { c <- of(init) } yield (loggingCost(c, L, s)) - private[this] def defaultMonadState[F[_]: Monad: Concurrent] = + private[this] def defaultMonadState[F[_]: Monad: Async] = (state: Ref[F, Cost]) => new DefaultMonadState[F, Cost] { val monad: cats.Monad[F] = implicitly[Monad[F]] diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala index 8b204c53783..ab9e59c976c 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogic.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter.merging import cats.Monad -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random @@ -39,7 +39,7 @@ object RholangMergingLogic { * @param channelValues Final values * @param getInitialValue Accessor to initial value */ - def calculateNumChannelDiff[F[_]: Concurrent, Key]( + def calculateNumChannelDiff[F[_]: Async, Key]( channelValues: Seq[Map[Key, Long]], getInitialValue: Key => F[Option[Long]] ): F[List[Map[Key, Long]]] = { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/StoragePrinter.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/StoragePrinter.scala index 4962b2245ee..6bfe39904e9 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/StoragePrinter.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/storage/StoragePrinter.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter.storage import cats.FlatMap -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.casper.protocol.DeployData import coop.rchain.crypto.signatures.Signed @@ -62,7 +62,7 @@ object StoragePrinter { PrettyPrinter().buildString(unmatchedSends.reduce(_ ++ _)) } - def prettyPrintUnmatchedSends[F[_]: Concurrent]( + def prettyPrintUnmatchedSends[F[_]: Async]( deploy: Signed[DeployData], runtime: RhoRuntime[F] ): F[String] = { @@ -84,7 +84,7 @@ object StoragePrinter { } } - def 
prettyPrintUnmatchedSends[F[_]: Concurrent]( + def prettyPrintUnmatchedSends[F[_]: Async]( deploys: Seq[Signed[DeployData]], runtime: RhoRuntime[F] ): F[String] = diff --git a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala index 05f1177c80e..ed8c67f36aa 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala @@ -2,7 +2,7 @@ package coop.rchain.rholang import cats.Parallel import cats.effect.ExitCase.Error -import cats.effect.{Concurrent, Resource, Sync} +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.metrics.{Metrics, Span} @@ -39,7 +39,7 @@ object Resources { }) ) - def mkRhoISpace[F[_]: Concurrent: Parallel: ContextShift: KeyValueStoreManager: Metrics: Span: Log] + def mkRhoISpace[F[_]: Async: Parallel: ContextShift: KeyValueStoreManager: Metrics: Span: Log] : F[RhoISpace[F]] = { import coop.rchain.rholang.interpreter.storage._ @@ -54,7 +54,7 @@ object Resources { } yield space } - def mkRuntime[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( + def mkRuntime[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( prefix: String ): Resource[F, RhoRuntime[F]] = mkTempDir(prefix) @@ -62,7 +62,7 @@ object Resources { .evalMap(_.rSpaceStores) .evalMap(RhoRuntime.createRuntime(_, Par(), RChainScheduler.rholangEC)) - def mkRuntimes[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( + def mkRuntimes[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( prefix: String, initRegistry: Boolean = false ): Resource[F, (RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = @@ -71,7 +71,7 @@ object Resources { .evalMap(_.rSpaceStores) .evalMap(createRuntimes(_, initRegistry = initRegistry)) - def createRuntimes[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntimes[F[_]: Async: 
ContextShift: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], initRegistry: Boolean = false, additionalSystemProcesses: Seq[Definition[F]] = Seq.empty diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala index 6a1770869e7..843b311d3e2 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala @@ -2,7 +2,7 @@ package coop.rchain.rholang.interpreter import cats.Parallel import cats.effect.testing.scalatest.AsyncIOSpec -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -24,7 +24,7 @@ import org.scalatest.matchers.should.Matchers // import coop.rchain.shared.RChainScheduler._ // val outcomeCh = "ret" // -// private def execute[F[_]: Concurrent: Parallel: ContextShift: Metrics: Span: Log]( +// private def execute[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( // source: String // ): F[Either[InterpreterError, BigInt]] = // mkRuntime[F]("rholang-bigint") diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala index cd8d596c848..5969cfcb4e0 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala @@ -64,7 +64,7 @@ class CostAccountingSpec }.unsafeRunSync } - private def createRuntimesWithCostLog[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span]( + private def createRuntimesWithCostLog[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], costLog: FunctorTell[F, 
Chain[Cost]], initRegistry: Boolean = false, diff --git a/roscala/src/main/scala/coop/rchain/roscala/util/LockedMap.scala b/roscala/src/main/scala/coop/rchain/roscala/util/LockedMap.scala index 5c462cd1068..74661a638be 100644 --- a/roscala/src/main/scala/coop/rchain/roscala/util/LockedMap.scala +++ b/roscala/src/main/scala/coop/rchain/roscala/util/LockedMap.scala @@ -25,7 +25,7 @@ class LockedMap[K, V] { map.put(i, v) } - def useWithReadLock[R](f: ConcurrentHashMap[K, V] => R): R = + def useWithReadLock[R](f: AsyncHashMap[K, V] => R): R = lock.readLock().withLock { f(map) } diff --git a/roscala/src/main/scala/coop/rchain/roscala/util/syntax.scala b/roscala/src/main/scala/coop/rchain/roscala/util/syntax.scala index 1cf6a2e64b9..3dc8c1b30ec 100644 --- a/roscala/src/main/scala/coop/rchain/roscala/util/syntax.scala +++ b/roscala/src/main/scala/coop/rchain/roscala/util/syntax.scala @@ -16,7 +16,7 @@ object syntax { } } - implicit class ConcurrentHashMapOps[K, V](val hm: ConcurrentHashMap[K, V]) extends AnyVal { + implicit class ConcurrentHashMapOps[K, V](val hm: AsyncHashMap[K, V]) extends AnyVal { @inline def apply(k: K): V = hm.get(k) @inline def update(k: K, v: V) = hm.put(k, v) diff --git a/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala b/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala index 92bbeba86ed..66b83b01cfd 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/HotStore.scala @@ -61,7 +61,7 @@ private final case class HistoryStoreCache[F[_], C, P, A, K]( joins: Map[C, Deferred[F, Seq[Seq[C]]]] ) -private class InMemHotStore[F[_]: Concurrent, C, P, A, K]( +private class InMemHotStore[F[_]: Async, C, P, A, K]( hotStoreState: Ref[F, HotStoreState[C, P, A, K]], // this is what is inside history store, lazily populated. 
Starting data for HotStoreState historyStoreCache: Ref[F, HistoryStoreCache[F, C, P, A, K]], @@ -362,7 +362,7 @@ private class InMemHotStore[F[_]: Concurrent, C, P, A, K]( object HotStore { - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( hotStoreStateRef: Ref[F, HotStoreState[C, P, A, K]], historyReaderBase: HistoryReaderBase[F, C, P, A, K] ): F[HotStore[F, C, P, A, K]] = @@ -372,7 +372,7 @@ object HotStore { ) .map(new InMemHotStore[F, C, P, A, K](hotStoreStateRef, _, historyReaderBase)) - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( cache: HotStoreState[C, P, A, K], historyReader: HistoryReaderBase[F, C, P, A, K] ): F[HotStore[F, C, P, A, K]] = @@ -381,7 +381,7 @@ object HotStore { store <- HotStore(cache, historyReader) } yield store - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( historyReader: HistoryReaderBase[F, C, P, A, K] ): F[HotStore[F, C, P, A, K]] = apply(HotStoreState[C, P, A, K](), historyReader) diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala index c9476df0e18..3a0cb7a3b92 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala @@ -18,7 +18,7 @@ import monix.execution.atomic.AtomicAny import scala.collection.SortedSet import scala.concurrent.ExecutionContext -class RSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( +class RSpace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext @@ -235,7 +235,7 @@ object RSpace { /** * Creates [[RSpace]] from [[HistoryRepository]] and [[HotStore]]. 
*/ - def apply[F[_]: Concurrent: ContextShift: Span: Metrics: Log, C, P, A, K]( + def apply[F[_]: Async: ContextShift: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K], rholangEC: ExecutionContext @@ -252,7 +252,7 @@ object RSpace { /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ - def create[F[_]: Concurrent: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( + def create[F[_]: Async: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( store: RSpaceStore[F], rholangEC: ExecutionContext )( @@ -272,7 +272,7 @@ object RSpace { /** * Creates [[RSpace]] and [[ReplayRSpace]] from [[KeyValueStore]]'s. */ - def createWithReplay[F[_]: Concurrent: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( + def createWithReplay[F[_]: Async: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( store: RSpaceStore[F], rholangEC: ExecutionContext )( @@ -296,7 +296,7 @@ object RSpace { /** * Creates [[HistoryRepository]] and [[HotStore]]. 
*/ - def createHistoryRepo[F[_]: Concurrent: Parallel: Log: Span, C, P, A, K](store: RSpaceStore[F])( + def createHistoryRepo[F[_]: Async: Parallel: Log: Span, C, P, A, K](store: RSpaceStore[F])( implicit sc: Serialize[C], sp: Serialize[P], diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala index 2ceaf64ecf7..7347344df8a 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace import cats.Applicative -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.catscontrib._ @@ -21,7 +21,7 @@ import scala.concurrent.{ExecutionContext, SyncVar} import scala.util.Random import cats.effect.Ref -abstract class RSpaceOps[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( +abstract class RSpaceOps[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], val storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala index 5be5f70c6f3..ef34510e4b4 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala @@ -18,7 +18,7 @@ import scala.collection.JavaConverters._ import scala.collection.{immutable, SortedSet} import scala.concurrent.ExecutionContext -class ReplayRSpace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( +class ReplayRSpace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext @@ -317,7 +317,7 @@ object ReplayRSpace { /** * Creates 
[[ReplayRSpace]] from [[HistoryRepository]] and [[HotStore]]. */ - def apply[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( + def apply[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K], rholangEC: ExecutionContext diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala index edb1493380f..6c1d64852ec 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala @@ -54,7 +54,7 @@ object ReportingRspace { /** * Creates [[ReportingRspace]] from [[HistoryRepository]] and [[HotStore]]. */ - def apply[F[_]: Concurrent: ContextShift: Span: Metrics: Log, C, P, A, K]( + def apply[F[_]: Async: ContextShift: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K] )( @@ -72,7 +72,7 @@ object ReportingRspace { /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ - def create[F[_]: Concurrent: ContextShift: Parallel: Log: Metrics: Span, C, P, A, K]( + def create[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span, C, P, A, K]( store: RSpaceStore[F] )( implicit sc: Serialize[C], @@ -88,7 +88,7 @@ object ReportingRspace { } yield reportingRSpace } -class ReportingRspace[F[_]: Concurrent: ContextShift: Log: Metrics: Span, C, P, A, K]( +class ReportingRspace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]] )( diff --git a/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala b/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala index e027f1ed88c..0b59a54e212 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala +++ 
b/rspace/src/main/scala/coop/rchain/rspace/concurrent/MultiLock.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.concurrent -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.catscontrib.ski.kp import coop.rchain.metrics.Metrics @@ -9,7 +9,7 @@ import coop.rchain.metrics.Metrics.Source import scala.collection.concurrent.TrieMap import cats.effect.std.Semaphore -class MultiLock[F[_]: Concurrent: Metrics, K](metricSource: Metrics.Source) { +class MultiLock[F[_]: Async: Metrics, K](metricSource: Metrics.Source) { implicit private val ms: Source = metricSource @@ -34,7 +34,7 @@ class MultiLock[F[_]: Concurrent: Metrics, K](metricSource: Metrics.Source) { import coop.rchain.metrics.implicits._ - Concurrent[F].bracket( + Sync[F].bracket( for { _ <- Metrics[F].incrementGauge("lock.queue") locks <- acquireLocks.timer("lock.acquire") diff --git a/rspace/src/main/scala/coop/rchain/rspace/concurrent/TwoStepLock.scala b/rspace/src/main/scala/coop/rchain/rspace/concurrent/TwoStepLock.scala index a93f04023bb..fed8b0bea2b 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/concurrent/TwoStepLock.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/concurrent/TwoStepLock.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.concurrent -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.metrics.Metrics @@ -10,7 +10,7 @@ trait TwoStepLock[F[_], K] { ): F[W] } -class ConcurrentTwoStepLockF[F[_]: Concurrent: Metrics, K](ms: Metrics.Source) +class ConcurrentTwoStepLockF[F[_]: Async: Metrics, K](ms: Metrics.Source) extends TwoStepLock[F, K] { private[this] val phaseA: MultiLock[F, K] = new MultiLock[F, K](Metrics.Source(ms, "two-step-lock.phase-a")) diff --git a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala index 3c7c376f96a..2ecda12611b 100644 --- 
a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.examples -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.{Applicative, Id} import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager @@ -81,7 +81,7 @@ object AddressBookExample { object implicits { - implicit val concurrentF: Concurrent[Id] = coop.rchain.catscontrib.effect.implicits.concurrentId + implicit val concurrentF: Async[Id] = coop.rchain.catscontrib.effect.implicits.concurrentId implicit val contextShiftId: ContextShift[Id] = new ContextShift[Id] { diff --git a/rspace/src/main/scala/coop/rchain/rspace/history/History.scala b/rspace/src/main/scala/coop/rchain/rspace/history/History.scala index 3eb2b70bd69..ccfd246ec7a 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/history/History.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/history/History.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.history import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.instances.RadixHistory import coop.rchain.store.KeyValueStore @@ -41,7 +41,7 @@ trait History[F[_]] { object History { val emptyRootHash: Blake2b256Hash = RadixHistory.emptyRootHash - def create[F[_]: Concurrent: Sync: Parallel]( + def create[F[_]: Async: Sync: Parallel]( root: Blake2b256Hash, store: KeyValueStore[F] ): F[RadixHistory[F]] = RadixHistory(root, RadixHistory.createStore(store)) diff --git a/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepository.scala b/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepository.scala index 2e0bd66391f..a2bad815ee3 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepository.scala +++ 
b/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepository.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.history import cats.Parallel -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.metrics.Span import coop.rchain.rspace.hashing.Blake2b256Hash @@ -54,7 +54,7 @@ object HistoryRepositoryInstances { val PREFIX_KONT: Byte = 0x01 val PREFIX_JOINS: Byte = 0x02 - def lmdbRepository[F[_]: Concurrent: Parallel: Log: Span, C, P, A, K]( + def lmdbRepository[F[_]: Async: Parallel: Log: Span, C, P, A, K]( historyKeyValueStore: KeyValueStore[F], rootsKeyValueStore: KeyValueStore[F], coldKeyValueStore: KeyValueStore[F] diff --git a/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepositoryImpl.scala b/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepositoryImpl.scala index 258e949a53f..52bed14e192 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepositoryImpl.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/history/HistoryRepositoryImpl.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.history import cats.Parallel -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger import coop.rchain.metrics.{Metrics, Span} @@ -16,7 +16,7 @@ import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, Serialize} import fs2.Stream -final case class HistoryRepositoryImpl[F[_]: Concurrent: Parallel: Log: Span, C, P, A, K]( +final case class HistoryRepositoryImpl[F[_]: Async: Parallel: Log: Span, C, P, A, K]( currentHistory: History[F], rootsRepository: RootRepository[F], leafStore: ColdKeyValueStore[F], @@ -226,7 +226,7 @@ final case class HistoryRepositoryImpl[F[_]: Concurrent: Parallel: Log: Span, C, .reset(root = stateHash) .map( new RSpaceHistoryReaderImpl(_, leafStore)( - Concurrent[F], + Async[F], serializeC, serializeP, serializeA, diff --git 
a/rspace/src/main/scala/coop/rchain/rspace/history/instances/RSpaceHistoryReaderImpl.scala b/rspace/src/main/scala/coop/rchain/rspace/history/instances/RSpaceHistoryReaderImpl.scala index 74c77f3cdce..99d0f558339 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/history/instances/RSpaceHistoryReaderImpl.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/history/instances/RSpaceHistoryReaderImpl.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.history.instances -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.rspace.hashing.{Blake2b256Hash, StableHashProvider} import coop.rchain.rspace.history.ColdStoreInstances.ColdKeyValueStore @@ -11,7 +11,7 @@ import coop.rchain.shared.Serialize import coop.rchain.shared.syntax._ import scodec.bits.ByteVector -class RSpaceHistoryReaderImpl[F[_]: Concurrent, C, P, A, K]( +class RSpaceHistoryReaderImpl[F[_]: Async, C, P, A, K]( targetHistory: History[F], leafStore: ColdKeyValueStore[F] )( diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala index adc67498b46..5e6e70bd8de 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/EventLogIndex.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.merger -import cats.effect.Concurrent +import cats.effect.Async import cats.kernel.Monoid import cats.syntax.all._ import coop.rchain.rspace.merger.EventLogMergingLogic.{ @@ -31,7 +31,7 @@ final case class EventLogIndex( object EventLogIndex { - def apply[F[_]: Concurrent]( + def apply[F[_]: Async]( eventLog: List[Event], produceExistsInPreState: Produce => F[Boolean], produceTouchPreStateJoin: Produce => F[Boolean], diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala index 1ffb6a2ba9d..badc277bf9c 100644 
--- a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.merger import cats.Monoid -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.rspace.hashing.{Blake2b256Hash, StableHashProvider} import coop.rchain.rspace.history.{ColdStoreInstances, DataLeaf, HistoryReaderBinary} @@ -30,7 +30,7 @@ final case class StateChange( object StateChange { - private def computeValueChange[F[_]: Concurrent]( + private def computeValueChange[F[_]: Async]( historyPointer: Blake2b256Hash, startValue: Blake2b256Hash => F[Seq[ByteVector]], endValue: Blake2b256Hash => F[Seq[ByteVector]] @@ -42,7 +42,7 @@ object StateChange { deleted = startValue diff endValue } yield ChannelChange(added, deleted) - def apply[F[_]: Concurrent, C, P, A, K]( + def apply[F[_]: Async, C, P, A, K]( preStateReader: HistoryReaderBinary[F, C, P, A, K], postStateReader: HistoryReaderBinary[F, C, P, A, K], eventLogIndex: EventLogIndex, diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChangeMerger.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChangeMerger.scala index 71104c1e69c..c1a2907c87a 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChangeMerger.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChangeMerger.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.merger -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.rspace._ import coop.rchain.rspace.hashing.{Blake2b256Hash, StableHashProvider} @@ -24,7 +24,7 @@ object StateChangeMerger { joinAction: Option[JoinAction] ) - def computeTrieActions[F[_]: Concurrent, C, P, A, K]( + def computeTrieActions[F[_]: Async, C, P, A, K]( changes: StateChange, baseReader: HistoryReaderBinary[F, C, P, A, K], mergeableChs: NumberChannelsDiff, diff --git 
a/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceExporterSyntax.scala b/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceExporterSyntax.scala index ced417bfd45..6a040b2338b 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceExporterSyntax.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceExporterSyntax.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.state -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.state.exporters.RSpaceExporterItems.StoreItems @@ -52,7 +52,7 @@ final class RSpaceExporterOps[F[_]]( // Export to disk def writeToDisk[C, P, A, K](root: Blake2b256Hash, dirPath: Path, chunkSize: Int)( - implicit m: Concurrent[F], + implicit m: Async[F], l: Log[F] ): F[Unit] = RSpaceExporterDisk.writeToDisk[F](exporter, root, dirPath, chunkSize) diff --git a/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceImporter.scala b/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceImporter.scala index 9822223149e..56f199f71fb 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceImporter.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/state/RSpaceImporter.scala @@ -22,7 +22,7 @@ final case class StateValidationError(message: String) extends Exception(message } object RSpaceImporter { - def validateStateItems[F[_]: Concurrent]( + def validateStateItems[F[_]: Async]( historyItems: Seq[(Blake2b256Hash, ByteVector)], dataItems: Seq[(Blake2b256Hash, ByteVector)], startPath: Seq[(Blake2b256Hash, Option[Byte])], diff --git a/rspace/src/main/scala/coop/rchain/rspace/state/exporters/RSpaceExporterDisk.scala b/rspace/src/main/scala/coop/rchain/rspace/state/exporters/RSpaceExporterDisk.scala index 43a7d1e34fa..fe9937f6969 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/exporters/RSpaceExporterDisk.scala +++ 
b/rspace/src/main/scala/coop/rchain/rspace/state/exporters/RSpaceExporterDisk.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.state.exporters import cats.Monad -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.state.{RSpaceExporter, RSpaceImporter} @@ -16,7 +16,7 @@ import java.nio.file.Path object RSpaceExporterDisk { - def writeToDisk[F[_]: Concurrent: Log]( + def writeToDisk[F[_]: Async: Log]( exporter: RSpaceExporter[F], root: Blake2b256Hash, dirPath: Path, diff --git a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceExporterStore.scala b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceExporterStore.scala index bdf031eef0a..e30624fa9af 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceExporterStore.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceExporterStore.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.state.instances -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.RootsStoreInstances @@ -15,7 +15,7 @@ import java.nio.ByteBuffer object RSpaceExporterStore { // RSpace exporter constructor / smart constructor "guards" private class - def apply[F[_]: Concurrent]( + def apply[F[_]: Async]( historyStore: KeyValueStore[F], valueStore: KeyValueStore[F], rootsStore: KeyValueStore[F] @@ -23,7 +23,7 @@ object RSpaceExporterStore { final case object NoRootError extends Exception - private final case class RSpaceExporterImpl[F[_]: Concurrent]( + private final case class RSpaceExporterImpl[F[_]: Async]( sourceHistoryStore: KeyValueStore[F], sourceValueStore: KeyValueStore[F], sourceRootsStore: KeyValueStore[F] diff --git a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceImporterStore.scala 
b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceImporterStore.scala index 04dcb69d22c..2ccce7df7ee 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceImporterStore.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/state/instances/RSpaceImporterStore.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace.state.instances import java.nio.ByteBuffer -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.RootsStoreInstances @@ -12,7 +12,7 @@ import scodec.bits.ByteVector object RSpaceImporterStore { // RSpace importer constructor / smart constructor "guards" private class - def apply[F[_]: Concurrent]( + def apply[F[_]: Async]( historyStore: KeyValueStore[F], valueStore: KeyValueStore[F], rootsStore: KeyValueStore[F] diff --git a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala index 461a9e0ca1c..567fa571a4e 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala @@ -1,7 +1,7 @@ package coop.rchain.rspace import cats.Parallel -import cats.effect.{Concurrent, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.rspace.examples.StringExamples.{StringsCaptor, _} import coop.rchain.rspace.examples.StringExamples.implicits._ @@ -1117,7 +1117,7 @@ trait InMemHotStoreSpec extends HotStoreSpec[IO] { import coop.rchain.shared.RChainScheduler._ protected type F[A] = IO[A] - implicit override val S: Sync[F] = implicitly[Concurrent[IO]] + implicit override val S: Sync[F] = implicitly[Async[IO]] implicit override val P: Parallel[IO] = IO.ioParallel def C( c: HotStoreState[String, Pattern, String, StringsCaptor] = HotStoreState() diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala 
b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala index a691d240b36..5e75b0bd4d7 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala @@ -2,7 +2,7 @@ package coop.rchain.rspace import cats.Parallel.Aux import cats._ -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import coop.rchain.rspace.examples.AddressBookExample import coop.rchain.rspace.examples.AddressBookExample._ diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala index 16313b4f04c..9577c784b88 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala @@ -27,7 +27,7 @@ trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with type HR = HistoryRepository[F, C, P, A, K] type AtST = AtomicAny[ST] - implicit def concurrentF: Concurrent[F] + implicit def concurrentF: Async[F] implicit def parF: Parallel[F] implicit def logF: Log[F] implicit def metricsF: Metrics[F] @@ -93,7 +93,7 @@ trait TaskTests[C, P, A, R, K] extends StorageTestsBase[IO, C, P, R, K] { implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() implicit val spanF: Span[IO] = NoopSpan[IO]() implicit val contextShiftF: ContextShift[IO] = coop.rchain.shared.RChainScheduler.csIO - implicit val concurrentF: Concurrent[IO] = Concurrent[IO] + implicit val concurrentF: Async[IO] = Async[IO] implicit val monadF: Monad[IO] = Monad[IO] override def run[RES](f: IO[RES]): RES = f.unsafeRunSync } diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala index af6545fffca..b224bc3c6ff 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala +++ 
b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala @@ -87,7 +87,7 @@ class MultiLockTest extends AnyFlatSpec with Matchers { } "FunctionalMultiLock" should "not allow concurrent modifications of same keys" in { - import cats.effect.{Concurrent, IO} + import cats.effect.{Async, IO} import cats.implicits._ implicit val metrics: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO] diff --git a/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala b/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala index c074e53aee9..02b08e486da 100644 --- a/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala +++ b/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala @@ -10,7 +10,7 @@ import scala.util.control.NonFatal package object implicits { // this is for testing purposes, do not use in production code! - implicit val concurrentId: Concurrent[Id] = + implicit val concurrentId: Async[Id] = new Concurrent[Id] { override def start[A](fa: Id[A]): Id[Fiber[Id, A]] = ??? override def racePair[A, B]( diff --git a/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala b/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala index a2e5f8fdc6c..ce819cb82bf 100644 --- a/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala +++ b/shared/src/main/scala/coop/rchain/fs2/Fs2StreamSyntax.scala @@ -1,13 +1,12 @@ package coop.rchain.fs2 -import cats.effect.Concurrent +import cats.effect.{Async, Ref, Temporal} import cats.syntax.all._ import fs2.Stream import fs2.Stream._ import java.util.concurrent.TimeUnit import scala.concurrent.duration.{FiniteDuration, NANOSECONDS} -import cats.effect.{Ref, Temporal} trait Fs2StreamSyntax { implicit final def sharedSyntaxFs2Stream[F[_], A](stream: Stream[F, A]): Fs2StreamOps[F, A] = @@ -27,25 +26,25 @@ class Fs2StreamOps[F[_], A]( /** * Variant of [[Stream.parEvalMap]] with parallelism bound to number of processors. 
*/ - def parEvalMapProcBounded[F2[x] >: F[x]: Concurrent, B](f: A => F2[B]): Stream[F2, B] = + def parEvalMapProcBounded[F2[x] >: F[x]: Async, B](f: A => F2[B]): Stream[F2, B] = stream.parEvalMap[F2, B](availableProcessors)(f) /** * Variant of [[Stream.parEvalMapUnordered]] with parallelism bound to number of processors. */ - def parEvalMapUnorderedProcBounded[F2[x] >: F[x]: Concurrent, B](f: A => F2[B]): Stream[F2, B] = + def parEvalMapUnorderedProcBounded[F2[x] >: F[x]: Async, B](f: A => F2[B]): Stream[F2, B] = stream.parEvalMapUnordered[F2, B](availableProcessors)(f) /** * Variant of [[Stream.evalFilterAsync]] with parallelism bound to number of processors. */ - def evalFilterAsyncProcBounded[F2[x] >: F[x]: Concurrent](f: A => F2[Boolean]): Stream[F2, A] = + def evalFilterAsyncProcBounded[F2[x] >: F[x]: Async](f: A => F2[Boolean]): Stream[F2, A] = stream.evalFilterAsync[F2](availableProcessors)(f) /** * Variant of [[Stream.evalFilterAsync]] without keeping order of results. */ - def evalFilterAsyncUnordered[F2[x] >: F[x]: Concurrent]( + def evalFilterAsyncUnordered[F2[x] >: F[x]: Async]( maxConcurrent: Int )(f: A => F2[Boolean]): Stream[F2, A] = stream @@ -57,7 +56,7 @@ class Fs2StreamOps[F[_], A]( /** * Variant of [[evalFilterAsyncUnordered]] with parallelism bound to number of processors. 
*/ - def evalFilterAsyncUnorderedProcBounded[F2[x] >: F[x]: Concurrent]( + def evalFilterAsyncUnorderedProcBounded[F2[x] >: F[x]: Async]( f: A => F2[Boolean] ): Stream[F2, A] = evalFilterAsyncUnordered[F2](availableProcessors)(f) @@ -79,9 +78,9 @@ class Fs2StreamOps[F[_], A]( def evalOnIdle[B]( action: F[B], timeout: FiniteDuration - )(implicit c: Concurrent[F], t: Temporal[F]): Stream[F, A] = { + )(implicit t: Temporal[F]): Stream[F, A] = { // Current time in nano seconds - val nanoTime = Temporal[F].clock.monotonic(NANOSECONDS) + val nanoTime = Temporal[F].realTime.map(_.toNanos) // Timeout in nano seconds val timeoutNano = timeout.toNanos @@ -132,6 +131,6 @@ class Fs2StreamOfStreamsOps[F[_], A]( /** * Variant of [[Stream.parJoin]] with parallelism bound to number of processors. */ - def parJoinProcBounded(implicit F: Concurrent[F]): Stream[F, A] = + def parJoinProcBounded(implicit F: Async[F]): Stream[F, A] = streams.parJoin(availableProcessors) } diff --git a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala index a08cc554b4e..a34b79052e7 100644 --- a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala +++ b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala @@ -38,11 +38,11 @@ object MetricsSemaphore { )(implicit ms: Metrics.Source): MetricsSemaphore[F] = new MetricsSemaphore(underlying) - def apply[F[_]: Concurrent: Metrics]( + def apply[F[_]: Async: Metrics]( n: Long )(implicit ms: Metrics.Source): F[MetricsSemaphore[F]] = Semaphore[F](n).map(apply(_)) - def single[F[_]: Concurrent: Metrics](implicit ms: Metrics.Source): F[MetricsSemaphore[F]] = + def single[F[_]: Async: Metrics](implicit ms: Metrics.Source): F[MetricsSemaphore[F]] = apply(1L) } diff --git a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala index f44c6cd0233..6f6d74c953f 100644 --- 
a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala +++ b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala @@ -1,16 +1,9 @@ package coop.rchain.shared -import cats.effect.IO - -import java.util.concurrent.{Executors, ThreadFactory} import java.util.concurrent.atomic.AtomicLong +import java.util.concurrent.{Executors, ThreadFactory} object RChainScheduler { - implicit val mainEC = scala.concurrent.ExecutionContext.Implicits.global - implicit val csIO: ContextShift[IO] = IO.contextShift(mainEC) - val rholangEC = mainEC - implicit val timer = IO.timer(mainEC) - val ioScheduler = Executors.newCachedThreadPool(new ThreadFactory { private val counter = new AtomicLong(0L) diff --git a/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala b/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala index 5864c06a38d..eb891e9733a 100644 --- a/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala +++ b/shared/src/main/scala/coop/rchain/store/LazyAdHocKeyValueCache.scala @@ -1,7 +1,7 @@ package coop.rchain.store import cats.Applicative -import cats.effect.Concurrent +import cats.effect.Async import cats.syntax.all._ import cats.effect.{Deferred, Ref} @@ -17,7 +17,7 @@ final case class NoOpKeyValueCache[F[_]: Applicative, K, V]() extends KeyValueCa override def toMap: F[Map[K, V]] = Map.empty[K, V].pure[F] } -class LazyAdHocKeyValueCache[F[_]: Concurrent, K, V] private[LazyAdHocKeyValueCache] ( +class LazyAdHocKeyValueCache[F[_]: Async, K, V] private[LazyAdHocKeyValueCache] ( cache: Ref[F, Map[K, Deferred[F, V]]] ) extends KeyValueCache[F, K, V] { @@ -53,7 +53,7 @@ class LazyAdHocKeyValueCache[F[_]: Concurrent, K, V] private[LazyAdHocKeyValueCa * Similar to [[LazyKeyValueCache]] but allows to delay initialization of populate function till get call */ object LazyAdHocKeyValueCache { - def apply[F[_]: Concurrent, K, V]: F[LazyAdHocKeyValueCache[F, K, V]] = + def apply[F[_]: Async, K, V]: 
F[LazyAdHocKeyValueCache[F, K, V]] = for { cache <- Ref.of[F, Map[K, Deferred[F, V]]](Map.empty[K, Deferred[F, V]]) } yield new LazyAdHocKeyValueCache(cache) diff --git a/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala b/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala index 4c60682a208..0f788a84148 100644 --- a/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala +++ b/shared/src/main/scala/coop/rchain/store/LazyKeyValueCache.scala @@ -1,10 +1,9 @@ package coop.rchain.store -import cats.effect.Concurrent +import cats.effect.{Async, Deferred, Ref, Sync} import cats.syntax.all._ -import cats.effect.{Deferred, Ref} -class LazyKeyValueCache[F[_]: Concurrent, K, V] private[LazyKeyValueCache] ( +class LazyKeyValueCache[F[_]: Async, K, V] private[LazyKeyValueCache] ( cache: Ref[F, Map[K, Deferred[F, V]]], populate: K => F[V] ) { @@ -20,7 +19,7 @@ class LazyKeyValueCache[F[_]: Concurrent, K, V] private[LazyKeyValueCache] ( } } (d, empty) = ret - _ <- Concurrent[F].whenA(empty)( + _ <- Sync[F].whenA(empty)( populate(key) >>= d.complete ) r <- d.get @@ -42,7 +41,7 @@ class LazyKeyValueCache[F[_]: Concurrent, K, V] private[LazyKeyValueCache] ( * Cache that populates value using supplied function only once per value request. 
*/ object LazyKeyValueCache { - def apply[F[_]: Concurrent, K, V]( + def apply[F[_]: Async, K, V]( populateKeyWithValue: K => F[V] ): F[LazyKeyValueCache[F, K, V]] = for { diff --git a/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala b/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala index a2f0c044524..f64308de7a2 100644 --- a/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala +++ b/shared/src/main/scala/coop/rchain/store/LmdbDirStoreManager.scala @@ -1,18 +1,17 @@ package coop.rchain.store -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Deferred, Ref, Sync} import cats.syntax.all._ import coop.rchain.shared.Log import coop.rchain.store.LmdbDirStoreManager.{Db, LmdbEnvConfig} import java.nio.file.Path -import cats.effect.{Deferred, Ref} object LmdbDirStoreManager { // TODO: Return instance as Resource with the call to _shutdown_. // Shutdown can also be removed from the interface and be only // implemented as instance method if applicable. - def apply[F[_]: Concurrent: Log]( + def apply[F[_]: Async: Log]( dirPath: Path, dbInstanceMapping: Map[Db, LmdbEnvConfig] ): F[KeyValueStoreManager[F]] = @@ -40,7 +39,7 @@ object LmdbDirStoreManager { // The idea for this class is to manage multiple of key-value lmdb databases. // For LMDB this allows control which databases are part of the same environment (file). 
-private final case class LmdbDirStoreManager[F[_]: Concurrent: Log]( +private final case class LmdbDirStoreManager[F[_]: Async: Log]( dirPath: Path, dbMapping: Map[Db, LmdbEnvConfig] ) extends KeyValueStoreManager[F] { diff --git a/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala b/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala index 6f75740c966..a8c769d46c7 100644 --- a/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala +++ b/shared/src/main/scala/coop/rchain/store/LmdbStoreManager.scala @@ -2,17 +2,15 @@ package coop.rchain.store import java.nio.ByteBuffer import java.nio.file.{Files, Path} - -import cats.effect.{Concurrent, Sync} +import cats.effect.{Async, Deferred, Ref, Sync} import cats.syntax.all._ import coop.rchain.shared.{Log, LogSource} import enumeratum.{Enum, EnumEntry} import org.lmdbjava.ByteBufferProxy.PROXY_SAFE import org.lmdbjava.{DbiFlags, Env, EnvFlags} -import cats.effect.{Deferred, Ref} object LmdbStoreManager { - def apply[F[_]: Concurrent: Log](dirPath: Path, maxEnvSize: Long): F[KeyValueStoreManager[F]] = + def apply[F[_]: Async: Log](dirPath: Path, maxEnvSize: Long): F[KeyValueStoreManager[F]] = Deferred[F, Env[ByteBuffer]] map (LmdbStoreManagerImpl(dirPath, maxEnvSize, _)) } @@ -24,7 +22,7 @@ object LmdbStoreManager { * @param envDefer deferred object for LMDB environment in use. * @return LMDB store manager. 
*/ -private final case class LmdbStoreManagerImpl[F[_]: Concurrent: Log]( +private final case class LmdbStoreManagerImpl[F[_]: Async: Log]( dirPath: Path, maxEnvSize: Long, envDefer: Deferred[F, Env[ByteBuffer]] diff --git a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala index df8fe12b6b2..0c28022009a 100644 --- a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.shared -import cats.effect.{Concurrent, IO} +import cats.effect.{Async, IO} import cats.syntax.all._ import coop.rchain.shared.syntax.sharedSyntaxFs2Stream import fs2.Stream @@ -18,13 +18,12 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { /** * Creates a Stream of 2 elements creating String "11", if timeout occurs it will insert zeroes e.g. "101" */ - def test[F[_]: Concurrent: Temporal](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { - st => - val addOne = Stream.eval(st.updateAndGet(_ + "1")) - val pause = Stream.sleep(1.second)(Temporal[F]).drain - val addZero = st.update(_ + "0") + def test[F[_]: Async: Temporal](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { st => + val addOne = Stream.eval(st.updateAndGet(_ + "1")) + val pause = Stream.sleep(1.second)(Temporal[F]).drain + val addZero = st.update(_ + "0") - (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError + (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError } // Helper to construct success result From a916eddc95b6a41b60d5ddc6f8f6e1cf3bd01b07 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Fri, 7 Apr 2023 19:30:14 +0400 Subject: [PATCH 14/17] API adjustments, deprecates removal --- .../rchain/casper/BlockExecutionTracker.scala | 2 +- .../rchain/casper/MultiParentCasper.scala | 2 +- 
.../coop/rchain/casper/api/BlockApiImpl.scala | 2 +- .../rchain/casper/blocks/BlockProcessor.scala | 10 +-- .../rchain/casper/blocks/BlockReceiver.scala | 14 ++-- .../rchain/casper/blocks/BlockRetriever.scala | 8 +- .../casper/blocks/proposer/Proposer.scala | 4 +- .../casper/dag/BlockDagKeyValueStorage.scala | 3 +- .../casper/engine/LfsBlockRequester.scala | 29 +++---- .../engine/LfsTupleSpaceRequester.scala | 23 +++--- .../rchain/casper/engine/NodeLaunch.scala | 10 +-- .../rchain/casper/engine/NodeRunning.scala | 18 ++--- .../rchain/casper/engine/NodeSyncing.scala | 16 ++-- .../rchain/casper/protocol/CommUtil.scala | 2 +- .../protocol/client/DeployRuntime.scala | 9 +-- .../protocol/client/DeployService.scala | 5 +- .../casper/protocol/client/ListenAtName.scala | 9 +-- .../protocol/client/ProposeService.scala | 5 +- .../casper/reporting/ReportingCasper.scala | 4 +- .../casper/rholang/InterpreterUtil.scala | 10 ++- .../casper/rholang/RuntimeManager.scala | 8 +- .../rholang/syntax/RuntimeReplaySyntax.scala | 4 +- .../casper/rholang/syntax/RuntimeSyntax.scala | 2 +- .../coop/rchain/casper/util/BondsParser.scala | 50 ++++++------ .../rchain/casper/util/ConstructDeploy.scala | 15 ++-- .../coop/rchain/casper/util/VaultParser.scala | 28 +++---- .../HashSetCasperSpecification.scala | 1 + .../rchain/casper/addblock/ProposerSpec.scala | 1 + .../MultiParentCasperCommunicationSpec.scala | 8 +- .../batch1/MultiParentCasperMergeSpec.scala | 5 +- .../batch1/MultiParentCasperRholangSpec.scala | 7 +- .../batch1/MultiParentCasperSmokeSpec.scala | 2 +- .../batch2/BlockReceiverEffectsSpec.scala | 30 +++---- .../casper/batch2/LmdbKeyValueStoreSpec.scala | 3 +- .../rchain/casper/batch2/ValidateTest.scala | 8 +- .../engine/RunningHandleHasBlockSpec.scala | 1 + .../rchain/casper/genesis/GenesisTest.scala | 4 +- .../helper/BlockDagStorageFixture.scala | 6 +- .../rchain/casper/helper/BlockGenerator.scala | 6 +- .../rchain/casper/helper/BondingUtil.scala | 4 +- 
.../coop/rchain/casper/helper/RhoSpec.scala | 1 + .../coop/rchain/casper/helper/TestNode.scala | 20 ++--- .../rchain/casper/helper/TestRhoRuntime.scala | 2 +- .../merging/MergeNumberChannelSpec.scala | 2 +- .../rchain/casper/merging/MergingCases.scala | 15 ++-- .../rchain/casper/rholang/DeployIdTest.scala | 1 + .../casper/rholang/DeployerIdTest.scala | 11 +-- .../casper/rholang/InterpreterUtilTest.scala | 9 +-- .../rchain/casper/rholang/Resources.scala | 4 +- .../casper/rholang/RuntimeManagerTest.scala | 40 +++++----- .../sync/BlockRetrieverRequesAllSpec.scala | 11 +-- .../casper/sync/BlockRetrieverSpec.scala | 4 +- .../rchain/casper/util/GenesisBuilder.scala | 1 + .../util/scalatest/Fs2StreamMatchers.scala | 1 + .../comm/discovery/GrpcKademliaRPC.scala | 5 +- .../coop/rchain/comm/rp/HandleMessages.scala | 10 +-- .../rchain/comm/transport/GrpcTransport.scala | 2 +- .../transport/GrpcTransportReceiver.scala | 16 ++-- .../comm/transport/GrpcTransportServer.scala | 8 +- .../rchain/comm/transport/StreamHandler.scala | 1 - .../comm/transport/StreamObservable.scala | 8 +- .../rchain/comm/discovery/DistanceSpec.scala | 43 +++++----- .../comm/discovery/GrpcKademliaRPCSpec.scala | 1 + .../comm/discovery/KademliaRPCRuntime.scala | 5 +- .../comm/discovery/KademliaRPCSpec.scala | 4 +- .../rchain/comm/discovery/KademliaSpec.scala | 43 +++++----- .../rchain/comm/discovery/PeerTableSpec.scala | 19 ++--- .../rchain/comm/rp/ClearConnectionsSpec.scala | 35 ++++---- .../coop/rchain/comm/rp/ConnectSpec.scala | 4 +- .../coop/rchain/comm/rp/ConnectionsSpec.scala | 11 +-- .../rchain/comm/rp/FindAndConnectSpec.scala | 25 +++--- .../comm/rp/HandleProtocolHandshakeSpec.scala | 6 +- .../coop/rchain/comm/rp/ScalaTestCats.scala | 20 ++--- .../comm/transport/GrpcTransportSpec.scala | 1 + .../transport/PacketStoreRestoreSpec.scala | 4 +- .../comm/transport/StreamHandlerSpec.scala | 1 + .../comm/transport/TransportLayerSpec.scala | 5 +- .../rchain/p2p/EffectsTestInstances.scala | 2 +- 
.../scala/coop/rchain/graphz/GraphzSpec.scala | 1 + .../models/rholang/sorter/ordering.scala | 3 +- .../main/scala/coop/rchain/node/Main.scala | 3 +- .../scala/coop/rchain/node/api/package.scala | 5 +- .../node/diagnostics/effects/package.scala | 8 +- .../coop/rchain/node/effects/package.scala | 6 +- .../node/instances/ProposerInstance.scala | 8 +- .../node/revvaultexport/StateBalances.scala | 3 +- .../mainnet1/StateBalanceMain.scala | 1 + .../mainnet1/reporting/MergeBalanceMain.scala | 1 + .../reporting/TransactionBalanceMain.scala | 1 + .../reporting/TransactionBalances.scala | 10 +-- .../rchain/node/runtime/NetworkServers.scala | 27 ++++--- .../rchain/node/runtime/NodeCallCtx.scala | 2 +- .../coop/rchain/node/runtime/NodeMain.scala | 12 +-- .../rchain/node/runtime/NodeRuntime.scala | 4 +- .../coop/rchain/node/runtime/Setup.scala | 32 ++++---- .../coop/rchain/node/web/WebApiDocsV1.scala | 4 +- .../coop/rchain/node/web/WebApiRoutes.scala | 4 +- .../coop/rchain/node/web/WebApiRoutesV1.scala | 3 +- .../coop/rchain/node/TransactionAPISpec.scala | 3 +- .../node/mergeablity/ComputeMerge.scala | 2 +- .../node/mergeablity/MergeabilityRules.scala | 2 +- .../TreeHashMapMergeabilitySpec.scala | 2 +- .../rchain/node/perf/HistoryGenKeySpec.scala | 11 +-- .../revvaultexport/RhoTrieTraverserTest.scala | 1 + .../VaultBalanceGetterTest.scala | 2 +- .../rholang/interpreter/RhoRuntime.scala | 6 +- .../rholang/interpreter/RholangCLI.scala | 5 +- .../accounting/CostAccounting.scala | 1 - .../coop/rchain/rholang/InterpreterSpec.scala | 2 +- .../scala/coop/rchain/rholang/PeekSpec.scala | 1 + .../coop/rchain/rholang/StackSafetySpec.scala | 1 + .../rchain/rholang/StoragePrinterSpec.scala | 4 +- .../interpreter/BigIntNormalizerSpec.scala | 2 +- .../CostAccountingReducerTest.scala | 1 + .../interpreter/CryptoChannelsSpec.scala | 1 + .../interpreter/PersistentStoreTester.scala | 1 + .../rholang/interpreter/ReduceSpec.scala | 1 + .../rholang/interpreter/ReplaySpec.scala | 1 + 
.../rholang/interpreter/RuntimeSpec.scala | 1 + .../interpreter/ShortCircuitBooleanSpec.scala | 1 + .../CostAccountingPropertyTest.scala | 3 +- .../accounting/CostAccountingSpec.scala | 3 +- .../accounting/RholangMethodsCostsSpec.scala | 1 + .../interpreter/accounting/package.scala | 2 +- .../interpreter/matcher/MatchTest.scala | 2 +- .../matcher/MatcherMonadSpec.scala | 2 +- .../storage/ChargingRSpaceTest.scala | 2 +- .../scala/rholang/rosette/CompilerTests.scala | 1 + .../coop/rchain/rspace/bench/BasicBench.scala | 49 ++++++------ .../rspace/bench/EvalBenchStateBase.scala | 28 +++---- .../rchain/rspace/bench/RSpaceBench.scala | 50 ++++++------ .../rspace/bench/ReplayRSpaceBench.scala | 42 +++++----- .../rspace/bench/RhoBenchBaseState.scala | 42 +++++----- .../bench/RhoReplayBenchBaseState.scala | 7 +- .../coop/rchain/rspace/bench/package.scala | 10 +-- .../scala/coop/rchain/rspace/RSpace.scala | 8 +- .../scala/coop/rchain/rspace/RSpaceOps.scala | 2 +- .../coop/rchain/rspace/ReplayRSpace.scala | 4 +- .../coop/rchain/rspace/ReportingRspace.scala | 10 +-- .../rspace/examples/AddressBookExample.scala | 79 +++++++++---------- .../rchain/rspace/merger/StateChange.scala | 2 +- .../rchain/rspace/ExportImportTests.scala | 3 +- .../coop/rchain/rspace/HotStoreSpec.scala | 3 +- .../rchain/rspace/ReplayRSpaceTests.scala | 9 +-- .../rchain/rspace/StorageActionsTests.scala | 2 +- .../rchain/rspace/StorageExamplesTests.scala | 4 +- .../coop/rchain/rspace/StorageTestsBase.scala | 20 ++--- .../rchain/rspace/TestImplicitHelpers.scala | 10 ++- .../rspace/concurrent/MultiLockTest.scala | 1 + .../rspace/concurrent/TwoStepLockTest.scala | 1 + .../rspace/history/HistoryActionTests.scala | 1 + .../HistoryRepositoryGenerativeSpec.scala | 1 + .../history/HistoryRepositorySpec.scala | 1 + .../rchain/rspace/history/RadixTreeSpec.scala | 1 + .../main/scala/coop/rchain/shared/Log.scala | 2 - .../main/scala/coop/rchain/shared/Time.scala | 47 ----------- .../rchain/shared/Fs2ExtensionsSpec.scala 
| 5 +- .../coop/rchain/shared/StreamTSpec.scala | 36 ++++----- .../coop/rchain/shared/scalatestcontrib.scala | 1 + .../store/InMemoryKeyValueStoreSpec.scala | 1 + 160 files changed, 717 insertions(+), 750 deletions(-) delete mode 100644 shared/src/main/scala/coop/rchain/shared/Time.scala diff --git a/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala b/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala index 97ef71d22cf..586bc381bad 100644 --- a/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala +++ b/casper/src/main/scala/coop/rchain/casper/BlockExecutionTracker.scala @@ -19,7 +19,7 @@ final case class DeployStatusError(status: String) extends DeployStatus object StatefulExecutionTracker { def apply[F[_]: Sync]: F[StatefulExecutionTracker[F]] = for { - ref <- Ref.of(Map.empty[DeployId, DeployStatus]) + ref <- Ref[F].of(Map.empty[DeployId, DeployStatus]) } yield new StatefulExecutionTracker(ref) } diff --git a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala index 914fc5bbcb4..1c9647d37c0 100644 --- a/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/MultiParentCasper.scala @@ -169,7 +169,7 @@ object MultiParentCasper { rejectedDeploys = csRejectedDeploys ) - def validate[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validate[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: Long diff --git a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala index a771f605e8f..f57dd2deb23 100644 --- a/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala +++ b/casper/src/main/scala/coop/rchain/casper/api/BlockApiImpl.scala @@ -312,7 +312,7 @@ class BlockApiImpl[F[_]: 
Async: RuntimeManager: BlockDagStorage: BlockStore: Log val reverseHeightMap = heightMap.toIndexedSeq.reverse val iterBlockHashes = reverseHeightMap.iterator.map(_._2.toList) Stream - .fromIterator(iterBlockHashes) + .fromIterator(iterBlockHashes, 1) .evalMap(_.traverse(BlockStore[F].getUnsafe)) .evalMap(_.traverse(transform)) } diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala index c0cd39e5989..c102b2e4f3d 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockProcessor.scala @@ -12,7 +12,7 @@ import coop.rchain.metrics.{Metrics, Span} import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import cats.effect.Temporal object BlockProcessor { @@ -22,9 +22,9 @@ object BlockProcessor { * - input block must have all dependencies in the DAG * - blocks created by node itself are not processed here, but in Proposer */ - def apply[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def apply[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( inputBlocks: Stream[F, BlockMessage], - validatedQueue: Queue[F, BlockMessage], + validatedQueue: Channel[F, BlockMessage], shardId: String, minPhloPrice: Long ): Stream[F, (BlockMessage, ValidBlockProcessing)] = @@ -34,14 +34,14 @@ object BlockProcessor { result <- validateAndAddToDag(block, shardId, minPhloPrice) // Notify finished block validation - _ <- validatedQueue.enqueue1(block) + _ <- validatedQueue.send(block) // Broadcast block to the peers _ <- CommUtil[F].sendBlockHash(block.blockHash, block.sender) } yield (block, result) } - def validateAndAddToDag[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( + def 
validateAndAddToDag[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: CommUtil: Log: Metrics: Span]( block: BlockMessage, shardId: String, minPhloPrice: Long diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala index d9ae1dde5ac..3d6123ea5dc 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockReceiver.scala @@ -13,7 +13,7 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import cats.effect.Ref sealed trait RecvStatus @@ -232,7 +232,7 @@ object BlockReceiver { } // Process incoming blocks - def incomingBlocks(receiverOutputQueue: Queue[F, BlockHash]) = + def incomingBlocks(receiverOutputQueue: Channel[F, BlockHash]) = incomingBlocksStream .evalFilterAsyncUnorderedProcBounded { block => // Filter (ignore) blocks that are not of interest (pass integrity check, incorrect shard or version, ...) 
@@ -266,7 +266,7 @@ object BlockReceiver { parentsToValidate <- block.justifications.filterA(notValidated[F]) _ <- if (hasAllDeps) { - receiverOutputQueue.enqueue1(block.blockHash) + receiverOutputQueue.send(block.blockHash) } else { requestMissingDependencies(pendingRequests).whenA(pendingRequests.nonEmpty) *> sendToValidate(parentsToValidate).whenA(parentsToValidate.nonEmpty) @@ -282,7 +282,7 @@ object BlockReceiver { } // Process validated blocks - def validatedBlocks(receiverOutputQueue: Queue[F, BlockHash]) = + def validatedBlocks(receiverOutputQueue: Channel[F, BlockHash]) = finishedProcessingStream.parEvalMapUnorderedProcBounded { block => val parents = block.justifications.toSet for { @@ -290,13 +290,13 @@ object BlockReceiver { next <- state.modify(_.finished(block.blockHash, parents)) // Send dependency free blocks to validation - _ <- next.toList.traverse_(receiverOutputQueue.enqueue1) + _ <- next.toList.traverse_(receiverOutputQueue.send) } yield () } // Return output stream, in parallel process incoming and validated blocks - Queue.unbounded[F, BlockHash].map { outQueue => - outQueue.dequeue concurrently incomingBlocks(outQueue) concurrently validatedBlocks(outQueue) + Channel.unbounded[F, BlockHash].map { outQueue => + outQueue.stream concurrently incomingBlocks(outQueue) concurrently validatedBlocks(outQueue) } } diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala b/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala index d4df4472992..b1fe1370a15 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/BlockRetriever.scala @@ -136,7 +136,7 @@ object BlockRetriever { admitHashReason: AdmitHashReason ): F[AdmitHashResult] = for { - now <- Temporal[F].clock.realTime(TimeUnit.MILLISECONDS) + now <- Temporal[F].realTime.map(_.toMillis) result <- RequestedBlocks[F] .modify[AdmitHashResult] { state => val unknownHash = 
!state.contains(hash) @@ -229,7 +229,7 @@ object BlockRetriever { s"Remain waiting: ${waitingListTail.map(_.endpoint.host).mkString(", ")}." ) _ <- CommUtil[F].requestForBlock(nextPeer, hash) - ts <- Temporal[F].clock.realTime(TimeUnit.MILLISECONDS) + ts <- Temporal[F].realTime.map(_.toMillis) _ <- RequestedBlocks.put( hash, requested.copy( @@ -260,8 +260,8 @@ object BlockRetriever { _ <- state.keySet.toList.traverse(hash => { val requested = state(hash) for { - expired <- Temporal[F].clock - .realTime(TimeUnit.MILLISECONDS) + expired <- Temporal[F].realTime + .map(_.toMillis) .map(_ - requested.timestamp > ageThreshold.toMillis) _ <- Log[F] .debug( diff --git a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala index d31d7676a5d..5de288b5a3e 100644 --- a/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala +++ b/casper/src/main/scala/coop/rchain/casper/blocks/proposer/Proposer.scala @@ -22,7 +22,7 @@ import coop.rchain.models.syntax._ import coop.rchain.sdk.consensus.Stake import coop.rchain.sdk.error.FatalError import coop.rchain.shared.syntax._ -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log sealed abstract class ProposerResult object ProposerEmpty extends ProposerResult @@ -113,7 +113,7 @@ class Proposer[F[_]: Async: Log: Span]( object Proposer { // format: off def apply[F[_] - /* Execution */ : Async: Temporal: Time + /* Execution */ : Async /* Storage */ : BlockStore: BlockDagStorage /* Rholang */ : RuntimeManager /* Comm */ : CommUtil diff --git a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala index 124ea00b9af..84a7360ea4d 100644 --- a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala +++ b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala @@ -248,7 +248,8 @@ object 
BlockDagKeyValueStorage { // TODO: include only non-finalized block dmsSt <- Ref.of(DagMessageState[BlockHash, Validator]()) fsSt <- Ref.of(Map[Set[BlockHash], FringeData]()) - initMsgMapJob = Stream.fromIterator(heightMap.values.flatten.iterator).evalMap { hash => + i = heightMap.values.flatten.iterator + initMsgMapJob = Stream.fromIterator(i, 1).evalMap { hash => for { ds <- dmsSt.get fs <- fsSt.get diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala index 173534feedb..150e6b17460 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsBlockRequester.scala @@ -9,7 +9,7 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import scala.collection.immutable.SortedMap import scala.concurrent.duration._ @@ -148,7 +148,7 @@ object LfsBlockRequester { * @param validateBlock Check if received block is valid * @return fs2.Stream processing all blocks */ - def stream[F[_]: Async: Temporal: Log]( + def stream[F[_]: Async: Log]( fringe: FinalizedFringe, incomingBlocks: Stream[F, BlockMessage], blockHeightsBeforeFringe: Int, @@ -166,15 +166,15 @@ object LfsBlockRequester { def createStream( st: Ref[F, ST[BlockHash]], - requestQueue: Queue[F, Boolean], - responseHashQueue: Queue[F, BlockHash] + requestQueue: Channel[F, Boolean], + responseHashQueue: Channel[F, BlockHash] ): Stream[F, ST[BlockHash]] = { def broadcastStreams(ids: Set[BlockHash]): Stream[F, Stream[F, Unit]] = { // Create broadcast requests to peers val broadcastRequests = ids.map(requestForBlock andThen Stream.eval) // Create stream of requests - Stream.fromIterator(broadcastRequests.iterator) + Stream.fromIterator(broadcastRequests.iterator, 1) } def processBlock(block: BlockMessage): 
F[Unit] = @@ -186,7 +186,7 @@ object LfsBlockRequester { _ <- saveBlock(block).whenA(isValid) // Trigger request queue (without resend of already requested) - _ <- requestQueue.enqueue1(false) + _ <- requestQueue.send(false) } yield () /** @@ -259,8 +259,9 @@ object LfsBlockRequester { existingHashes <- hashes.toList.filterA(containsBlock) // Enqueue hashes of exiting blocks - _ <- responseHashQueue - .enqueue(Stream.emits(existingHashes)) + _ <- Stream + .emits(existingHashes) + .evalMap(responseHashQueue.send) .compile .drain .whenA(existingHashes.nonEmpty) @@ -276,14 +277,14 @@ object LfsBlockRequester { /** * Request stream is pulling new block hashes ready for broadcast requests. */ - val requestStream = requestQueue.dequeueChunk(maxSize = 1).evalTap(requestNext) + val requestStream = requestQueue.stream.evalTap(requestNext) /** * Response stream is handling incoming block messages. Responses can be processed in parallel. */ val responseStream1 = incomingBlocks.parEvalMapProcBounded(processBlock) - val responseStream2 = responseHashQueue.dequeue + val responseStream2 = responseHashQueue.stream .parEvalMapProcBounded { hash => for { block <- getBlockFromStore(hash) @@ -300,7 +301,7 @@ object LfsBlockRequester { */ val timeoutMsg = s"No block responses for $requestTimeout. Resending requests." // Triggers request queue (resend already requested) - val resendRequests = requestQueue.enqueue1(true) <* Log[F].warn(timeoutMsg) + val resendRequests = requestQueue.send(true) <* Log[F].warn(timeoutMsg) /** * Final result! Concurrently pulling requests and handling responses @@ -323,13 +324,13 @@ object LfsBlockRequester { ) // Queue to trigger processing of requests. `True` to resend requests. - requestQueue <- Queue.bounded[F, Boolean](maxSize = 2) + requestQueue <- Channel.bounded[F, Boolean](capacity = 2) // Response queue for existing blocks in the store. 
- responseHashQueue <- Queue.unbounded[F, BlockHash] + responseHashQueue <- Channel.unbounded[F, BlockHash] // Light the fire! / Starts the first request for block // - `true` if requested blocks should be re-requested - _ <- requestQueue.enqueue1(false) + _ <- requestQueue.send(false) // Create block receiver stream } yield createStream(st, requestQueue, responseHashQueue) diff --git a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala index f93808bce16..69e2bdb17bc 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/LfsTupleSpaceRequester.scala @@ -4,13 +4,12 @@ import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol._ import coop.rchain.models.syntax._ -import coop.rchain.casper.util.ProtoUtil import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.state.RSpaceImporter import coop.rchain.shared.ByteVectorOps._ import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, Stopwatch} -import fs2.concurrent.Queue +import fs2.concurrent.Channel import fs2.{Pure, Stream} import scodec.bits.ByteVector @@ -94,9 +93,9 @@ object LfsTupleSpaceRequester { * @param validateTupleSpaceItems Check if received statet chunk is valid * @return fs2.Stream processing all tuple space state */ - def stream[F[_]: Async: Temporal: Log]( + def stream[F[_]: Async: Log]( fringe: FinalizedFringe, - tupleSpaceMessageQueue: Queue[F, StoreItemsMessage], + tupleSpaceMessageQueue: Channel[F, StoreItemsMessage], requestForStoreItem: (StatePartPath, Int) => F[Unit], requestTimeout: FiniteDuration, stateImporter: RSpaceImporter[F], @@ -112,7 +111,7 @@ object LfsTupleSpaceRequester { def createStream( st: Ref[F, ST[StatePartPath]], - requestQueue: Queue[F, Boolean] + requestQueue: Channel[F, Boolean] ): Stream[F, ST[StatePartPath]] = { def 
broadcastStreams(ids: Seq[StatePartPath]): Stream[Pure, Stream[F, Unit]] = { @@ -132,7 +131,7 @@ object LfsTupleSpaceRequester { */ val requestStream = for { // Request queue is a trigger when to check the state - resend <- requestQueue.dequeueChunk(maxSize = 1) + resend <- requestQueue.stream // Check if stream is finished (no more requests) isEnd <- Stream.eval(st.get.map(_.isFinished)) @@ -149,7 +148,7 @@ object LfsTupleSpaceRequester { */ val responseStream = for { // Response queue is incoming message source / async callback handler - msg <- tupleSpaceMessageQueue.dequeue + msg <- tupleSpaceMessageQueue.stream StoreItemsMessage(startPath, lastPath, historyItems, dataItems) = msg @@ -158,7 +157,7 @@ object LfsTupleSpaceRequester { // Add chunk paths for requesting and trigger request queue (without resend of already requested) _ <- Stream - .eval(st.update(_.add(Set(lastPath))) >> requestQueue.enqueue1(false)) + .eval(st.update(_.add(Set(lastPath))) >> requestQueue.send(false)) .whenA(isReceived) // Import chunk to RSpace @@ -207,7 +206,7 @@ object LfsTupleSpaceRequester { _ <- st.update(_.done(startPath)) // Trigger request queue again to process finished chunks - _ <- requestQueue.enqueue1(false) + _ <- requestQueue.send(false) } yield () }.whenA(isReceived) } yield () @@ -216,7 +215,7 @@ object LfsTupleSpaceRequester { * Timeout to resend block requests if response is not received */ val timeoutMsg = s"No tuple space state responses for $requestTimeout. Resending requests." - val resendRequests = requestQueue.enqueue1(true) <* Log[F].warn(timeoutMsg) + val resendRequests = requestQueue.send(true) <* Log[F].warn(timeoutMsg) /** * Final result! Concurrently pulling requests and handling responses @@ -238,11 +237,11 @@ object LfsTupleSpaceRequester { st <- Ref.of[F, ST[StatePartPath]](ST(Seq(startRequest))) // Queue to trigger processing of requests. `True` to resend requests. 
- requestQueue <- Queue.bounded[F, Boolean](maxSize = 2) + requestQueue <- Channel.bounded[F, Boolean](capacity = 2) // Light the fire! / Starts the first request for chunk of state // - `true` if requested chunks should be re-requested - _ <- requestQueue.enqueue1(false) + _ <- requestQueue.send(false) // Create tuple space state receiver stream } yield createStream(st, requestQueue) diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala index 455c73fae7f..9fc98465d0a 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeLaunch.scala @@ -24,7 +24,7 @@ import coop.rchain.rspace.state.RSpaceStateManager import coop.rchain.shared._ import coop.rchain.shared.syntax._ import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import scala.concurrent.duration.DurationInt import cats.effect.{Deferred, Temporal} @@ -35,7 +35,7 @@ object NodeLaunch { // format: off def apply[F[_] - /* Execution */ : Async: Parallel: ContextShift: Time: Temporal + /* Execution */ : Async: Parallel /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -43,7 +43,7 @@ object NodeLaunch { /* Diagnostics */ : Log: Metrics: Span] // format: on ( packets: Stream[F, PeerMessage], - incomingBlocksQueue: Queue[F, BlockMessage], + incomingBlocksQueue: Channel[F, BlockMessage], conf: CasperConf, trimState: Boolean, disableStateExporter: Boolean, @@ -140,7 +140,7 @@ object NodeLaunch { } yield () } - def createGenesisBlockFromConfig[F[_]: Async: ContextShift: RuntimeManager: Log]( + def createGenesisBlockFromConfig[F[_]: Async: RuntimeManager: Log]( validator: ValidatorIdentity, conf: CasperConf ): F[BlockMessage] = @@ -162,7 +162,7 @@ object NodeLaunch { conf.genesisBlockData.systemContractPubKey ) - def createGenesisBlock[F[_]: Async: 
ContextShift: RuntimeManager: Log]( + def createGenesisBlock[F[_]: Async: RuntimeManager: Log]( validator: ValidatorIdentity, shardId: String, blockNumber: Long, diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala index 5eb9e1a456e..eeffcf41f11 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeRunning.scala @@ -9,7 +9,7 @@ import coop.rchain.blockstorage.BlockStore.BlockStore import coop.rchain.blockstorage.dag.BlockDagStorage import coop.rchain.casper._ import coop.rchain.casper.blocks.{BlockReceiver, BlockRetriever} -import coop.rchain.casper.protocol.{CommUtil, _} +import coop.rchain.casper.protocol._ import coop.rchain.casper.syntax._ import coop.rchain.comm.PeerNode import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} @@ -19,20 +19,20 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.state.{RSpaceExporter, RSpaceStateManager} import coop.rchain.shared.syntax._ -import coop.rchain.shared.{Log, Time} -import fs2.concurrent.Queue +import coop.rchain.shared.Log +import fs2.concurrent.Channel object NodeRunning { // format: off def apply[F[_] - /* Execution */ : Async: Time + /* Execution */ : Async /* Transport */ : TransportLayer: CommUtil: BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Storage */ : BlockStore: BlockDagStorage: RSpaceStateManager /* Diagnostics */ : Log: Metrics] // format: on ( - blockProcessingQueue: Queue[F, BlockMessage], + blockProcessingQueue: Channel[F, BlockMessage], validatorId: Option[ValidatorIdentity], disableStateExporter: Boolean ): F[NodeRunning[F]] = Sync[F].delay( @@ -212,13 +212,13 @@ object NodeRunning { // format: off class NodeRunning[F[_] - /* Execution */ : Async: Time + /* Execution */ : Async /* Transport */ : TransportLayer: CommUtil: 
BlockRetriever /* State */ : RPConfAsk: ConnectionsCell /* Storage */ : BlockStore: BlockDagStorage: RSpaceStateManager /* Diagnostics */ : Log: Metrics] // format: on ( - incomingBlocksQueue: Queue[F, BlockMessage], + incomingBlocksQueue: Channel[F, BlockMessage], validatorId: Option[ValidatorIdentity], disableStateExporter: Boolean ) { @@ -251,7 +251,7 @@ class NodeRunning[F[_] s"Ignoring BlockMessage ${PrettyPrinter.buildString(b, short = true)} " + s"from ${peer.endpoint.host}" ), - incomingBlocksQueue.enqueue1(b) <* Log[F].debug( + incomingBlocksQueue.send(b).void <* Log[F].debug( s"Incoming BlockMessage ${PrettyPrinter.buildString(b, short = true)} " + s"from ${peer.endpoint.host}" ) @@ -271,7 +271,7 @@ class NodeRunning[F[_] val processKnownBlock = for { blockNotValidated <- BlockReceiver.notValidated(blockHash) - _ <- (BlockStore[F].getUnsafe(blockHash) >>= incomingBlocksQueue.enqueue1) + _ <- (BlockStore[F].getUnsafe(blockHash) >>= incomingBlocksQueue.send) .whenA(blockNotValidated) } yield () val logProcess = Log[F].debug( diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala index 17293bbd214..1468f234cb5 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala @@ -19,7 +19,7 @@ import coop.rchain.models.BlockMetadata import coop.rchain.rspace.state.{RSpaceImporter, RSpaceStateManager} import coop.rchain.shared._ import coop.rchain.shared.syntax._ -import fs2.concurrent.Queue +import fs2.concurrent.Channel import scala.collection.immutable.SortedMap import scala.concurrent.duration._ @@ -32,7 +32,7 @@ object NodeSyncing { */ // format: off def apply[F[_] - /* Execution */ : Async: Time: Temporal + /* Execution */ : Async /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -44,8 +44,8 @@ object NodeSyncing 
{ trimState: Boolean = true ): F[NodeSyncing[F]] = for { - incomingBlocksQueue <- Queue.bounded[F, BlockMessage](50) - stateResponseQueue <- Queue.bounded[F, StoreItemsMessage](50) + incomingBlocksQueue <- Channel.bounded[F, BlockMessage](50) + stateResponseQueue <- Channel.bounded[F, StoreItemsMessage](50) engine = new NodeSyncing( finished, incomingBlocksQueue, @@ -62,7 +62,7 @@ object NodeSyncing { */ // format: off class NodeSyncing[F[_] - /* Execution */ : Async: Time: Temporal + /* Execution */ : Async /* Transport */ : TransportLayer: CommUtil /* State */ : RPConfAsk: ConnectionsCell /* Rholang */ : RuntimeManager @@ -70,9 +70,9 @@ class NodeSyncing[F[_] /* Diagnostics */ : Log: Metrics: Span] // format: on ( finished: Deferred[F, Unit], - incomingBlocksQueue: Queue[F, BlockMessage], + incomingBlocksQueue: Channel[F, BlockMessage], validatorId: Option[ValidatorIdentity], - tupleSpaceQueue: Queue[F, StoreItemsMessage], + tupleSpaceQueue: Channel[F, StoreItemsMessage], trimState: Boolean = true ) { def handle(peer: PeerNode, msg: CasperMessage): F[Unit] = msg match { @@ -136,7 +136,7 @@ class NodeSyncing[F[_] // Request all blocks for Last Finalized State blockRequestStream <- LfsBlockRequester.stream( fringe, - incomingBlocksQueue.dequeue, + incomingBlocksQueue.stream, MultiParentCasper.deployLifespan, hash => CommUtil[F].broadcastRequestForBlock(hash, 1.some), requestTimeout = 30.seconds, diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala index dd6f4dd3aae..4d2087856a0 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/CommUtil.scala @@ -50,7 +50,7 @@ object CommUtil { def apply[F[_]](implicit ev: CommUtil[F]): CommUtil[F] = ev - def of[F[_]: Async: Temporal: TransportLayer: RPConfAsk: ConnectionsCell: Log]: CommUtil[F] = + def of[F[_]: Async: TransportLayer: RPConfAsk: 
ConnectionsCell: Log]: CommUtil[F] = new CommUtil[F] { def sendToPeers(message: Packet, scopeSize: Option[Int]): F[Unit] = diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployRuntime.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployRuntime.scala index 6123a54502a..488d298b16a 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployRuntime.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployRuntime.scala @@ -1,7 +1,7 @@ package coop.rchain.casper.protocol.client import cats.data.EitherT -import cats.effect.Sync +import cats.effect.{Async, Sync} import cats.syntax.all._ import cats.{Functor, Id, Monad} import com.google.protobuf.ByteString @@ -13,7 +13,6 @@ import coop.rchain.crypto.{PrivateKey, PublicKey} import coop.rchain.models.Par import coop.rchain.models.syntax._ import coop.rchain.shared.ThrowableOps._ -import coop.rchain.shared.Time import scala.io.Source import scala.util.Try @@ -40,7 +39,7 @@ object DeployRuntime { def machineVerifiableDag[F[_]: Monad: Sync: DeployService]: F[Unit] = gracefulExit(DeployService[F].machineVerifiableDag(MachineVerifyQuery())) - def listenForDataAtName[F[_]: Functor: Sync: DeployService: Time]( + def listenForDataAtName[F[_]: Async: DeployService]( name: Id[Name] ): F[Unit] = gracefulExit { @@ -50,7 +49,7 @@ object DeployRuntime { }.map(kp("")).value } - def listenForContinuationAtName[F[_]: Functor: Sync: Time: DeployService]( + def listenForContinuationAtName[F[_]: Async: DeployService]( names: List[Name] ): F[Unit] = gracefulExit { @@ -60,7 +59,7 @@ object DeployRuntime { }.map(kp("")).value } - def findDeploy[F[_]: Functor: Sync: Time: DeployService]( + def findDeploy[F[_]: Async: DeployService]( deployId: Array[Byte] ): F[Unit] = gracefulExit( diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala index 
15ce62e7fcb..25190f96ed8 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.protocol.client -import cats.effect.{AsyncEffect, Sync} +import cats.effect.std.Dispatcher +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.deploy.v1.{DeployExecStatus, DeployServiceFs2Grpc} @@ -39,7 +40,7 @@ object DeployService { def apply[F[_]](implicit ev: DeployService[F]): DeployService[F] = ev } -class GrpcDeployService[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcDeployService[F[_]: Async](host: String, port: Int, maxMessageSize: Int) extends DeployService[F] with Closeable { diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ListenAtName.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ListenAtName.scala index 3dbb1ba6b7e..3a6ec99f587 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ListenAtName.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ListenAtName.scala @@ -1,12 +1,11 @@ package coop.rchain.casper.protocol.client import cats.Id -import cats.effect.Sync +import cats.effect.{Async, Sync, Temporal} import cats.syntax.all._ import coop.rchain.casper.rholang.InterpreterUtil import coop.rchain.models.rholang.RhoType.RhoName import coop.rchain.models.{NormalizerEnv, Par} -import coop.rchain.shared.Time object ListenAtName { sealed trait Name @@ -43,12 +42,12 @@ object ListenAtName { } } - private def applyUntil[A, F[_]: Sync: Time](retrieve: F[A])(breakCond: A => Boolean): F[A] = { + private def applyUntil[A, F[_]: Async](retrieve: F[A])(breakCond: A => Boolean): F[A] = { import scala.concurrent.duration._ def loop: F[A] = for { - _ <- Time[F].sleep(1.second) + _ <- Temporal[F].sleep(1.second) data <- retrieve res <- if 
(breakCond(data)) data.pure[F] else loop @@ -57,7 +56,7 @@ object ListenAtName { loop } - def listenAtNameUntilChanges[A1, G[_], F[_]: Sync: Time]( + def listenAtNameUntilChanges[A1, G[_], F[_]: Async]( name: G[Name] )(request: G[Par] => F[Seq[A1]])(implicit par: BuildPar[λ[A => F[G[A]]]]): F[Unit] = { val nameF = name.pure[F] diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala index 9d1a67be19a..9a10890d695 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.protocol.client -import cats.effect.{AsyncEffect, Sync} +import cats.effect.{Async, Sync} +import cats.effect.std.Dispatcher import coop.rchain.casper.protocol._ import coop.rchain.casper.protocol.propose.v1._ import coop.rchain.models.either.implicits._ @@ -19,7 +20,7 @@ object ProposeService { def apply[F[_]](implicit ev: ProposeService[F]): ProposeService[F] = ev } -class GrpcProposeService[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcProposeService[F[_]: Async](host: String, port: Int, maxMessageSize: Int) extends ProposeService[F] with Closeable { diff --git a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala index 856607ed60e..1a31ff40501 100644 --- a/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala +++ b/casper/src/main/scala/coop/rchain/casper/reporting/ReportingCasper.scala @@ -83,7 +83,7 @@ object ReportingCasper { type RhoReportingRspace[F[_]] = ReportingRspace[F, Par, BindPattern, ListParWithRandom, TaggedContinuation] - def rhoReporter[F[_]: Async: ContextShift: Parallel: BlockDagStorage: Log: Metrics: Span]( + def rhoReporter[F[_]: Async: Parallel: 
BlockDagStorage: Log: Metrics: Span]( rspaceStore: RSpaceStore[F], shardId: String ): ReportingCasper[F] = @@ -169,7 +169,7 @@ object ReportingRuntime { implicit val RuntimeMetricsSource: Source = Metrics.Source(RholangMetricsSource, "reportingRuntime") - def createReportingRSpace[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( + def createReportingRSpace[F[_]: Async: Parallel: Log: Metrics: Span]( store: RSpaceStore[F] ): F[RhoReportingRspace[F]] = { import coop.rchain.rholang.interpreter.storage._ diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala index b2b65b79c54..699795457e1 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala @@ -29,9 +29,11 @@ import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.rholang.interpreter.errors.InterpreterError import coop.rchain.shared.{Log, LogSource} -import retry.{retryingOnFailures, RetryPolicies} +import retry.{retryingOnFailures, RetryPolicies, Sleep} import cats.effect.Temporal +import scala.concurrent.duration.FiniteDuration + object InterpreterUtil { implicit private val logSource: LogSource = LogSource(this.getClass) @@ -48,7 +50,7 @@ object InterpreterUtil { // TODO: most of this function is legacy code, it should be refactored with separation of errors that are // handled (with included data e.g. 
hash not equal) and fatal errors which should NOT be handled - def validateBlockCheckpoint[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpoint[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[(BlockMetadata, BlockProcessing[Boolean])] = for { @@ -130,11 +132,11 @@ object InterpreterUtil { (bmd, result) } - def validateBlockCheckpointLegacy[F[_]: Async: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( + def validateBlockCheckpointLegacy[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Metrics: Span]( block: BlockMessage ): F[BlockProcessing[Boolean]] = validateBlockCheckpoint(block).map(_._2) - private def replayBlock[F[_]: Sync: Temporal: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( + private def replayBlock[F[_]: Async: RuntimeManager: BlockDagStorage: BlockStore: Log: Span]( initialStateHash: StateHash, block: BlockMessage, rand: Blake2b512Random diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala index 4cb326cbcf8..cdcc6fa0d07 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala @@ -71,7 +71,7 @@ trait RuntimeManager[F[_]] { def getMergeableStore: MergeableStore[F] } -final case class RuntimeManagerImpl[F[_]: Async: Metrics: Span: Log: ContextShift: Parallel]( +final case class RuntimeManagerImpl[F[_]: Async: Metrics: Span: Log: Parallel]( space: RhoISpace[F], replaySpace: RhoReplayISpace[F], historyRepo: RhoHistoryRepository[F], @@ -258,7 +258,7 @@ object RuntimeManager { def apply[F[_]](implicit F: RuntimeManager[F]): F.type = F - def apply[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( + def apply[F[_]: Async: Parallel: Metrics: Span: Log]( rSpace: RhoISpace[F], 
replayRSpace: RhoReplayISpace[F], historyRepo: RhoHistoryRepository[F], @@ -277,7 +277,7 @@ object RuntimeManager { ) ) - def apply[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( + def apply[F[_]: Async: Parallel: Metrics: Span: Log]( store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, @@ -288,7 +288,7 @@ object RuntimeManager { _._1 ) - def createWithHistory[F[_]: Async: ContextShift: Parallel: Metrics: Span: Log]( + def createWithHistory[F[_]: Async: Parallel: Metrics: Span: Log]( store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala index 25ed91e91ea..e8292986e35 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeReplaySyntax.scala @@ -134,7 +134,7 @@ final class RuntimeReplayOps[F[_]](private val runtime: ReplayRhoRuntime[F]) ext } } } - val refT = Ref.of(Vector[NumberChannelsEndVal]()).liftEitherT[ReplayFailure] + val refT = Ref[F].of(Vector[NumberChannelsEndVal]()).liftEitherT[ReplayFailure] refT.flatMap { mergeable => EitherT @@ -158,7 +158,7 @@ final class RuntimeReplayOps[F[_]](private val runtime: ReplayRhoRuntime[F]) ext span: Span[F], log: Log[F] ): EitherT[F, ReplayFailure, NumberChannelsEndVal] = { - val refT = Ref.of(Set[Par]()).liftEitherT[ReplayFailure] + val refT = Ref[F].of(Set[Par]()).liftEitherT[ReplayFailure] refT flatMap { mergeable => val expectedFailure = processedDeploy.systemDeployError val preChargeF = diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala index 59357880313..984e4f4726c 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala 
+++ b/casper/src/main/scala/coop/rchain/casper/rholang/syntax/RuntimeSyntax.scala @@ -195,7 +195,7 @@ final class RuntimeOps[F[_]](private val runtime: RhoRuntime[F]) extends AnyVal ) // Event logs and mergeable channels are accumulated inside local state - Ref.of(EvalCollector()) flatMap { st => + Ref[F].of(EvalCollector()) flatMap { st => // System deploy result of evaluation type R[S <: SystemDeploy] = Either[SystemDeployUserError, S#Result] diff --git a/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala b/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala index 07fae97855f..cb6c6e2dc94 100644 --- a/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala +++ b/casper/src/main/scala/coop/rchain/casper/util/BondsParser.scala @@ -1,16 +1,14 @@ package coop.rchain.casper.util -import cats.effect.Sync +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import coop.rchain.crypto.PublicKey import coop.rchain.crypto.signatures.Secp256k1 import coop.rchain.shared.{Base16, Log} import coop.rchain.models.syntax._ +import fs2.io.file.{Files, Path} import fs2.{io, text, Pipe, Stream} -import java.nio.file.Path -import cats.effect.Resource - object BondsParser { /** @@ -20,11 +18,11 @@ object BondsParser { * Cats Effect 3 removed ContextShift and Blocker. 
* - https://typelevel.org/cats-effect/docs/migration-guide#blocker */ - def parse[F[_]: Sync: ContextShift: Log](bondsPath: Path): F[Map[PublicKey, Long]] = { + def parse[F[_]: Async: Log](bondsPath: Path): F[Map[PublicKey, Long]] = { def readLines = - io.file - .readAll[F](bondsPath, blocker, chunkSize = 4096) - .through(text.utf8Decode) + Files[F] + .readAll(bondsPath) + .through(text.utf8.decode) .through(text.lines) .filter(_.trim.nonEmpty) .evalMap { line => @@ -59,31 +57,31 @@ object BondsParser { case ex: Throwable => new Exception(s"FAILED PARSING BONDS FILE: $bondsPath\n$ex") } - Resource.unit[F].use(readLines) + Resource.unit[F].use(_ => readLines) } - def parse[F[_]: Sync: ContextShift: Log]( + def parse[F[_]: Async: Log]( bondsPathStr: String, autogenShardSize: Int ): F[Map[PublicKey, Long]] = { - val bondsPath = Path.of(bondsPathStr) + val bondsPath = Path(bondsPathStr) def readLines = - io.file - .exists(blocker, bondsPath) + Files[F] + .exists(bondsPath) .ifM( Log[F].info(s"Parsing bonds file $bondsPath.") >> parse(bondsPath), Log[F].warn(s"BONDS FILE NOT FOUND: $bondsPath. 
Creating file with random bonds.") >> - newValidators[F](autogenShardSize, bondsPath.toAbsolutePath) + newValidators[F](autogenShardSize, bondsPath.absolute) ) - Resource.unit[F].use(readLines) + Resource.unit[F].use(_ => readLines) } - private def newValidators[F[_]: Sync: ContextShift: Log]( + private def newValidators[F[_]: Async: Log]( autogenShardSize: Int, bondsFilePath: Path ): F[Map[PublicKey, Long]] = { - val genesisFolder = bondsFilePath.getParent + val genesisFolder = bondsFilePath.parent.get // Generate private/public key pairs val keys = Vector.fill(autogenShardSize)(Secp256k1.newKeyPair) @@ -91,18 +89,18 @@ object BondsParser { val bonds = pubKeys.iterator.zipWithIndex.toMap.mapValues(_.toLong + 1L) def toFile(filePath: Path): Pipe[F, String, Unit] = - _.through(text.utf8Encode).through(io.file.writeAll(filePath, blocker)) + _.through(text.utf8.encode).through(Files[F].writeAll(filePath)) // Write generated `.sk` files with private key as content def writeSkFiles = Stream - .fromIterator(keys.iterator) + .fromIterator(keys.iterator, 1) .flatMap { case (privateKey, publicKey) => val sk = Base16.encode(privateKey.bytes) val pk = Base16.encode(publicKey.bytes) val skFile = genesisFolder.resolve(s"$pk.sk") - toFile(skFile, blocker)(Stream.emit(sk)) + toFile(skFile)(Stream.emit(sk)) } .compile .drain @@ -111,20 +109,18 @@ object BondsParser { def writeBondsFile = { val br = System.lineSeparator() val bondsStream = Stream - .fromIterator(bonds.iterator) + .fromIterator(bonds.iterator, 1) .evalMap { case (publicKey, stake) => val pk = Base16.encode(publicKey.bytes) Log[F].info(s"Bond generated $pk => $stake") *> s"$pk $stake$br".pure } - toFile(bondsFilePath, blocker)(bondsStream).compile.drain + toFile(bondsFilePath)(bondsStream).compile.drain } - // Write .sk files and bonds file - Resource.unit[F].use { blocker => - io.file.createDirectories(blocker, genesisFolder) *> - writeSkFiles(blocker) *> writeBondsFile(blocker) *> bonds.pure[F] - } + 
Files[F].createDirectories(genesisFolder) *> + writeSkFiles *> writeBondsFile *> bonds.pure[F] + } private def tryWithMsg[F[_]: Sync, A](f: => A)(failMsg: => String) = diff --git a/casper/src/main/scala/coop/rchain/casper/util/ConstructDeploy.scala b/casper/src/main/scala/coop/rchain/casper/util/ConstructDeploy.scala index cdb5a6f3557..e64fe8ba11f 100644 --- a/casper/src/main/scala/coop/rchain/casper/util/ConstructDeploy.scala +++ b/casper/src/main/scala/coop/rchain/casper/util/ConstructDeploy.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.util +import cats.effect.Clock import coop.rchain.models.PCost import cats.syntax.all._ import cats.{Functor, Monad} @@ -7,7 +8,7 @@ import com.google.protobuf.ByteString import coop.rchain.casper.protocol.{DeployData, ProcessedDeploy, ProcessedDeployProto} import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.{Secp256k1, Signed} -import coop.rchain.shared.{Base16, Time} +import coop.rchain.shared.Base16 object ConstructDeploy { @@ -60,7 +61,7 @@ object ConstructDeploy { shardId = shardId ) - def sourceDeployNowF[F[_]: Time: Functor]( + def sourceDeployNowF[F[_]: Clock: Functor]( source: String, phloLimit: Long = 1000000, phloPrice: Long = 1L, @@ -68,7 +69,7 @@ object ConstructDeploy { vabn: Long = 0, shardId: String = "" ): F[Signed[DeployData]] = - Time[F].nanoTime.map { + Clock[F].realTime.map(_.toNanos).map { sourceDeploy( source, _, @@ -81,25 +82,25 @@ object ConstructDeploy { } // TODO: replace usages with basicSendDeployData - def basicDeployData[F[_]: Monad: Time]( + def basicDeployData[F[_]: Monad: Clock]( id: Int, sec: PrivateKey = defaultSec, shardId: String = "" ): F[Signed[DeployData]] = sourceDeployNowF(source = s"@$id!($id)", sec = sec, shardId = shardId) - def basicSendDeployData[F[_]: Monad: Time]( + def basicSendDeployData[F[_]: Monad: Clock]( id: Int, shardId: String = "" ): F[Signed[DeployData]] = basicDeployData[F](id, shardId = shardId) - def basicReceiveDeployData[F[_]: Monad: Time]( + 
def basicReceiveDeployData[F[_]: Monad: Clock]( id: Int, shardId: String = "" ): F[Signed[DeployData]] = sourceDeployNowF(source = s"for(_ <- @$id){ Nil }", shardId = shardId) - def basicProcessedDeploy[F[_]: Monad: Time]( + def basicProcessedDeploy[F[_]: Monad: Clock]( id: Int, shardId: String = "" ): F[ProcessedDeploy] = diff --git a/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala b/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala index 65dc4e58523..d982db6999a 100644 --- a/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala +++ b/casper/src/main/scala/coop/rchain/casper/util/VaultParser.scala @@ -1,14 +1,12 @@ package coop.rchain.casper.util -import cats.effect.Sync +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import coop.rchain.casper.genesis.contracts.Vault import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.shared.Log -import fs2.{io, text} - -import java.nio.file.Path -import cats.effect.Resource +import fs2.text +import fs2.io.file.{Files, Path} object VaultParser { @@ -19,11 +17,11 @@ object VaultParser { * Cats Effect 3 removed ContextShift and Blocker. 
* - https://typelevel.org/cats-effect/docs/migration-guide#blocker */ - def parse[F[_]: Sync: ContextShift: Log](vaultsPath: Path): F[Seq[Vault]] = { + def parse[F[_]: Async: Log](vaultsPath: Path): F[Seq[Vault]] = { def readLines = - io.file - .readAll[F](vaultsPath, blocker, chunkSize = 4096) - .through(text.utf8Decode) + Files[F] + .readAll(vaultsPath) + .through(text.utf8.decode) .through(text.lines) .filter(_.trim.nonEmpty) .evalMap { line => @@ -62,22 +60,22 @@ object VaultParser { case ex: Throwable => new Exception(s"FAILED PARSING WALLETS FILE: $vaultsPath\n$ex") } - Resource.unit[F].use(readLines) + Resource.unit[F].use(_ => readLines) } - def parse[F[_]: Sync: ContextShift: Log](vaultsPathStr: String): F[Seq[Vault]] = { - val vaultsPath = Path.of(vaultsPathStr) + def parse[F[_]: Async: Log](vaultsPathStr: String): F[Seq[Vault]] = { + val vaultsPath = Path(vaultsPathStr) def readLines = - io.file - .exists(blocker, vaultsPath) + Files[F] + .exists(vaultsPath) .ifM( Log[F].info(s"Parsing wallets file $vaultsPath.") >> parse(vaultsPath), Log[F] .warn(s"WALLETS FILE NOT FOUND: $vaultsPath. 
No vaults will be put in genesis block.") .as(Seq.empty[Vault]) ) - Resource.unit[F].use(readLines) + Resource.unit[F].use(_ => readLines) } private def tryWithMsg[F[_]: Sync, A](f: => A)(failMsg: => String) = diff --git a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala index cba3ac9875b..b0dcc02756f 100644 --- a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala +++ b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala @@ -1,5 +1,6 @@ package coop.rchain.casper +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.dag.BlockDagStorage.DeployId diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala index 909550ced4c..92a37edb44b 100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala @@ -18,6 +18,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import coop.rchain.shared.RChainScheduler._ import cats.effect.Deferred +import cats.effect.unsafe.implicits.global class ProposerSpec extends AnyFlatSpec with Matchers with BlockDagStorageFixture { diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala index 54df05a3c8b..22288f270cb 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala @@ -1,5 +1,7 @@ package coop.rchain.casper.batch1 +import cats.effect.IO +import cats.effect.kernel.Async import cats.syntax.all._ 
import coop.rchain.casper.helper.TestNode import coop.rchain.casper.helper.TestNode._ @@ -25,7 +27,7 @@ class MultiParentCasperCommunicationSpec extends AnyFlatSpec with Matchers with "MultiParentCasper" should "ask peers for blocks it is missing" ignore effectTest { TestNode.networkEff(genesis, networkSize = 3).use { nodes => for { - deploy1 <- ConstructDeploy.sourceDeployNowF( + deploy1 <- ConstructDeploy.sourceDeployNowF[Effect]( "for(_ <- @1){ Nil } | @1!(1)", shardId = genesis.genesisBlock.shardId ) @@ -35,7 +37,7 @@ class MultiParentCasperCommunicationSpec extends AnyFlatSpec with Matchers with _ <- nodes(2).shutoff() //nodes(2) misses this block deploy2 <- ConstructDeploy - .sourceDeployNowF("@2!(2)", shardId = genesis.genesisBlock.shardId) + .sourceDeployNowF[Effect]("@2!(2)", shardId = genesis.genesisBlock.shardId) signedBlock2 <- nodes(0).addBlock(deploy2) _ <- nodes(2).addBlock(signedBlock2) @@ -79,7 +81,7 @@ class MultiParentCasperCommunicationSpec extends AnyFlatSpec with Matchers with // TODO reenable when merging of REV balances is done it should "ask peers for blocks it is missing and add them" ignore effectTest { def makeDeploy(i: Int): Effect[Signed[DeployData]] = - ConstructDeploy.sourceDeployNowF( + ConstructDeploy.sourceDeployNowF[Effect]( Vector("@2!(2)", "@1!(1)")(i), sec = if (i == 0) ConstructDeploy.defaultSec else ConstructDeploy.defaultSec2 ) diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala index 7b449b543c0..c347334489e 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala @@ -31,7 +31,10 @@ class MultiParentCasperMergeSpec extends AnyFlatSpec with Matchers with Inspecto shardId = shardId ) deployData1 <- ConstructDeploy - .sourceDeployNowF("@1!(1) | for(@x <- @1){ @1!(x) }", shardId = 
shardId) + .sourceDeployNowF[Effect]( + "@1!(1) | for(@x <- @1){ @1!(x) }", + shardId = shardId + ) deployData2 <- ConstructDeploy.basicDeployData[Effect](2, shardId = shardId) deploys = Vector( deployData0, diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala index 1378ff0930e..0b19cc9d2d8 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala @@ -80,13 +80,16 @@ class MultiParentCasperRholangSpec extends AnyFlatSpec with Matchers with Inspec for { registerDeploy <- ConstructDeploy - .sourceDeployNowF(registerSource, shardId = genesis.genesisBlock.shardId) + .sourceDeployNowF[Effect]( + registerSource, + shardId = genesis.genesisBlock.shardId + ) block0 <- node.addBlock(registerDeploy) registryId <- getDataAtPrivateChannel[Effect]( block0, calculateDeployUnforgeableName(block0) ) - callDeploy <- ConstructDeploy.sourceDeployNowF( + callDeploy <- ConstructDeploy.sourceDeployNowF[Effect]( callSource(registryId.head), shardId = genesis.genesisBlock.shardId ) diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala index b76e7a57710..ae515f1b1b3 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala @@ -21,7 +21,7 @@ class MultiParentCasperSmokeSpec extends AnyFlatSpec with Matchers with Inspecto it should "perform the most basic deploy successfully" in effectTest { TestNode.standaloneEff(genesis).use { node => ConstructDeploy - .sourceDeployNowF("new x in { x!(0) }", shardId = genesis.genesisBlock.shardId) >>= (node + .sourceDeployNowF[Effect]("new x in { x!(0) }", shardId 
= genesis.genesisBlock.shardId) >>= (node .addBlock(_)) } } diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala index 1c09060b23f..2bf61d7f261 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala @@ -14,7 +14,7 @@ import coop.rchain.crypto.signatures.Secp256k1 import coop.rchain.models.BlockHash.BlockHash import coop.rchain.models.syntax._ import coop.rchain.shared.Log -import fs2.concurrent.Queue +import fs2.concurrent.Channel import org.mockito.captor.ArgCaptor import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} @@ -42,7 +42,7 @@ import scala.collection.immutable.SortedMap // case (incomingQueue, _, outStream, bs, br, bds) => // for { // block <- IO.delay(makeBlock()) -// _ <- incomingQueue.enqueue1(block) +// _ <- incomingQueue.send(block) // outList <- outStream.take(1).compile.toList // } yield { // bs.put(Seq((block.blockHash, block))) wasCalled once @@ -59,7 +59,7 @@ import scala.collection.immutable.SortedMap // case (incomingQueue, _, outStream, bs, br, bds) => // for { // block <- IO.delay(makeBlock()) -// _ <- incomingQueue.enqueue1(block) +// _ <- incomingQueue.send(block) // } yield { // bs.put(*) wasNever called // bs.contains(*) wasNever called @@ -73,7 +73,7 @@ import scala.collection.immutable.SortedMap // case (incomingQueue, _, outStream, bs, br, bds) => // for { // block <- IO.delay(makeBlock().copy(blockHash = "abc".unsafeHexToByteString)) -// _ <- incomingQueue.enqueue1(block) +// _ <- incomingQueue.send(block) // } yield { // bs.put(*) wasNever called // bs.contains(*) wasNever called @@ -87,7 +87,7 @@ import scala.collection.immutable.SortedMap // case (incomingQueue, _, outStream, bs, br, bds) => // for { // block <- IO.delay(makeBlock().copy(sig 
= "abc".unsafeHexToByteString)) -// _ <- incomingQueue.enqueue1(block) +// _ <- incomingQueue.send(block) // } yield { // bs.put(*) wasNever called // bs.contains(*) wasNever called @@ -105,15 +105,15 @@ import scala.collection.immutable.SortedMap // a2 = makeBlock(List(a1.blockHash)) // // // Put the parent and child in the input queue -// _ <- incomingQueue.enqueue1(a2) -// _ <- incomingQueue.enqueue1(a1) +// _ <- incomingQueue.send(a2) +// _ <- incomingQueue.send(a1) // // // Dependencies of the child (its parent) have not yet been resolved, // // so only the parent goes to the output queue, since it has no dependencies // a1InOutQueue <- outStream.take(1).compile.lastOrError // // // A1 is now validated (e.g. in BlockProcessor) -// _ <- validatedQueue.enqueue1(a1) +// _ <- validatedQueue.send(a1) // // // All dependencies of child A2 are resolved, so it also goes to the output queue // a2InOutQueue <- outStream.take(1).compile.lastOrError @@ -166,8 +166,8 @@ import scala.collection.immutable.SortedMap // // private def withEnv[F[_]: Async: Log](shardId: String)( // f: ( -// Queue[F, BlockMessage], -// Queue[F, BlockMessage], +// Channel[F, BlockMessage], +// Channel[F, BlockMessage], // Stream[F, BlockHash], // BlockStore[F], // BlockRetriever[F], @@ -176,10 +176,10 @@ import scala.collection.immutable.SortedMap // ): F[Assertion] = // for { // state <- Ref[F].of(BlockReceiverState[BlockHash]) -// incomingBlockQueue <- Queue.unbounded[F, BlockMessage] -// incomingBlockStream = incomingBlockQueue.dequeue -// validatedBlocksQueue <- Queue.unbounded[F, BlockMessage] -// validatedBlocksStream = validatedBlocksQueue.dequeue +// incomingBlockQueue <- Channel.unbounded[F, BlockMessage] +// incomingBlockStream = incomingBlockQueue.stream +// validatedBlocksQueue <- Channel.unbounded[F, BlockMessage] +// validatedBlocksStream = validatedBlocksQueue.stream // // // Create mock separately for each test // bs = blockStoreMock[F] @@ -193,7 +193,7 @@ import 
scala.collection.immutable.SortedMap // incomingBlockStream, // validatedBlocksStream, // shardId, -// incomingBlockQueue.enqueue1 +// incomingBlockQueue.send // ) // } // res <- f(incomingBlockQueue, validatedBlocksQueue, blockReceiver, bs, br, bds) diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala index 6e212420689..fcdde639145 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala @@ -1,5 +1,7 @@ package coop.rchain.casper.batch2 +import cats.effect.unsafe.implicits.global + import java.nio.file.Files import cats.effect.{Async, IO} import cats.syntax.all._ @@ -19,7 +21,6 @@ class LmdbKeyValueStoreSpec with Matchers with ScalaCheckDrivenPropertyChecks with BeforeAndAfterAll { - implicit val scheduler = monix.execution.Scheduler.global val tempPath = Files.createTempDirectory(s"lmdb-test-") val tempDir = Directory(Path(tempPath.toFile)) diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala index c207f3b8b13..76c637e3ee4 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.batch2 -import cats.effect.{IO, Sync} +import cats.effect.unsafe.implicits.global +import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore.BlockStore @@ -25,7 +26,6 @@ import coop.rchain.models.blockImplicits._ import coop.rchain.models.syntax._ import coop.rchain.p2p.EffectsTestInstances.LogStub import coop.rchain.rspace.syntax._ -import coop.rchain.shared.Time import coop.rchain.shared.scalatestcontrib._ import org.scalatest._ import 
org.scalatest.flatspec.AnyFlatSpec @@ -58,7 +58,7 @@ class ValidateTest timeEff.reset() } - def createChain[F[_]: Sync: Time: BlockStore: BlockDagStorage]( + def createChain[F[_]: Async: BlockStore: BlockDagStorage]( length: Int, bonds: Map[Validator, Long] = Map.empty ): F[Vector[BlockMessage]] = @@ -71,7 +71,7 @@ class ValidateTest } } yield blocks - def createChainWithRoundRobinValidators[F[_]: Sync: Time: BlockStore: BlockDagStorage]( + def createChainWithRoundRobinValidators[F[_]: Async: BlockStore: BlockDagStorage]( length: Int, validatorLength: Int ): F[Vector[BlockMessage]] = { diff --git a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala index b4fcfba62f5..14454716bee 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala @@ -26,6 +26,7 @@ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import cats.effect.Ref +import cats.effect.unsafe.implicits.global class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { import coop.rchain.shared.RChainScheduler._ diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala index 843a6bf8f26..e47d1ee1b03 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.genesis import cats.Parallel +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore @@ -238,6 +239,7 @@ object GenesisTest { )( implicit genesisPath: Path, runtimeManager: RuntimeManager[IO], + c: Async[IO], log: 
LogStub[IO] ): IO[BlockMessage] = for { @@ -274,7 +276,7 @@ object GenesisTest { ) } yield genesisBlock - def withGenResources[F[_]: Async: ContextShift: Parallel]( + def withGenResources[F[_]: Async: Parallel]( body: (RuntimeManager[F], Path, LogStub[F]) => F[Unit] ): F[Unit] = { val storePath = storageLocation diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala index cdc29efdde8..33c3de1e0f9 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.helper +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO, Resource} import cats.syntax.all._ import coop.rchain.blockstorage.BlockStore @@ -11,7 +12,7 @@ import coop.rchain.casper.util.GenesisBuilder.GenesisContext import coop.rchain.metrics.Metrics import coop.rchain.metrics.Metrics.MetricsNOP import coop.rchain.rholang -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log import org.scalatest.{BeforeAndAfter, Suite} import java.nio.file.Path @@ -56,8 +57,7 @@ trait BlockDagStorageFixture extends BeforeAndAfter { self: Suite => object BlockDagStorageTestFixture { - def withStorageF[F[_]: Async: Metrics: Log] - : Resource[F, (BlockStore[F], BlockDagStorage[F])] = { + def withStorageF[F[_]: Async: Metrics: Log]: Resource[F, (BlockStore[F], BlockDagStorage[F])] = { def create(dir: Path) = for { kvm <- Resources.mkTestRNodeStoreManager[F](dir) diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala index 79a11dde77b..15ce2dbbc41 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala @@ -1,7 +1,7 @@ package 
coop.rchain.casper.helper import cats.Applicative -import cats.effect.{Async, IO, Sync} +import cats.effect.{Async, IO, Sync, Temporal} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.blockstorage.BlockStore @@ -25,7 +25,7 @@ import coop.rchain.models.syntax._ import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.syntax._ -import coop.rchain.shared.{Log, LogSource, Time} +import coop.rchain.shared.{Log, LogSource} object BlockGenerator { private[this] val GenerateBlockMetricsSource = @@ -182,7 +182,7 @@ trait BlockGenerator { sendersLatest.map(_.senderSeq).toList.maximumOption.getOrElse(-1L) } - def createValidatorBlock[F[_]: Sync: Time: BlockStore: BlockDagStorage]( + def createValidatorBlock[F[_]: Async: BlockStore: BlockDagStorage]( justifications: Seq[BlockMessage], validator: Validator, bonds: Map[Validator, Long], diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BondingUtil.scala b/casper/src/test/scala/coop/rchain/casper/helper/BondingUtil.scala index 6804705b2e5..af72e6363da 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BondingUtil.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BondingUtil.scala @@ -1,15 +1,15 @@ package coop.rchain.casper.helper import cats.Functor +import cats.effect.kernel.Clock import cats.syntax.functor._ import coop.rchain.casper.protocol.DeployData import coop.rchain.casper.util.ConstructDeploy import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.Signed -import coop.rchain.shared.Time object BondingUtil { - def bondingDeploy[F[_]: Functor: Time]( + def bondingDeploy[F[_]: Functor: Clock]( amount: Long, privateKey: PrivateKey, shardId: String = "" diff --git a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala index 5347843d754..74fb9cd2674 100644 --- 
a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.helper +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.casper.genesis.Genesis diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index 96754ef9c7b..0330efb715d 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -34,14 +34,14 @@ import coop.rchain.p2p.EffectsTestInstances._ import coop.rchain.rholang.interpreter.RhoRuntime.RhoHistoryRepository import coop.rchain.rspace.syntax._ import coop.rchain.shared._ -import fs2.concurrent.Queue +import fs2.concurrent.Channel import monix.execution.Scheduler import java.nio.file.Path import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} import cats.effect.{Deferred, Ref, Temporal} -case class TestNode[F[_]: Async: Temporal]( +case class TestNode[F[_]: Async]( name: String, local: PeerNode, tle: TransportLayerTestImpl[F], @@ -66,11 +66,10 @@ case class TestNode[F[_]: Async: Temporal]( rhoHistoryRepositoryEffect: RhoHistoryRepository[F], logEffect: LogStub[F], requestedBlocksEffect: RequestedBlocks[F], - timeEffect: Time[F], transportLayerEffect: TransportLayerTestImpl[F], connectionsCellEffect: Ref[F, Connections], rpConfAskEffect: RPConfAsk[F], - routingMessageQueue: Queue[F, RoutingMessage], + routingMessageQueue: Channel[F, RoutingMessage], shardName: String, minPhloPrice: Long ) { @@ -90,7 +89,6 @@ case class TestNode[F[_]: Async: Temporal]( implicit val sp: Span[F] = spanEffect implicit val runtimeManager: RuntimeManager[F] = runtimeManagerEffect implicit val rhoHistoryRepository: RhoHistoryRepository[F] = rhoHistoryRepositoryEffect - implicit val t: Time[F] = timeEffect implicit val 
transportLayerEff: TransportLayerTestImpl[F] = transportLayerEffect implicit val connectionsCell: Ref[F, Connections] = connectionsCellEffect implicit val rp: RPConfAsk[F] = rpConfAskEffect @@ -283,10 +281,6 @@ case class TestNode[F[_]: Async: Temporal]( object TestNode { type Effect[A] = IO[A] - import scala.concurrent.ExecutionContext.Implicits.global - implicit val cs: ContextShift[IO] = IO.contextShift(global) - implicit val t: Temporal[IO] = IO.timer(global) - def standaloneEff(genesis: GenesisContext): Resource[Effect, TestNode[Effect]] = networkEff( genesis, @@ -315,7 +309,7 @@ object TestNode { ) } - private def networkF[F[_]: Async: Parallel: ContextShift: Temporal: TestNetwork]( + private def networkF[F[_]: Async: Parallel: TestNetwork]( sks: IndexedSeq[PrivateKey], genesis: BlockMessage, storageMatrixPath: Path, @@ -373,7 +367,7 @@ object TestNode { } } - private def createNode[F[_]: Async: Temporal: Parallel: ContextShift: TestNetwork]( + private def createNode[F[_]: Async: Parallel: TestNetwork]( name: String, currentPeerNode: PeerNode, genesis: BlockMessage, @@ -420,7 +414,6 @@ object TestNode { implicit val rm = runtimeManager implicit val rhr = runtimeManager.getHistoryRepo implicit val logEff = new LogStub[F](Log.log[F]) - implicit val timeEff = logicalTime implicit val connectionsCell = Ref.unsafe[F, Connections](Connect.Connections.empty) implicit val transportLayerEff = tle implicit val rpConfAsk = createRPConfAsk[F](currentPeerNode) @@ -465,7 +458,7 @@ object TestNode { } // Remove TransportLayer handling in TestNode (too low level for these tests) - routingMessageQueue <- Queue.unbounded[F, RoutingMessage] + routingMessageQueue <- Channel.unbounded[F, RoutingMessage] node = new TestNode[F]( name, @@ -489,7 +482,6 @@ object TestNode { rhoHistoryRepositoryEffect = rhr, spanEffect = spanEff, logEffect = logEff, - timeEffect = timeEff, connectionsCellEffect = connectionsCell, transportLayerEffect = transportLayerEff, rpConfAskEffect = rpConfAsk, 
diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala index 5d5812b6146..5222372af1b 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestRhoRuntime.scala @@ -8,7 +8,7 @@ import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime} import coop.rchain.shared.Log object TestRhoRuntime { - def rhoRuntimeEff[F[_]: Log: Metrics: Span: Async: Parallel: ContextShift]( + def rhoRuntimeEff[F[_]: Log: Metrics: Span: Async: Parallel]( initRegistry: Boolean = true ): Resource[F, (RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = mkRuntimes[F]("hash-set-casper-test-genesis-", initRegistry = initRegistry) diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index 187bc15720c..6cd27843b6b 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -92,7 +92,7 @@ class MergeNumberChannelSpec extends AnyFlatSpec { RhoName(baseRhoSeed.next()) } - def testCase[F[_]: Async: ContextShift: Parallel: Span: Log]( + def testCase[F[_]: Async: Parallel: Span: Log]( baseTerms: Seq[String], leftTerms: Seq[DeployTestInfo], rightTerms: Seq[DeployTestInfo], diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala index eaa0fa12234..bb1e3552cfb 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala @@ -13,17 +13,16 @@ import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.rspace.merger.{EventLogIndex, EventLogMergingLogic} import 
coop.rchain.sdk.dag.merging.ConflictResolutionLogic import coop.rchain.shared.scalatestcontrib.effectTest -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class MergingCases extends AnyFlatSpec with Matchers { - val genesisContext = GenesisBuilder.buildGenesis(validatorsNum = 5) - val genesis = genesisContext.genesisBlock - implicit val logEff = Log.log[IO] - implicit val timeF: Time[IO] = new LogicalTime[IO] - import coop.rchain.shared.RChainScheduler._ + val genesisContext = GenesisBuilder.buildGenesis(validatorsNum = 5) + val genesis = genesisContext.genesisBlock + implicit val logEff = Log.log[IO] + implicit val ioP = IO.parallelForIO val runtimeManagerResource: Resource[IO, RuntimeManager[IO]] = for { dir <- Resources.copyStorage[IO](genesisContext.storageDirectory) @@ -50,8 +49,8 @@ class MergingCases extends AnyFlatSpec with Matchers { val blockNum = 1L for { - d1 <- ConstructDeploy.sourceDeployNowF("Nil", sec = payer1Key) - d2 <- ConstructDeploy.sourceDeployNowF("Nil", sec = payer2Key) + d1 <- ConstructDeploy.sourceDeployNowF[IO]("Nil", sec = payer1Key) + d2 <- ConstructDeploy.sourceDeployNowF[IO]("Nil", sec = payer2Key) userDeploys = Seq(d1, d2) blockData = BlockData( blockNum, diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala index ba0fb818cfd..6880b48c153 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.rholang +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Resource} import cats.syntax.all._ import cats.implicits.catsSyntaxApplicativeId diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala 
b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala index c388db9fcb6..2f1e2840088 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala @@ -36,7 +36,7 @@ class DeployerIdTest extends AnyFlatSpec with Matchers { val pk = ByteString.copyFrom(Secp256k1.toPublic(sk).bytes) runtimeManager.use { mgr => for { - deploy <- ConstructDeploy.sourceDeployNowF( + deploy <- ConstructDeploy.sourceDeployNowF[IO]( s"""new return, auth(`rho:rchain:deployerId`) in { return!(*auth) }""", sec = sk ) @@ -81,14 +81,15 @@ class DeployerIdTest extends AnyFlatSpec with Matchers { TestNode.standaloneEff(genesisContext).use { node => for { - contract <- ConstructDeploy.sourceDeployNowF( + contract <- ConstructDeploy.sourceDeployNowF[IO]( checkDeployerDefinition, sec = deployer, shardId = genesisContext.genesisBlock.shardId ) - block <- node.addBlock(contract) - stateHash = block.postStateHash - checkAuthDeploy <- ConstructDeploy.sourceDeployNowF(checkDeployerCall, sec = contractUser) + block <- node.addBlock(contract) + stateHash = block.postStateHash + checkAuthDeploy <- ConstructDeploy + .sourceDeployNowF[IO](checkDeployerCall, sec = contractUser) result <- node.runtimeManager.spawnRuntime >>= { _.captureResults(stateHash, checkAuthDeploy) } diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala index a8cbbb01a91..0153ef65bc5 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala @@ -24,7 +24,7 @@ import coop.rchain.models.syntax._ import coop.rchain.p2p.EffectsTestInstances.LogStub import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.scalatestcontrib._ -import coop.rchain.shared.{Log, LogSource, Time} +import 
coop.rchain.shared.{Log, LogSource} import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -42,7 +42,6 @@ class InterpreterUtilTest implicit val span: Span[IO] = new NoopSpan[IO] implicit val logSource: LogSource = LogSource(this.getClass) import coop.rchain.shared.RChainScheduler._ - implicit private val timeEff = Time.fromTimer[IO] val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock @@ -310,9 +309,9 @@ class InterpreterUtilTest //deploy each Rholang program separately and record its cost for { - deploy1 <- ConstructDeploy.sourceDeployNowF("@1!(Nil)") - deploy2 <- ConstructDeploy.sourceDeployNowF("@3!([1,2,3,4])") - deploy3 <- ConstructDeploy.sourceDeployNowF("for(@x <- @0) { @4!(x.toByteArray()) }") + deploy1 <- ConstructDeploy.sourceDeployNowF[IO]("@1!(Nil)") + deploy2 <- ConstructDeploy.sourceDeployNowF[IO]("@3!([1,2,3,4])") + deploy3 <- ConstructDeploy.sourceDeployNowF[IO]("for(@x <- @0) { @4!(x.toByteArray()) }") cost1 <- computeDeployCosts(deploy1) cost2 <- computeDeployCosts(deploy2) diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala index 15b02f844b1..4c12e1e5304 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala @@ -36,7 +36,7 @@ object Resources { dbMappings >>= (xs => LmdbDirStoreManager[F](dirPath, xs.toMap)) } - def mkRuntimeManager[F[_]: Async: Parallel: ContextShift: Log]( + def mkRuntimeManager[F[_]: Async: Parallel: Log]( prefix: String, mergeableTagName: Par ): Resource[F, RuntimeManager[F]] = @@ -46,7 +46,7 @@ object Resources { // TODO: This is confusing to create another instances for Log, Metrics and Span. // Investigate if it can be removed or define it as parameters. Similar for [[mkRuntimeManagerWithHistoryAt]]. 
- def mkRuntimeManagerAt[F[_]: Async: Parallel: ContextShift]( + def mkRuntimeManagerAt[F[_]: Async: Parallel]( kvm: KeyValueStoreManager[F], mergeableTagName: Par ): F[RuntimeManager[F]] = { diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala index df13be060f2..b86a1da2c22 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.rholang import cats.data.EitherT +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Resource, Sync} import cats.syntax.all._ import cats.{Applicative, Functor, Id} @@ -29,7 +30,7 @@ import coop.rchain.rholang.interpreter.errors.BugFoundError import coop.rchain.rholang.interpreter.{accounting, ParBuilderUtil, ReplayRhoRuntime} import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.shared.scalatestcontrib.effectTest -import coop.rchain.shared.{Base16, Log, Time} +import coop.rchain.shared.{Base16, Log} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -52,12 +53,9 @@ object SystemDeployReplayResult { } class RuntimeManagerTest extends AnyFlatSpec with Matchers { - - implicit val timeF: Time[IO] = new LogicalTime[IO] implicit val log: Log[IO] = Log.log[IO] implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() - import coop.rchain.shared.RChainScheduler._ val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock @@ -133,7 +131,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { # } # } #""".stripMargin('#') - ConstructDeploy.sourceDeployNowF(source = source, phloLimit = 100000) >>= { deploy => + ConstructDeploy.sourceDeployNowF[IO](source = source, phloLimit = 100000) >>= { deploy => 
computeState(runtimeManager, deploy, genPostState) >>= { case (playStateHash1, processedDeploy) => replayComputeState(runtimeManager)(genPostState, processedDeploy) map { @@ -303,7 +301,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { "computeState" should "capture rholang errors" in effectTest { val badRholang = """ for(@x <- @"x" & @y <- @"y"){ @"xy"!(x + y) } | @"x"!(1) | @"y"!("hi") """ for { - deploy <- ConstructDeploy.sourceDeployNowF(badRholang) + deploy <- ConstructDeploy.sourceDeployNowF[IO](badRholang) result <- runtimeManagerResource.use( computeState(_, deploy, genesis.postStateHash) ) @@ -321,12 +319,12 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { val s1 = "@2!(2)" val s2 = "for(@a <- @1){ @123!(5 * a) }" - val deploys0F = Vector(s0, s1, s2).traverse(ConstructDeploy.sourceDeployNowF(_)) + val deploys0F = Vector(s0, s1, s2).traverse(ConstructDeploy.sourceDeployNowF[IO](_)) val s3 = "@1!(1)" val s4 = "for(@a <- @2){ @456!(5 * a) }" - val deploys1F = Vector(s3, s4).traverse(ConstructDeploy.sourceDeployNowF(_)) + val deploys1F = Vector(s3, s4).traverse(ConstructDeploy.sourceDeployNowF[IO](_)) runtimeManagerResource.use { runtimeManager => for { @@ -398,7 +396,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { it should "capture rholang parsing errors and charge for parsing" in effectTest { val badRholang = """ for(@x <- @"x" & @y <- @"y"){ @"xy"!(x + y) | @"x"!(1) | @"y"!("hi") """ for { - deploy <- ConstructDeploy.sourceDeployNowF(badRholang) + deploy <- ConstructDeploy.sourceDeployNowF[IO](badRholang) result <- runtimeManagerResource.use( computeState(_, deploy, genesis.postStateHash) ) @@ -416,7 +414,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { implicit val rand: Blake2b512Random = Blake2b512Random(Array.empty[Byte]) val initialPhlo = Cost.UNSAFE_MAX for { - deploy <- ConstructDeploy.sourceDeployNowF(correctRholang) + deploy <- ConstructDeploy.sourceDeployNowF[IO](correctRholang) 
runtime <- runtimeManager.spawnRuntime _ <- runtime.cost.set(initialPhlo) @@ -440,7 +438,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { runtimeManagerResource.use { mgr => for { - deploy0 <- ConstructDeploy.sourceDeployNowF( + deploy0 <- ConstructDeploy.sourceDeployNowF[IO]( s""" |new rl(`rho:registry:lookup`), NonNegativeNumberCh in { | rl!(`rho:lang:nonNegativeNumber`, *NonNegativeNumberCh) | @@ -451,7 +449,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { ) result0 <- computeState(mgr, deploy0, genesis.postStateHash) hash = result0._1 - deploy1 <- ConstructDeploy.sourceDeployNowF( + deploy1 <- ConstructDeploy.sourceDeployNowF[IO]( s"""new return in { for(nn <- @"nn"){ nn!("value", *return) } } """ ) result1 <- mgr.spawnRuntime >>= { _.captureResults(hash, deploy1) } @@ -518,16 +516,14 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { } "emptyStateHash" should "not remember previous hot store state" in effectTest { - implicit val timeEff: LogicalTime[Id] = new LogicalTime[Id] - - val term = ConstructDeploy.basicDeployData[Id](0) + val term = ConstructDeploy.basicDeployData[IO](0) def run: IO[StateHash] = runtimeManagerResource .use { m => for { hash <- RuntimeManager.emptyStateHashFixed.pure[IO] - afterHash <- computeState(m, term, genesis.postStateHash) + afterHash <- computeState(m, term.unsafeRunSync(), genesis.postStateHash) .map(_ => hash) } yield afterHash } @@ -542,7 +538,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { "computeState" should "be replayed by replayComputeState" in effectTest { runtimeManagerResource.use { runtimeManager => for { - deploy <- ConstructDeploy.sourceDeployNowF( + deploy <- ConstructDeploy.sourceDeployNowF[IO]( """ # new deployerId(`rho:rchain:deployerId`), # rl(`rho:registry:lookup`), @@ -607,8 +603,8 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { runtimeManagerResource.use { mgr => for { - deploy0 <- ConstructDeploy.sourceDeployNowF(""" for(@x 
<- @"w") { @"z"!("Got x") } """) - deploy1 <- ConstructDeploy.sourceDeployNowF( + deploy0 <- ConstructDeploy.sourceDeployNowF[IO](""" for(@x <- @"w") { @"z"!("Got x") } """) + deploy1 <- ConstructDeploy.sourceDeployNowF[IO]( """ for(@x <- @"x" & @y <- @"y"){ @"xy"!(x + y) | @"x"!(1) | @"y"!(10) } """ ) genPostState = genesis.postStateHash @@ -742,7 +738,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { # d9!(2) #} #""".stripMargin('#') - ConstructDeploy.sourceDeployNowF(source = source, phloLimit = Int.MaxValue - 2) >>= { + ConstructDeploy.sourceDeployNowF[IO](source = source, phloLimit = Int.MaxValue - 2) >>= { deploy => computeState(runtimeManager, deploy, genPostState) >>= { case (playStateHash1, processedDeploy) => @@ -759,7 +755,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { private def invalidReplay(source: String): IO[Either[ReplayFailure, StateHash]] = runtimeManagerResource.use { runtimeManager => for { - deploy <- ConstructDeploy.sourceDeployNowF(source, phloLimit = 10000) + deploy <- ConstructDeploy.sourceDeployNowF[IO](source, phloLimit = 10000) genPostState = genesis.postStateHash blockData = BlockData( genesisBlockNum, @@ -830,7 +826,7 @@ class RuntimeManagerTest extends AnyFlatSpec with Matchers { val genPostState = genesis.postStateHash for { - deploy <- ConstructDeploy.sourceDeployNowF(term) + deploy <- ConstructDeploy.sourceDeployNowF[IO](term) result <- runtimeManagerResource.use { rm => val blockData = BlockData( 1L, diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala index 6eff7edb39a..f596d3604b1 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.sync -import cats.effect.IO +import cats.effect.{Clock, IO, Ref} import 
com.google.protobuf.ByteString import coop.rchain.casper.blocks.BlockRetriever import coop.rchain.casper.blocks.BlockRetriever.{RequestState, RequestedBlocks} @@ -20,9 +20,8 @@ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers -import java.util.concurrent.TimeUnit import scala.concurrent.duration._ -import cats.effect.Ref +import cats.effect.unsafe.implicits.global class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { @@ -74,7 +73,7 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi describe("if block request is still within a timeout") { it("should keep the request not touch") { val requested = - RequestState(timestamp = timer.clock.realTime(TimeUnit.MILLISECONDS).unsafeRunSync) + RequestState(timestamp = Clock[IO].realTime.map(_.toMillis).unsafeRunSync) currentRequests.set(Map(hash -> requested)).unsafeRunSync // when blockRetriever.requestAll(timeout).unsafeRunSync @@ -120,9 +119,7 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi } it("timestamp is reset") { val waitingList = List(peerNode("waiting1"), peerNode("waiting2")) - val initTime = timer.clock - .realTime(TimeUnit.MILLISECONDS) - .unsafeRunSync + val initTime = Clock[IO].realTime.map(_.toMillis).unsafeRunSync val requested = RequestState( timestamp = initTime - timeout.toMillis - 1, peers = Set(peerNode("peer")), diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala index 8a0948754cd..1b77d00911b 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala @@ -11,11 +11,12 @@ import coop.rchain.comm.{Endpoint, NodeIdentifier, PeerNode} import coop.rchain.metrics.Metrics import coop.rchain.models.BlockHash.BlockHash import 
coop.rchain.p2p.EffectsTestInstances.{createRPConfAsk, LogStub, TransportLayerStub} -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import cats.effect.Ref +import cats.effect.unsafe.implicits.global class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { @@ -39,7 +40,6 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher implicit val transportLayer = new TransportLayerStub[IO] implicit val rpConf = createRPConfAsk[IO](local) import coop.rchain.shared.RChainScheduler._ - implicit val time = Time.fromTimer[IO] implicit val commUtil = CommUtil.of[IO] implicit val blockRetriever = BlockRetriever.of[IO] diff --git a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala index 47c324b29d8..9badf203092 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala @@ -24,6 +24,7 @@ import coop.rchain.shared.syntax._ import java.nio.file.{Files, Path} import scala.collection.compat.immutable.LazyList import scala.collection.mutable +import cats.effect.unsafe.implicits.global object GenesisBuilder { diff --git a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala index bfe083b379a..f6e87c56ce1 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala @@ -1,6 +1,7 @@ package coop.rchain.casper.util.scalatest import cats.effect.IO +import cats.effect.unsafe.implicits.global import fs2.Stream import org.scalatest.matchers.{MatchResult, Matcher} diff --git 
a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala index 3034e98d7c8..e0cf03c171e 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala @@ -1,6 +1,7 @@ package coop.rchain.comm.discovery -import cats.effect.{AsyncEffect, Sync} +import cats.effect.std.Dispatcher +import cats.effect.{Async, Sync} import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.catscontrib.ski._ @@ -14,7 +15,7 @@ import io.grpc.netty._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -class GrpcKademliaRPC[F[_]: Sync: AsyncEffect: RPConfAsk: Metrics]( +class GrpcKademliaRPC[F[_]: Async: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration, grpcEC: ExecutionContext diff --git a/comm/src/main/scala/coop/rchain/comm/rp/HandleMessages.scala b/comm/src/main/scala/coop/rchain/comm/rp/HandleMessages.scala index 4a0c0a4a3bd..7f71d9b808c 100644 --- a/comm/src/main/scala/coop/rchain/comm/rp/HandleMessages.scala +++ b/comm/src/main/scala/coop/rchain/comm/rp/HandleMessages.scala @@ -12,7 +12,7 @@ import coop.rchain.comm.transport.CommunicationResponse._ import coop.rchain.comm.transport._ import coop.rchain.metrics.Metrics import coop.rchain.shared._ -import fs2.concurrent.Queue +import fs2.concurrent.Channel import java.net.InetAddress import scala.Function.const @@ -26,14 +26,14 @@ object HandleMessages { def handle[F[_]: Sync: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics]( protocol: Protocol, - routingMessageQueue: Queue[F, RoutingMessage] + routingMessageQueue: Channel[F, RoutingMessage] ): F[CommunicationResponse] = handle_[F](protocol, ProtocolHelper.sender(protocol), routingMessageQueue) private def handle_[F[_]: Sync: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics]( proto: Protocol, sender: PeerNode, - routingMessageQueue: 
Queue[F, RoutingMessage] + routingMessageQueue: Channel[F, RoutingMessage] ): F[CommunicationResponse] = proto.message match { case Protocol.Message.Heartbeat(heartbeat) => handleHeartbeat[F](sender, heartbeat) @@ -61,9 +61,9 @@ object HandleMessages { def handlePacket[F[_]: Functor]( remote: PeerNode, packet: Packet, - routingMessageQueue: Queue[F, RoutingMessage] + routingMessageQueue: Channel[F, RoutingMessage] ): F[CommunicationResponse] = - routingMessageQueue.enqueue1(RoutingMessage(remote, packet)).as(handledWithoutMessage) + routingMessageQueue.send(RoutingMessage(remote, packet)).as(handledWithoutMessage) def handleProtocolHandshakeResponse[F[_]: Monad: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics]( peer: PeerNode diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala index 7ce4a84e25b..8aa11a72eff 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransport.scala @@ -106,7 +106,7 @@ object GrpcTransport { packetChunkSize: Int ): F[CommErr[Unit]] = { val chunkIt = Stream.eval(Chunker.chunkIt[F](networkId, blob, packetChunkSize)).flatMap { i => - Stream.fromIterator(i) + Stream.fromIterator(i, 1) } transport.stream(chunkIt, new Metadata).attempt.map(processResponse(peer, _)) } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index 2528627528a..bf54308c4b8 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -1,6 +1,7 @@ package coop.rchain.comm.transport -import cats.effect.{Async, ConcurrentEffect, Resource, Sync} +import cats.effect.std.Dispatcher +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import 
cats.effect.syntax.all._ import coop.rchain.comm.protocol.routing._ @@ -12,7 +13,7 @@ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import fs2.Stream import io.grpc.{Metadata, Server} -import fs2.concurrent.Queue +import fs2.concurrent.Channel import io.grpc.netty.NettyServerBuilder import io.netty.handler.ssl.SslContext import io.netty.internal.tcnative.AsyncTask @@ -30,7 +31,7 @@ object GrpcTransportReceiver { type MessageBuffers[F[_]] = (Send => F[Boolean], StreamMessage => F[Boolean], Stream[F, Unit]) type MessageHandlers[F[_]] = (Send => F[Unit], StreamMessage => F[Unit]) - def create[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( + def create[F[_]: Async: RPConfAsk: Log: Metrics]( networkId: String, port: Int, serverSslContext: SslContext, @@ -54,12 +55,11 @@ object GrpcTransportReceiver { private def getBuffers(peer: PeerNode): F[MessageBuffers[F]] = { def createBuffers(clear: F[Unit]): F[MessageBuffers[F]] = for { - tellBuffer <- Queue.bounded[F, Send](64) - blobBuffer <- Queue.bounded[F, StreamMessage](8) - stream = tellBuffer - .dequeueChunk(1) + tellBuffer <- Channel.bounded[F, Send](64) + blobBuffer <- Channel.bounded[F, StreamMessage](8) + stream = tellBuffer.stream .parEvalMapUnordered(parallelism)(messageHandlers._1(_)) concurrently - blobBuffer.dequeueChunk(1).parEvalMapUnordered(parallelism)(messageHandlers._2(_)) + blobBuffer.stream.parEvalMapUnordered(parallelism)(messageHandlers._2(_)) // inbound queue lives for 10 minutes TODO synchronize with Kademlia table cleanup s = (Stream.fixedDelay(10.minutes) ++ Stream.eval(clear)) concurrently stream _ <- Sync[F] diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala index bcd77629d1b..ce4ebca31b4 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportServer.scala @@ 
-1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.{Async, ConcurrentEffect, Resource, Sync} +import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.comm.protocol.routing.Protocol @@ -18,7 +18,7 @@ import java.nio.file.Path import scala.collection.concurrent.TrieMap import scala.io.Source import scala.util.{Left, Right, Using} -import cats.effect.{Deferred, Ref, Temporal} +import cats.effect.{Deferred, Ref} trait TransportLayerServer[F[_]] { def resource( @@ -38,7 +38,7 @@ object TransportLayerServer { } } -class GrpcTransportServer[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( +class GrpcTransportServer[F[_]: Async: RPConfAsk: Log: Metrics]( networkId: String, port: Int, cert: String, @@ -108,7 +108,7 @@ class GrpcTransportServer[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Tem object GrpcTransportServer { - def acquireServer[F[_]: Async: AsyncEffect: RPConfAsk: Log: Metrics: Temporal]( + def acquireServer[F[_]: Async: RPConfAsk: Log: Metrics]( networkId: String, port: Int, certPath: Path, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala index d6a89d5925c..aeac669b2cd 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamHandler.scala @@ -2,7 +2,6 @@ package coop.rchain.comm.transport import cats.data._ import cats.effect.Sync -import cats.effect.implicits.catsEffectSyntaxBracket import cats.syntax.all._ import coop.rchain.comm.PeerNode import coop.rchain.comm.protocol.routing._ diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala index e710e348c27..767edb4e596 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala +++ 
b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala @@ -6,7 +6,7 @@ import coop.rchain.comm.PeerNode import coop.rchain.comm.transport.PacketOps._ import coop.rchain.shared.Log import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import scala.collection.concurrent.TrieMap @@ -16,7 +16,7 @@ class StreamObservableClass[F[_]: Async: Log]( peer: PeerNode, bufferSize: Int, cache: TrieMap[String, Array[Byte]], - private val subject: Queue[F, StreamMsgId] + private val subject: Channel[F, StreamMsgId] ) { def enque(blob: Blob): F[Unit] = { @@ -51,8 +51,8 @@ object StreamObservable { bufferSize: Int, cache: TrieMap[String, Array[Byte]] ): F[StreamObservable[F]] = - Queue.bounded[F, StreamMsgId](bufferSize).map { q => + Channel.bounded[F, StreamMsgId](bufferSize).map { q => val x = new StreamObservableClass(peer, bufferSize, cache, q) - (x.enque, q.dequeueChunk(1)) + (x.enque, q.stream) } } diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/DistanceSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/DistanceSpec.scala index 08809a28160..7ec28fbde63 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/DistanceSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/DistanceSpec.scala @@ -1,5 +1,10 @@ package coop.rchain.comm.discovery +import cats.effect.IO +import cats.effect.testing.scalatest.AsyncIOSpec +import cats.effect.unsafe.implicits.global +import cats.syntax.all._ + import java.util import cats.{catsInstancesForId => _, _} import coop.rchain.catscontrib.effect.implicits._ @@ -18,15 +23,15 @@ class DistanceSpec extends AnyFlatSpec with Matchers { } val endpoint = Endpoint("", 0, 0) - implicit val ping: KademliaRPC[Id] = new KademliaRPC[Id] { - def ping(node: PeerNode): Boolean = true - def lookup(key: Seq[Byte], peer: PeerNode): Seq[PeerNode] = Seq.empty[PeerNode] + implicit val ping: KademliaRPC[IO] = new KademliaRPC[IO] { + def ping(node: PeerNode): IO[Boolean] = true.pure[IO] + def 
lookup(key: Seq[Byte], peer: PeerNode): IO[Seq[PeerNode]] = Seq.empty[PeerNode].pure[IO] } "A PeerNode of width n bytes" should "have distance to itself equal to 8n" in { for (i <- 1 to 64) { val home = PeerNode(NodeIdentifier(randBytes(i)), endpoint) - val nt = PeerTable[PeerNode, Id](home.key) + val nt = PeerTable[PeerNode, IO](home.key) nt.distance(home) should be(Some(8 * nt.width)) } } @@ -48,7 +53,7 @@ class DistanceSpec extends AnyFlatSpec with Matchers { } def testKey(key: Array[Byte]): Boolean = { - val table = PeerTable[PeerNode, Id](key) + val table = PeerTable[PeerNode, IO](key) oneOffs(key).map(table.distance(_)) == (0 until 8 * width).map(Option[Int]) } @@ -71,22 +76,22 @@ class DistanceSpec extends AnyFlatSpec with Matchers { } s"An empty table of width $width" should "have no peers" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) assert(table.table.forall(_.isEmpty)) } it should "return no peers" in { - val table = PeerTable[PeerNode, Id](kr) - table.peers.size should be(0) + val table = PeerTable[PeerNode, IO](kr) + table.peers.map(_.size).unsafeRunSync() should be(0) } it should "return no values on lookup" in { - val table = PeerTable[PeerNode, Id](kr) - table.lookup(randBytes(width)).size should be(0) + val table = PeerTable[PeerNode, IO](kr) + table.lookup(randBytes(width)).map(_.size) should be(0) } s"A table of width $width" should "add a key at most once" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) val toAdd = oneOffs(kr).head val dist = table.distance(toAdd).get for (i <- 1 to 10) { @@ -96,7 +101,7 @@ class DistanceSpec extends AnyFlatSpec with Matchers { } s"A table of width $width with peers at all distances" should "have no empty buckets" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) for (k <- oneOffs(kr)) { table.updateLastSeen(PeerNode(NodeIdentifier(k), endpoint)) } @@ -104,7 +109,7 @@ class DistanceSpec 
extends AnyFlatSpec with Matchers { } it should s"return min(k, ${8 * width}) peers on lookup" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) val krOneOffs = oneOffs(kr) for (k <- krOneOffs) { table.updateLastSeen(PeerNode(NodeIdentifier(k), endpoint)) @@ -112,29 +117,29 @@ class DistanceSpec extends AnyFlatSpec with Matchers { val randomKey = randBytes(width) val expected = if (krOneOffs.exists(util.Arrays.equals(_, randomKey))) 8 * width - 1 else 8 * width - table.lookup(randomKey).size should be(scala.math.min(table.k, expected)) + table.lookup(randomKey).map(_.size) should be(scala.math.min(table.k, expected)) } it should "not return sought peer on lookup" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) for (k <- oneOffs(kr)) { table.updateLastSeen(PeerNode(NodeIdentifier(k), endpoint)) } val target = table.table(table.width * 4)(0) val resp = table.lookup(target.key) - assert(resp.forall(_.key != target.key)) + assert(resp.map(_.forall(_.key != target.key)).unsafeRunSync()) } it should s"return ${8 * width} peers when sequenced" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) for (k <- oneOffs(kr)) { table.updateLastSeen(PeerNode(NodeIdentifier(k), endpoint)) } - table.peers.size should be(8 * width) + table.peers.map(_.size).unsafeRunSync() should be(8 * width) } it should "find each added peer" in { - val table = PeerTable[PeerNode, Id](kr) + val table = PeerTable[PeerNode, IO](kr) for (k <- oneOffs(kr)) { table.updateLastSeen(PeerNode(NodeIdentifier(k), endpoint)) } diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala index 49a6c777f3b..093f7173c5a 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala @@ -1,6 +1,7 @@ 
package coop.rchain.comm.discovery import cats.Applicative +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Resource, Sync} import cats.mtl.DefaultApplicativeAsk import coop.rchain.comm._ diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala index 97ff0d9aca3..c94b208656d 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCRuntime.scala @@ -1,7 +1,7 @@ package coop.rchain.comm.discovery import cats._ -import cats.effect.{Resource, Sync} +import cats.effect.{Async, Resource, Sync, Temporal} import cats.syntax.all._ import coop.rchain.comm._ import io.grpc @@ -10,9 +10,8 @@ import java.net.ServerSocket import scala.collection.mutable import scala.concurrent.duration._ import scala.util.{Try, Using} -import cats.effect.Temporal -abstract class KademliaRPCRuntime[F[_]: Sync: Temporal, E <: Environment] { +abstract class KademliaRPCRuntime[F[_]: Async, E <: Environment] { def createEnvironment(port: Int): F[E] diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala index b66e3d0e88b..74c91b56461 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaRPCSpec.scala @@ -8,9 +8,9 @@ import org.scalatest.wordspec.AnyWordSpecLike import scala.concurrent.duration._ import scala.util.Random -import cats.effect.Temporal +import cats.effect.kernel.Async -abstract class KademliaRPCSpec[F[_]: Sync: Temporal, E <: Environment] +abstract class KademliaRPCSpec[F[_]: Async, E <: Environment] extends KademliaRPCRuntime[F, E] with AnyWordSpecLike with Matchers { diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaSpec.scala 
b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaSpec.scala index 45d42c5360b..21f03a797a0 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/KademliaSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/KademliaSpec.scala @@ -1,12 +1,11 @@ package coop.rchain.comm.discovery import scala.collection.mutable - import cats.Id - +import cats.syntax.all._ +import cats.effect.IO import coop.rchain.catscontrib.effect.implicits._ import coop.rchain.comm._ - import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers @@ -27,7 +26,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should add it to a bucket according to its distance") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) table.distance(peer0) shouldBe DISTANCE_6 // when table.updateLastSeen(peer0) @@ -38,7 +37,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should not ping the peer") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) // when table.updateLastSeen(peer0) // then @@ -50,7 +49,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should replace peer with new entry (the one with new IP)") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) table.updateLastSeen(peer1) // when val newPeer1 = peer1.copy(endpoint = Endpoint("otherIP", 0, 0)) @@ -62,7 +61,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should move peer to the end of the bucket (meaning it's been seen lately)") { // given implicit val ping: KademliaRPCMock = pingOk - val table = 
PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) table.updateLastSeen(peer2) table.updateLastSeen(peer1) table.updateLastSeen(peer3) @@ -79,7 +78,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should add peer to the end of the bucket (meaning it's been seen lately)") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) table.updateLastSeen(peer2) table.updateLastSeen(peer3) bucketEntriesAt(DISTANCE_4, table) shouldEqual Seq(peer2, peer3) @@ -92,7 +91,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("no peers should be pinged") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) table.updateLastSeen(peer2) table.updateLastSeen(peer3) bucketEntriesAt(DISTANCE_4, table) shouldEqual Seq(peer2, peer3) @@ -107,7 +106,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should ping the oldest peer to check if it responds") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) thatBucket4IsFull(table) // when table.updateLastSeen(peer4) @@ -119,7 +118,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should drop the new peer") { // given implicit val ping: KademliaRPCMock = pingOk - val table = PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) thatBucket4IsFull(table) // when table.updateLastSeen(peer4) @@ -131,7 +130,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { it("should add the new peer and drop the oldest one") { // given implicit val ping: KademliaRPCMock = pingFail - val table = 
PeerTable[PeerNode, Id](local.key, 3) + val table = PeerTable[PeerNode, IO](local.key, 3) thatBucket4IsFull(table) // when table.updateLastSeen(peer4) @@ -142,7 +141,7 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { } } - private def thatBucket4IsFull(table: PeerTable[PeerNode, Id]): Unit = { + private def thatBucket4IsFull(table: PeerTable[PeerNode, IO]): Unit = { table.updateLastSeen(peer1) table.updateLastSeen(peer2) table.updateLastSeen(peer3) @@ -150,26 +149,26 @@ class KademliaSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach { private def bucketEntriesAt( distance: Option[Int], - table: PeerTable[PeerNode, Id] + table: PeerTable[PeerNode, IO] ): Seq[PeerNode] = distance.map(d => table.table(d).map(_.entry)).getOrElse(Seq.empty[PeerNode]) private val pingOk: KademliaRPCMock = new KademliaRPCMock(returns = true) private val pingFail: KademliaRPCMock = new KademliaRPCMock(returns = false) - private class KademliaRPCMock(returns: Boolean) extends KademliaRPC[Id] { + private class KademliaRPCMock(returns: Boolean) extends KademliaRPC[IO] { val pingedPeers: mutable.MutableList[PeerNode] = mutable.MutableList.empty[PeerNode] - def ping(peer: PeerNode): Boolean = { + def ping(peer: PeerNode): IO[Boolean] = { pingedPeers += peer - returns + returns.pure[IO] } - def lookup(key: Seq[Byte], peer: PeerNode): Seq[PeerNode] = Seq.empty[PeerNode] + def lookup(key: Seq[Byte], peer: PeerNode): IO[Seq[PeerNode]] = Seq.empty[PeerNode].pure[IO] def receive( - pingHandler: PeerNode => Id[Unit], - lookupHandler: (PeerNode, Array[Byte]) => Id[Seq[PeerNode]] - ): Id[Unit] = () - def shutdown(): Id[Unit] = () + pingHandler: PeerNode => IO[Unit], + lookupHandler: (PeerNode, Array[Byte]) => IO[Seq[PeerNode]] + ): IO[Unit] = ().pure[IO] + def shutdown(): IO[Unit] = ().pure[IO] } private def createPeer(id: String): PeerNode = { diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/PeerTableSpec.scala 
b/comm/src/test/scala/coop/rchain/comm/discovery/PeerTableSpec.scala index 706707f7620..86e57a1cf24 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/PeerTableSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/PeerTableSpec.scala @@ -1,12 +1,13 @@ package coop.rchain.comm.discovery -import scala.util.Random +import cats.effect.IO +import cats.effect.unsafe.implicits.global +import cats.syntax.all._ +import scala.util.Random import cats.{Id, catsInstancesForId => _} - import coop.rchain.catscontrib.effect.implicits._ import coop.rchain.comm._ - import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -22,22 +23,22 @@ class PeerTableSpec extends AnyFlatSpec with Matchers with Inside { arr } - implicit val ping: KademliaRPC[Id] = new KademliaRPC[Id] { - def ping(node: PeerNode): Boolean = true - def lookup(key: Seq[Byte], peer: PeerNode): Seq[PeerNode] = Seq.empty[PeerNode] + implicit val ping: KademliaRPC[IO] = new KademliaRPC[IO] { + def ping(node: PeerNode): IO[Boolean] = true.pure[IO] + def lookup(key: Seq[Byte], peer: PeerNode): IO[Seq[PeerNode]] = Seq.empty[PeerNode].pure[IO] } "Peer that is already in the table" should "get updated" in { val id = randBytes(addressWidth) val peer0 = PeerNode(NodeIdentifier(id), Endpoint("new", 0, 0)) val peer1 = PeerNode(NodeIdentifier(id), Endpoint("changed", 0, 0)) - val table = PeerTable[PeerNode, Id](home.key) + val table = PeerTable[PeerNode, IO](home.key) table.updateLastSeen(peer0) - inside(table.peers) { + inside(table.peers.unsafeRunSync()) { case p +: Nil => p should equal(peer0) } table.updateLastSeen(peer1) - inside(table.peers) { + inside(table.peers.unsafeRunSync()) { case p +: Nil => p should equal(peer1) } } diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala index 76a8173072e..92820e1c504 100644 --- 
a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.comm.rp +import cats.effect.unsafe.implicits.global import cats.{catsInstancesForId => _, _} import coop.rchain.catscontrib.effect.implicits._ import coop.rchain.catscontrib.ski._ @@ -15,7 +16,7 @@ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ -import cats.effect.Ref +import cats.effect.{IO, Ref} class ClearConnectionsSpec extends AnyFunSpec @@ -27,10 +28,10 @@ class ClearConnectionsSpec val src: PeerNode = peer("src") val networkId = "test" - implicit val transport = new TransportLayerStub[Id] - implicit val log = new Log.NOPLog[Id] - implicit val metric = new Metrics.MetricsNOP[Id] - implicit val time = new LogicalTime[Id] + implicit val transport = new TransportLayerStub[IO] + implicit val log = new Log.NOPLog[IO] + implicit val metric = new Metrics.MetricsNOP[IO] + implicit val time = new LogicalTime[IO] override def beforeEach(): Unit = { transport.reset() @@ -46,9 +47,9 @@ class ClearConnectionsSpec implicit val connections = mkConnections(peer("A"), peer("B")) implicit val rpconf = conf(maxNumOfConnections = 5) // when - Connect.clearConnections[Id] + Connect.clearConnections[IO] // then - connections.get.size shouldBe 2 + connections.get.unsafeRunSync().size shouldBe 2 connections.get should contain(peer("A")) connections.get should contain(peer("B")) } @@ -57,7 +58,7 @@ class ClearConnectionsSpec implicit val connections = mkConnections(peer("A"), peer("B")) implicit val rpconf = conf(maxNumOfConnections = 5) // when - val cleared = Connect.clearConnections[Id] + val cleared = Connect.clearConnections[IO] // then cleared shouldBe 0 } @@ -70,7 +71,7 @@ class ClearConnectionsSpec implicit val rpconf = conf(maxNumOfConnections = 5, numOfConnectionsPinged = 2) // when - Connect.clearConnections[Id] + 
Connect.clearConnections[IO] // then transport.requests.size shouldBe 2 transport.requests.map(_.peer) should contain(peer("A")) @@ -86,9 +87,9 @@ class ClearConnectionsSpec case _ => alwaysSuccess }) // when - Connect.clearConnections[Id] + Connect.clearConnections[IO] // then - connections.get.size shouldBe 3 + connections.get.unsafeRunSync().size shouldBe 3 connections.get should not contain peer("A") connections.get should contain(peer("B")) connections.get should contain(peer("C")) @@ -104,9 +105,9 @@ class ClearConnectionsSpec case _ => alwaysSuccess }) // when - Connect.clearConnections[Id] + Connect.clearConnections[IO] // then - connections.get.size shouldBe 3 + connections.get.unsafeRunSync().size shouldBe 3 connections.get shouldEqual List(peer("D"), peer("B"), peer("C")) } @@ -119,7 +120,7 @@ class ClearConnectionsSpec case _ => alwaysSuccess }) // when - val cleared = Connect.clearConnections[Id] + val cleared = Connect.clearConnections[IO] // then cleared shouldBe 1 } @@ -129,13 +130,13 @@ class ClearConnectionsSpec private def peer(name: String, host: String = "host"): PeerNode = PeerNode(NodeIdentifier(name.getBytes), Endpoint(host, 80, 80)) - private def mkConnections(peers: PeerNode*): ConnectionsCell[Id] = - Ref.unsafe[Id, Connections](peers.toList) + private def mkConnections(peers: PeerNode*): ConnectionsCell[IO] = + Ref[IO].of(peers.toList).unsafeRunSync() private def conf( maxNumOfConnections: Int, numOfConnectionsPinged: Int = 5 - ): RPConfAsk[Id] = + ): RPConfAsk[IO] = new ConstApplicativeAsk( RPConf( clearConnections = ClearConnectionsConf(numOfConnectionsPinged), diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala index fea7aa68022..0ab6e5d0cfd 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala @@ -15,7 +15,7 @@ import org.scalatest.funspec.AnyFunSpec import 
org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} -import cats.effect.Ref +import cats.effect.{IO, Ref} class ConnectSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with AppendedClues { @@ -24,7 +24,7 @@ class ConnectSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with val remote: PeerNode = peerNode("remote", 40401) val networkId = "test" - type Effect[A] = CommErrT[Id, A] + type Effect[A] = CommErrT[IO, A] implicit val logEff = new Log.NOPLog[Effect] implicit val timeEff = new LogicalTime[Effect] diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ConnectionsSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ConnectionsSpec.scala index c4564c426a0..6192d016de5 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/ConnectionsSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ConnectionsSpec.scala @@ -1,23 +1,14 @@ package coop.rchain.comm.rp -import cats.{catsInstancesForId => _, _} -import coop.rchain.catscontrib.effect.implicits._ +import cats.{catsInstancesForId => _} import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.Connections._ import coop.rchain.comm.rp.Connect._ -import coop.rchain.metrics.Metrics -import coop.rchain.p2p.EffectsTestInstances._ -import coop.rchain.shared._ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers class ConnectionsSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with AppendedClues { - - implicit val logEff = new Log.NOPLog[Id] - implicit val timeEff = new LogicalTime[Id] - implicit val metricEff = new Metrics.MetricsNOP[Id] - describe("Connections") { describe("addConn") { describe("if peer is not on the list yet") { diff --git a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala index d2e1ced1a22..3c8d7846335 100644 --- 
a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.comm.rp +import cats.effect.unsafe.implicits.global import cats.syntax.all._ import cats.{catsInstancesForId => _, _} import org.scalatest.funspec.AnyFunSpec @@ -15,7 +16,7 @@ import coop.rchain.shared._ import org.scalatest._ import scala.concurrent.duration._ -import cats.effect.Ref +import cats.effect.{IO, Ref} class FindAndConnectSpec extends AnyFunSpec @@ -25,13 +26,13 @@ class FindAndConnectSpec import ScalaTestCats._ - type Effect[A] = Id[A] + type Effect[A] = IO[A] val src: PeerNode = peer("src") val deftimeout: FiniteDuration = FiniteDuration(1, MILLISECONDS) - implicit val log = new Log.NOPLog[Id] + implicit val log = new Log.NOPLog[IO] implicit val time = new LogicalTime[Effect] - implicit val metric = new Metrics.MetricsNOP[Id] + implicit val metric = new Metrics.MetricsNOP[IO] implicit val nodeDiscovery = new NodeDiscoveryStub[Effect]() implicit val rpConf = conf(defaultTimeout = deftimeout) @@ -71,7 +72,7 @@ class FindAndConnectSpec // when val result = Connect.findAndConnect[Effect](connect) // then - result.size shouldBe (2) + result.unsafeRunSync().size shouldBe (2) result should contain(peer("A")) result should not contain (peer("B")) result should contain(peer("C")) @@ -99,7 +100,7 @@ class FindAndConnectSpec // when val result = Connect.findAndConnect[Effect](connect) // then - result.size shouldBe (1) + result.unsafeRunSync().size shouldBe (1) result should contain(peer("A")) result should not contain (peer("B")) result should not contain (peer("C")) @@ -112,16 +113,18 @@ class FindAndConnectSpec private def peer(name: String): PeerNode = PeerNode(NodeIdentifier(name.getBytes), Endpoint("host", 80, 80)) - private def mkConnections(peers: PeerNode*): ConnectionsCell[Id] = - Ref.unsafe[Id, Connections](peers.reverse.foldLeft(Connections.empty) { - case (acc, el) => 
acc.addConn(el) - }) + private def mkConnections(peers: PeerNode*): ConnectionsCell[IO] = + Ref + .of[IO, Connections](peers.reverse.foldLeft(Connections.empty) { + case (acc, el) => acc.addConn(el) + }) + .unsafeRunSync() private def conf( maxNumOfConnections: Int = 5, numOfConnectionsPinged: Int = 5, defaultTimeout: FiniteDuration - ): RPConfAsk[Id] = + ): RPConfAsk[IO] = new ConstApplicativeAsk( RPConf( clearConnections = ClearConnectionsConf(numOfConnectionsPinged), diff --git a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala index a26222793e5..eda6c0497e9 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/HandleProtocolHandshakeSpec.scala @@ -8,11 +8,11 @@ import coop.rchain.metrics.Metrics import coop.rchain.p2p.EffectsTestInstances._ import coop.rchain.shared._ import coop.rchain.shared.scalatestcontrib.convertToAnyShouldWrapper -import fs2.concurrent.Queue +import fs2.concurrent.Channel import org.scalatest.flatspec.AnyFlatSpec import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks -import RChainScheduler._ import cats.effect.Ref +import cats.effect.unsafe.implicits.global class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChecks { @@ -121,7 +121,7 @@ class HandleProtocolHandshakeSpec extends AnyFlatSpec with ScalaCheckPropertyChe implicit val connectionRef = Ref.unsafe(Connect.Connections.empty) for { - routingMessageQueue <- Queue.unbounded[F, RoutingMessage] + routingMessageQueue <- Channel.unbounded[F, RoutingMessage] // Remote peer protocol handshake message protocol = ProtocolHelper.protocolHandshake(remotePeer, networkId = "test-network") diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ScalaTestCats.scala b/comm/src/test/scala/coop/rchain/comm/rp/ScalaTestCats.scala index 91e7595ccb3..d6214c4bfad 100644 --- 
a/comm/src/test/scala/coop/rchain/comm/rp/ScalaTestCats.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ScalaTestCats.scala @@ -1,22 +1,22 @@ package coop.rchain.comm.rp -import cats.Id - +import cats.effect.IO +import cats.effect.unsafe.implicits.global import org.scalatest.enablers.Containing object ScalaTestCats { - implicit def idContaining[C](implicit C: Containing[C]): Containing[Id[C]] = - new Containing[Id[C]] { - def contains(container: cats.Id[C], element: Any): Boolean = { - val con: C = container + implicit def idContaining[C](implicit C: Containing[C]): Containing[IO[C]] = + new Containing[IO[C]] { + def contains(container: IO[C], element: Any): Boolean = { + val con: C = container.unsafeRunSync() C.contains(con, element) } - def containsNoneOf(container: cats.Id[C], elements: Seq[Any]): Boolean = { - val con: C = container + def containsNoneOf(container: IO[C], elements: Seq[Any]): Boolean = { + val con: C = container.unsafeRunSync() C.containsNoneOf(con, elements) } - def containsOneOf(container: cats.Id[C], elements: Seq[Any]): Boolean = { - val con: C = container + def containsOneOf(container: IO[C], elements: Seq[Any]): Boolean = { + val con: C = container.unsafeRunSync() C.containsOneOf(con, elements) } } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala index 55e71ac3d3b..ea60b492db5 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.comm.transport import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.comm.CommError._ import coop.rchain.comm._ diff --git a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala index 
4ea5c74d731..1e6f4b60192 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/PacketStoreRestoreSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.comm.transport import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.comm.protocol.routing._ import org.scalacheck.Gen @@ -15,9 +16,6 @@ class PacketStoreRestoreSpec extends AnyFunSpec with Matchers with ScalaCheckDri import PacketOps._ - import scala.concurrent.ExecutionContext.Implicits.global - implicit val cs: ContextShift[IO] = IO.contextShift(global) - describe("Packet store & restore") { it("should store and restore to the original Packet") { forAll(contentGen) { content: Array[Byte] => diff --git a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala index d1f73d0c3e6..bad77f9c06f 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/StreamHandlerSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.comm.transport import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.catscontrib.ski._ import coop.rchain.comm._ diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala index 51ce38dfc6f..d7090e9cd78 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerSpec.scala @@ -1,6 +1,6 @@ package coop.rchain.comm.transport -import cats.effect.Sync +import cats.effect.Async import com.google.protobuf.ByteString import coop.rchain.comm.CommError.CommErr import coop.rchain.comm._ @@ -10,9 +10,8 @@ import coop.rchain.comm.syntax._ import 
org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike -import cats.effect.Temporal -abstract class TransportLayerSpec[F[_]: Sync: Temporal, E <: Environment] +abstract class TransportLayerSpec[F[_]: Async, E <: Environment] extends TransportLayerRuntime[F, E] with AnyWordSpecLike with Matchers diff --git a/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala b/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala index c9de1d73289..23b76c8e35c 100644 --- a/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala +++ b/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala @@ -19,7 +19,7 @@ object EffectsTestInstances { val networkId = "test" - class LogicalTime[F[_]: Sync] extends Time[F] { + class LogicalTime[F[_]: Sync] { var clock: Long = 0 def currentMillis: F[Long] = Sync[F].delay { diff --git a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala index 2515a9729ee..15b59fcde3c 100644 --- a/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala +++ b/graphz/src/test/scala/coop/rchain/graphz/GraphzSpec.scala @@ -6,6 +6,7 @@ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import cats.effect.Ref +import cats.effect.unsafe.implicits.global class GraphzSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with AppendedClues { diff --git a/models/src/main/scala/coop/rchain/models/rholang/sorter/ordering.scala b/models/src/main/scala/coop/rchain/models/rholang/sorter/ordering.scala index cc563366017..9488e5b05f8 100644 --- a/models/src/main/scala/coop/rchain/models/rholang/sorter/ordering.scala +++ b/models/src/main/scala/coop/rchain/models/rholang/sorter/ordering.scala @@ -1,9 +1,10 @@ package coop.rchain.models.rholang.sorter -import cats.effect.{ExitCase, Sync} +import cats.effect.{Sync} import coop.rchain.models.Par import 
coop.rchain.models.rholang.sorter.ScoredTerm._ import cats.Eval +import cats.effect.kernel.Resource.ExitCase import cats.implicits._ import coop.rchain.catscontrib.effect.implicits.sEval diff --git a/node/src/main/scala/coop/rchain/node/Main.scala b/node/src/main/scala/coop/rchain/node/Main.scala index 7c9962230c7..25a5711994f 100644 --- a/node/src/main/scala/coop/rchain/node/Main.scala +++ b/node/src/main/scala/coop/rchain/node/Main.scala @@ -21,8 +21,7 @@ object Main { LoggerFactory.getLogger(getClass).error("Unhandled exception in thread " + thread.getName, ex) }) - // Main scheduler for all CPU bounded tasks and ContextShift - import RChainScheduler._ + import cats.effect.unsafe.implicits.global implicit val console: ConsoleIO[IO] = NodeMain.consoleIO implicit val log: Log[IO] = effects.log diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index c7d12e1eb67..7d33636f92b 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -1,6 +1,7 @@ package coop.rchain.node -import cats.effect.{Async, ConcurrentEffect, Resource, Sync} +import cats.effect.std.Dispatcher +import cats.effect.{Async, Resource, Sync} import coop.rchain.casper.protocol.deploy.v1.DeployServiceFs2Grpc import coop.rchain.casper.protocol.propose.v1.ProposeServiceFs2Grpc import coop.rchain.node.model.ReplFs2Grpc @@ -17,7 +18,7 @@ import scala.concurrent.duration.FiniteDuration package object api { - def acquireInternalServer[F[_]: Sync: AsyncEffect]( + def acquireInternalServer[F[_]: Async]( host: String, port: Int, grpcEC: ExecutionContext, diff --git a/node/src/main/scala/coop/rchain/node/diagnostics/effects/package.scala b/node/src/main/scala/coop/rchain/node/diagnostics/effects/package.scala index f11119abe97..d8510c02654 100644 --- a/node/src/main/scala/coop/rchain/node/diagnostics/effects/package.scala +++ 
b/node/src/main/scala/coop/rchain/node/diagnostics/effects/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node.diagnostics -import cats.effect.{ExitCase, Sync} +import cats.effect.{Outcome, Sync} import cats.syntax.all._ import cats.mtl.ApplicativeLocal import coop.rchain.metrics.Metrics.Source @@ -104,9 +104,9 @@ package object effects { Sync[F].bracketCase( mark(s"started-$label") )(_ => block) { - case (_, ExitCase.Completed) => mark(s"finished-$label") - case (_, ExitCase.Error(_)) => mark(s"failed-$label") - case (_, ExitCase.Canceled) => mark(s"cancelled-$label") + case (_, Outcome.Succeeded(_)) => mark(s"finished-$label") + case (_, Outcome.Errored(_)) => mark(s"failed-$label") + case (_, Outcome.Canceled()) => mark(s"cancelled-$label") } } diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index bd28960e86e..e8764db741a 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{Async, ConcurrentEffect, IO, Sync} +import cats.effect.{Async, IO, Sync} import cats.mtl._ import cats.syntax.all._ import cats.{Applicative, Monad, Parallel} @@ -32,14 +32,14 @@ package object effects { def nodeDiscovery[F[_]: Monad: KademliaStore: KademliaRPC](id: NodeIdentifier): NodeDiscovery[F] = NodeDiscovery.kademlia(id) - def kademliaRPC[F[_]: Sync: AsyncEffect: RPConfAsk: Metrics]( + def kademliaRPC[F[_]: Async: RPConfAsk: Metrics]( networkId: String, timeout: FiniteDuration, grpcEC: ExecutionContext ): KademliaRPC[F] = new GrpcKademliaRPC(networkId, timeout, grpcEC) - def transportClient[F[_]: Async: ContextShift: AsyncEffect: Parallel: Log: Metrics]( + def transportClient[F[_]: Async: Parallel: Log: Metrics]( networkId: String, certPath: Path, keyPath: Path, diff --git a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala 
b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala index dff6db89699..06b102d6940 100644 --- a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala +++ b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala @@ -9,18 +9,18 @@ import coop.rchain.casper.protocol.BlockMessage import coop.rchain.casper.state.instances.ProposerState import coop.rchain.shared.Log import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import cats.effect.{Deferred, Ref} -import cats.effect.std.Semaphore +import cats.effect.std.{PQueue, Semaphore} object ProposerInstance { def create[F[_]: Async: Log]( - proposeRequestsQueue: Queue[F, (Boolean, Deferred[F, ProposerResult])], + proposeRequestsQueue: Channel[F, (Boolean, Deferred[F, ProposerResult])], proposer: Proposer[F], state: Ref[F, ProposerState[F]] ): Stream[F, (ProposeResult, Option[BlockMessage])] = { // stream of requests to propose - val in = proposeRequestsQueue.dequeue + val in = proposeRequestsQueue.stream // max number of concurrent attempts to propose. Actual propose can happen only one at a time, but clients // are free to make propose attempt. In that case proposeID returned will be None. 
diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala index 29675e4a09b..f8aa1b29797 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala @@ -18,7 +18,6 @@ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import java.nio.file.Path -import scala.concurrent.ExecutionContext import scala.concurrent.ExecutionContext.global object StateBalances { @@ -41,7 +40,7 @@ object StateBalances { } yield unf } - def read[F[_]: Async: Parallel: ContextShift]( + def read[F[_]: Async: Parallel]( shardId: String, blockHash: String, vaultTreeHashMapDepth: Int, diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala index c9a352ec4c6..6e30a4c4e9b 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala @@ -94,6 +94,7 @@ object StateBalanceMain { } } yield () + import cats.effect.unsafe.implicits.global task.unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala index 0153d8db530..d8f23701b57 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala @@ -203,6 +203,7 @@ object MergeBalanceMain { } } yield adjustedAccounts + import cats.effect.unsafe.implicits.global val accountMap = task.unsafeRunSync val file = mergeFile.toFile diff --git 
a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala index f76553d31cc..d44aa653a3d 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala @@ -110,6 +110,7 @@ object TransactionBalanceMain { } } yield () + import cats.effect.unsafe.implicits.global task.unsafeRunSync } } diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala index 2d996625345..120a600d01c 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala @@ -116,13 +116,13 @@ object TransactionBalances { } yield perValidatorVaultAddr } - def generateRevAccountFromWalletAndBond[F[_]: Sync: ContextShift: Log]( + def generateRevAccountFromWalletAndBond[F[_]: Async: Log]( walletPath: Path, bondsPath: Path ): F[Map[String, RevAccount]] = for { - bondsMap <- BondsParser.parse(bondsPath) - vaults <- VaultParser.parse(walletPath) + bondsMap <- BondsParser.parse[F](fs2.io.file.Path.fromNioPath(bondsPath)) + vaults <- VaultParser.parse[F](fs2.io.file.Path.fromNioPath(walletPath)) accountMap = vaults .map(v => (v.revAddress.toBase58, RevAccount(v.revAddress, v.initialBalance, NormalVault))) .toMap @@ -169,7 +169,7 @@ object TransactionBalances { genesisVault.copy(vaultMaps = resultMap) } - def getGenesisVaultMap[F[_]: Sync: ContextShift: Span: Log]( + def getGenesisVaultMap[F[_]: Async: Span: Log]( walletPath: Path, bondsPath: Path, runtime: RhoRuntime[F], @@ -221,7 +221,7 @@ object TransactionBalances { } yield blockMes } - def main[F[_]: 
Async: Parallel: ContextShift]( + def main[F[_]: Async: Parallel]( dataDir: Path, walletPath: Path, bondPath: Path, diff --git a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index fb665aa4ec2..bcb01539e76 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -1,6 +1,6 @@ package coop.rchain.node.runtime -import cats.effect.{Async, ConcurrentEffect, IO, Resource, Sync} +import cats.effect.{Async, IO, Resource, Sync} import cats.syntax.all._ import com.typesafe.config.Config import coop.rchain.casper.protocol.deploy.v1 @@ -25,7 +25,7 @@ import coop.rchain.node.{api, web} import coop.rchain.sdk.syntax.all._ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ -import fs2.concurrent.Queue +import fs2.concurrent.Channel import io.grpc.{Metadata, Server} import kamon.Kamon import kamon.system.SystemMetrics @@ -45,11 +45,11 @@ object NetworkServers { */ // format: off def create[F[_] - /* Execution */ : AsyncEffect: Temporal: ContextShift + /* Execution */ : Async /* Comm */ : TransportLayer: NodeDiscovery: KademliaStore: RPConfAsk: ConnectionsCell /* Diagnostics */ : Log: Metrics] // format: on ( - routingMessageQueue: Queue[F, RoutingMessage], + routingMessageQueue: Channel[F, RoutingMessage], grpcServices: GrpcServices[F], webApi: WebApi[F], adminWebApi: AdminWebApi[F], @@ -90,7 +90,7 @@ object NetworkServers { } yield () } - def internalServer[F[_]: Async: AsyncEffect: Log]( + def internalServer[F[_]: Async: Log]( nodeConf: NodeConf, replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], @@ -113,7 +113,7 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def externalServer[F[_]: Async: AsyncEffect: Log]( + def externalServer[F[_]: Async: Log]( nodeConf: NodeConf, deployService: v1.DeployServiceFs2Grpc[F, Metadata], 
grpcEC: ExecutionContext @@ -132,9 +132,9 @@ object NetworkServers { nodeConf.apiServer.maxConnectionAgeGrace ) - def protocolServer[F[_]: Async: AsyncEffect: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Temporal]( + def protocolServer[F[_]: Async: TransportLayer: ConnectionsCell: RPConfAsk: Log: Metrics: Temporal]( nodeConf: NodeConf, - routingMessageQueue: Queue[F, RoutingMessage] + routingMessageQueue: Channel[F, RoutingMessage] ): Resource[F, Unit] = { val server = GrpcTransportServer.acquireServer[F]( nodeConf.protocolServer.networkId, @@ -148,7 +148,7 @@ object NetworkServers { server.resource( HandleMessages.handle[F](_, routingMessageQueue), - blob => routingMessageQueue.enqueue1(RoutingMessage(blob.sender, blob.packet)) + blob => routingMessageQueue.send(RoutingMessage(blob.sender, blob.packet)).void ) } @@ -164,12 +164,12 @@ object NetworkServers { grpcEC ) - def webApiServer[F[_]: ContextShift: AsyncEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def webApiServer[F[_]: Async: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], reportingRoutes: ReportingHttpRoutes[F], prometheusReporter: NewPrometheusReporter - ): Resource[F, server.Server[F]] = + ): Resource[F, server.Server] = web.acquireHttpServer[F]( nodeConf.apiServer.enableReporting, nodeConf.apiServer.host, @@ -180,12 +180,12 @@ object NetworkServers { reportingRoutes ) - def adminWebApiServer[F[_]: ContextShift: AsyncEffect: Temporal: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( + def adminWebApiServer[F[_]: Async: NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( nodeConf: NodeConf, webApi: WebApi[F], adminWebApi: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F] - ): Resource[F, server.Server[F]] = + ): Resource[F, server.Server] = web.acquireAdminHttpServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portAdminHttp, @@ -209,6 +209,7 @@ object NetworkServers { if (nodeConf.metrics.sigar) 
SystemMetrics.startCollecting() } + import scala.concurrent.ExecutionContext.Implicits.global // TODO: check new version of Kamon if supports custom effect def stop: F[Unit] = Async[F].async_ { cb => Kamon.stopAllReporters().onComplete { diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala index 3f69d581c22..d8e59700f06 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala @@ -14,7 +14,7 @@ final case class NodeCallCtx(trace: TraceId) { object NodeCallCtx { def init: NodeCallCtx = NodeCallCtx(Trace.next) - final case class NodeCallCtxReader[F[_]: AsyncEffect]() { + final case class NodeCallCtxReader[F[_]: Async]() { /** * Current implementation of Span uses ReaderT layer to hold the local state for tracing. diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index 8e505aa70f1..579859baa14 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -1,7 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{AsyncEffect, Resource, Sync} +import cats.effect.{Async, Resource, Sync, Temporal} import cats.syntax.all._ import coop.rchain.casper.protocol.client.{DeployRuntime, GrpcDeployService, GrpcProposeService} import coop.rchain.crypto.PrivateKey @@ -14,8 +14,7 @@ import coop.rchain.node.effects import coop.rchain.node.effects.{ConsoleIO, GrpcReplClient} import coop.rchain.node.web.VersionInfo import coop.rchain.shared.StringOps.StringColors -import coop.rchain.shared.{Log, TerminalMode, Time} -import monix.execution.Scheduler +import coop.rchain.shared.{Log, TerminalMode} import org.slf4j.bridge.SLF4JBridgeHandler import java.io.File @@ -23,7 +22,6 @@ import java.nio.file.Path import 
scala.collection.JavaConverters._ import scala.tools.jline.console.ConsoleReader import scala.tools.jline.console.completer.StringsCompleter -import cats.effect.Temporal object NodeMain { @@ -34,7 +32,7 @@ object NodeMain { * * @param options command line options */ - def startNode[F[_]: AsyncEffect: Parallel: ContextShift: Temporal: ConsoleIO: Log]( + def startNode[F[_]: Parallel: Async: ConsoleIO: Log]( options: commandline.Options ): F[Unit] = Sync[F].defer { // Create merged configuration from CLI options and config file @@ -87,7 +85,7 @@ object NodeMain { * @param options command line options * @param console console */ - def runCLI[F[_]: Sync: AsyncEffect: ConsoleIO: Temporal]( + def runCLI[F[_]: Async: ConsoleIO]( options: commandline.Options ): F[Unit] = { val grpcPort = @@ -113,8 +111,6 @@ object NodeMain { options.grpcMaxRecvMessageSize() ) - implicit val time: Time[F] = Time.fromTimer - val program = subcommand(options) match { case Eval(files, printUnmatchedSendsOnly) => new ReplRuntime().evalProgram[F](files, printUnmatchedSendsOnly) diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index d4c7bc9689e..2c312fc8910 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -1,7 +1,7 @@ package coop.rchain.node.runtime import cats.Parallel -import cats.effect.{AsyncEffect, Resource, Sync} +import cats.effect.{Async, Ref, Resource, Sync, Temporal} import cats.mtl._ import cats.syntax.all._ import com.typesafe.config.Config @@ -75,7 +75,7 @@ object NodeRuntime { } yield () } -class NodeRuntime[F[_]: AsyncEffect: Parallel: Temporal: ContextShift: LocalEnvironment: Log] private[node] ( +class NodeRuntime[F[_]: Parallel: Async: LocalEnvironment: Log] private[node] ( nodeConf: NodeConf, kamonConf: Config, id: NodeIdentifier diff --git 
a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index b42a7d40101..1d6d9fca860 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -43,12 +43,13 @@ import coop.rchain.shared._ import coop.rchain.shared.syntax._ import coop.rchain.store.KeyValueStoreManager import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import monix.execution.Scheduler import cats.effect.{Deferred, Ref, Temporal} +import monix.eval.Coeval object Setup { - def setupNodeProgram[F[_]: Async: Parallel: ContextShift: Temporal: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( + def setupNodeProgram[F[_]: Async: Parallel: LocalEnvironment: TransportLayer: NodeDiscovery: Log: Metrics]( storeManager: KeyValueStoreManager[F], rpConnections: ConnectionsCell[F], rpConfAsk: ApplicativeAsk[F, RPConf], @@ -58,17 +59,13 @@ object Setup { ): F[ ( Stream[F, Unit], // Node startup process (protocol messages handling) - Queue[F, RoutingMessage], + Channel[F, RoutingMessage], GrpcServices[F], WebApi[F], AdminWebApi[F], ReportingHttpRoutes[F] ) ] = { - // TODO: temporary until Time is removed completely - // https://github.com/rchain/rchain/issues/3730 - implicit val time = Time.fromTimer(Temporal[F]) - for { // Block execution tracker executionTracker <- StatefulExecutionTracker[F] @@ -157,13 +154,13 @@ object Setup { } // Propose request is a tuple - Casper, async flag and deferred proposer result that will be resolved by proposer - proposerQueue <- Queue.unbounded[F, (Boolean, Deferred[F, ProposerResult])] + proposerQueue <- Channel.unbounded[F, (Boolean, Deferred[F, ProposerResult])] triggerProposeFOpt: Option[ProposeFunction[F]] = if (proposer.isDefined) Some( (isAsync: Boolean) => for { d <- Deferred[F, ProposerResult] - _ <- proposerQueue.enqueue1((isAsync, d)) + _ <- proposerQueue.send((isAsync, d)) r <- d.get } 
yield r ) @@ -171,18 +168,18 @@ object Setup { proposerStateRefOpt <- triggerProposeFOpt.traverse(_ => Ref.of(ProposerState[F]())) // Queue of received blocks from gRPC API - incomingBlocksQueue <- Queue.unbounded[F, BlockMessage] + incomingBlocksQueue <- Channel.unbounded[F, BlockMessage] // Stream of blocks received over the network - incomingBlockStream = incomingBlocksQueue.dequeue + incomingBlockStream = incomingBlocksQueue.stream // Queue of validated blocks, result of block processor - validatedBlocksQueue <- Queue.unbounded[F, BlockMessage] + validatedBlocksQueue <- Channel.unbounded[F, BlockMessage] // Validated blocks stream with auto-propose trigger - validatedBlocksStream = validatedBlocksQueue.dequeue.evalTap { _ => + validatedBlocksStream = validatedBlocksQueue.stream.evalTap { _ => // If auto-propose is enabled, trigger propose immediately after block finished validation triggerProposeFOpt.traverse(_(true)) whenA conf.autopropose } // Queue of network (protocol) messages - routingMessageQueue <- Queue.unbounded[F, RoutingMessage] + routingMessageQueue <- Channel.unbounded[F, RoutingMessage] // Block receiver, process incoming blocks and order by validated dependencies blockReceiverState <- { @@ -196,7 +193,7 @@ object Setup { incomingBlockStream, validatedBlocksStream, conf.casper.shardName, - incomingBlocksQueue.enqueue1 + incomingBlocksQueue.send(_).void ) } // Blocks from receiver with fork-choice tips request on idle @@ -273,8 +270,7 @@ object Setup { cacheTransactionAPI <- Transaction.cacheTransactionAPI(transactionAPI, storeManager) // Peer message stream - peerMessageStream = routingMessageQueue - .dequeueChunk(maxSize = 1) + peerMessageStream = routingMessageQueue.stream .parEvalMapUnorderedProcBounded { case RoutingMessage(peer, packet) => toCasperMessageProto(packet).toEither @@ -298,7 +294,7 @@ object Setup { implicit val br = blockRetriever for { _ <- BlockRetriever[F].requestAll(conf.casper.requestedBlocksTimeout) - _ <- 
Time[F].sleep(conf.casper.casperLoopInterval) + _ <- Temporal[F].sleep(conf.casper.casperLoopInterval) } yield () } diff --git a/node/src/main/scala/coop/rchain/node/web/WebApiDocsV1.scala b/node/src/main/scala/coop/rchain/node/web/WebApiDocsV1.scala index b39372e5537..ff180e0e03b 100644 --- a/node/src/main/scala/coop/rchain/node/web/WebApiDocsV1.scala +++ b/node/src/main/scala/coop/rchain/node/web/WebApiDocsV1.scala @@ -1,6 +1,6 @@ package coop.rchain.node.web -import cats.effect.Sync +import cats.effect.kernel.Concurrent import coop.rchain.node.api.v1.{WebApiAdminEndpoints, WebApiEndpoints} import endpoints4s.http4s.server import endpoints4s.http4s.server.Endpoints @@ -43,7 +43,7 @@ object WebApiDocs /** * OpenAPI endpoint definition (GET /openapi.json). */ -final case class WebApiDocServer[F[_]: Sync]() +final case class WebApiDocServer[F[_]: Concurrent]() extends Endpoints[F] with server.JsonEntitiesFromEncodersAndDecoders { implicit val jCodec: endpoints4s.Encoder[OpenApi, String] = OpenApi.stringEncoder diff --git a/node/src/main/scala/coop/rchain/node/web/WebApiRoutes.scala b/node/src/main/scala/coop/rchain/node/web/WebApiRoutes.scala index a8d28ccabd7..99d307b2570 100644 --- a/node/src/main/scala/coop/rchain/node/web/WebApiRoutes.scala +++ b/node/src/main/scala/coop/rchain/node/web/WebApiRoutes.scala @@ -1,6 +1,6 @@ package coop.rchain.node.web -import cats.effect.Sync +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.node.api.WebApi import coop.rchain.node.api.WebApi._ @@ -12,7 +12,7 @@ import org.http4s.{HttpRoutes, Response} object WebApiRoutes { - def service[F[_]: Sync: Log](webApi: WebApi[F]): HttpRoutes[F] = { + def service[F[_]: Async: Log](webApi: WebApi[F]): HttpRoutes[F] = { import coop.rchain.casper.protocol.{BlockInfo, LightBlockInfo} import io.circe._ import io.circe.generic.auto._ diff --git a/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala 
b/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala index 73005a0095d..b7620abec71 100644 --- a/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala +++ b/node/src/main/scala/coop/rchain/node/web/WebApiRoutesV1.scala @@ -1,5 +1,6 @@ package coop.rchain.node.web +import cats.effect.kernel.Concurrent import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.node.api.json.JsonEntitiesCirceFromSchema @@ -80,7 +81,7 @@ final case class WebApiRoutesV1[F[_]: Async: Log]( /** * Defines implementation (interpreter) for Admin Web API endpoints. */ -final case class AdminWebApiRoutesV1[F[_]: Sync]( +final case class AdminWebApiRoutesV1[F[_]: Concurrent]( adminWebApi: AdminWebApi[F] ) extends Endpoints[F] with JsonEntitiesCirceFromSchema diff --git a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala index 219f29b5a0c..7c586a3f861 100644 --- a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala +++ b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala @@ -16,6 +16,7 @@ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers +import cats.effect.unsafe.implicits.global class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { val genesis: GenesisContext = buildGenesis() @@ -37,7 +38,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { reportingStore, readonly.validatorIdOpt ) - deploy <- ConstructDeploy.sourceDeployNowF( + deploy <- ConstructDeploy.sourceDeployNowF[IO]( term, sec = deployKey, phloLimit = phloLimit, diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala index d0367db53dd..ec979a5bcf8 100644 --- 
a/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/ComputeMerge.scala @@ -46,7 +46,7 @@ trait ComputeMerge { * B1 "contract @0(0) = { 0 } | for (@1 <- @0) { 0 }" * */ - def computeMergeCase[F[_]: Async: Span: Log: Metrics: Parallel: ContextShift]( + def computeMergeCase[F[_]: Async: Span: Log: Metrics: Parallel]( baseDeployRand: Blake2b512Random, baseDeploySources: Seq[Signed[DeployData]], leftDeploySources: Seq[Signed[DeployData]], diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala index 94669c505c0..98f8b1c498f 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala @@ -1,6 +1,7 @@ package coop.rchain.node.mergeablity import cats.Monoid +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.casper.helper.TestRhoRuntime.rhoRuntimeEff @@ -258,7 +259,6 @@ trait BasicMergeabilityRules extends ComputeMerge { implicit val noopSpan: Span[IO] = NoopSpan[IO]() implicit val logger: Log[IO] = Log.log[IO] val baseDeployRand = Blake2b512Random.defaultRandom - import coop.rchain.shared.RChainScheduler._ computeMergeCase[IO]( baseDeployRand, Seq(baseDeploy), diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala index 50715d2a0f5..f9e0280797d 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala @@ -1,7 +1,7 @@ package coop.rchain.node.mergeablity import cats.effect.{IO, Sync} -import cats.implicits.catsSyntaxApplicative +import cats.effect.unsafe.implicits.global import 
cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.casper.genesis.contracts.{Registry, StandardDeploys} diff --git a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala index 23a2512b197..e700e34b6aa 100644 --- a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala +++ b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala @@ -1,6 +1,7 @@ package coop.rchain.node.perf import cats.Parallel +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -76,8 +77,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll override def afterAll: Unit = tempDir.deleteRecursively - def storeLMDB[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]() - : F[KeyValueStore[F]] = + def storeLMDB[F[_]: Async: Parallel: Log: Metrics: Span](): F[KeyValueStore[F]] = for { lmdbHistoryManager <- LmdbStoreManager( tempPath.resolve(Random.nextString(32)), @@ -106,7 +106,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll def create(root: Blake2b256Hash): F[HistoryType[F]] } - case class CreateRadixHistory[F[_]: Sync: Async: ContextShift: Parallel: Log: Metrics: Span]() + case class CreateRadixHistory[F[_]: Sync: Async: Parallel: Log: Metrics: Span]() extends CreateHistory[F] { def create(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeStore match { @@ -131,7 +131,7 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll } } - case class CreateDefaultHistory[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]() + case class CreateDefaultHistory[F[_]: Async: Parallel: Log: Metrics: Span]() extends CreateHistory[F] { def create(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeStore match { @@ -154,7 +154,7 @@ class HistoryGenKeySpec extends 
AnyFlatSpec with Matchers with BeforeAndAfterAll } } - class Experiment[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span: Sync] { + class Experiment[F[_]: Async: Parallel: Log: Metrics: Span: Sync] { def getHistory(root: Blake2b256Hash): F[HistoryType[F]] = Settings.typeHistory match { @@ -410,5 +410,6 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll val t = new Experiment[IO] t.test.unsafeRunSync + } } diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala index 3aaf9a8c67f..02301712d8f 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala +++ b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala @@ -1,5 +1,6 @@ package coop.rchain.node.revvaultexport +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO} import coop.rchain.casper.genesis.contracts.{Registry, StandardDeploys} import coop.rchain.casper.helper.TestNode.Effect diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala index ca3f564ed83..28d1861bce5 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala +++ b/node/src/test/scala/coop/rchain/node/revvaultexport/VaultBalanceGetterTest.scala @@ -1,5 +1,6 @@ package coop.rchain.node.revvaultexport +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.casper.helper.TestNode import coop.rchain.casper.rholang.BlockRandomSeed @@ -7,7 +8,6 @@ import coop.rchain.models.syntax._ import coop.rchain.casper.util.GenesisBuilder.{buildGenesis, buildGenesisParameters} import coop.rchain.node.revvaultexport.mainnet1.StateBalanceMain import coop.rchain.rholang.interpreter.util.RevAddress - import 
org.scalatest.flatspec.AnyFlatSpec class VaultBalanceGetterTest extends AnyFlatSpec { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala index bf85aa1f299..2f9485786e2 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala @@ -420,7 +420,7 @@ object RhoRuntime { replayReducer } - def setupMapsAndRefs[F[_]: Sync]( + def setupMapsAndRefs[F[_]: Async]( extraSystemProcesses: Seq[Definition[F]] = Seq.empty ): F[ (Ref[F, BlockData], Map[String, Name], Seq[(Name, Arity, Remainder, BodyRef)]) @@ -554,7 +554,7 @@ object RhoRuntime { } yield runtime } - def createRuntimes[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntimes[F[_]: Async: Parallel: Log: Metrics: Span]( space: RhoISpace[F], replaySpace: RhoReplayISpace[F], initRegistry: Boolean, @@ -580,7 +580,7 @@ object RhoRuntime { * Create from KeyValueStore's */ - def createRuntime[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntime[F[_]: Async: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], mergeableTagName: Par, rholangEC: ExecutionContext, diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala index 0675d8d3d93..cc06c00bd51 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats._ +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO, Sync} import cats.syntax.all._ import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -57,15 +58,13 @@ object RholangCLI { } def main(args: Array[String]): Unit = { - import scala.concurrent.ExecutionContext.Implicits.global 
- implicit val cs: ContextShift[IO] = IO.contextShift(global) val conf = new Conf(args.toList) implicit val log: Log[IO] = Log.log[IO] implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() implicit val spanF: Span[IO] = NoopSpan[IO]() - implicit val parF: Parallel[IO] = IO.ioParallel + implicit val parF: Parallel[IO] = IO.parallelForIO val kvm = mkRSpaceStoreManager[IO](conf.dataDir(), conf.mapSize()).unsafeRunSync diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala index 234eb74d51f..b9c30313eee 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/CostAccounting.scala @@ -2,7 +2,6 @@ package coop.rchain.rholang.interpreter.accounting import cats.data._ import cats.effect.Async -import cats.effect.concurrent._ import cats.syntax.all._ import cats.mtl._ import cats.Monad diff --git a/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala index 498b3ea5d13..4dc68c22408 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/InterpreterSpec.scala @@ -13,7 +13,7 @@ import coop.rchain.rholang.syntax._ import coop.rchain.shared.Log import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ +import cats.effect.unsafe.implicits.global import scala.concurrent.duration._ diff --git a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala index 48a5d382d66..ef78f1150e9 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala @@ -1,6 +1,7 @@ package 
coop.rchain.rholang import cats.effect.IO +import cats.effect.unsafe.implicits.global import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import coop.rchain.metrics.{Metrics, NoopSpan, Span} diff --git a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala index fba700161f0..d786054fe6b 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala @@ -2,6 +2,7 @@ package coop.rchain.rholang import cats.Eval import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Connective.ConnectiveInstance.ConnNotBody diff --git a/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala index cd199a025d1..3d3ee9ecb54 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StoragePrinterSpec.scala @@ -12,9 +12,7 @@ import coop.rchain.rholang.syntax._ import coop.rchain.shared.{Base16, Log} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ - -import scala.concurrent.duration._ +import cats.effect.unsafe.implicits.global class StoragePrinterSpec extends AnyFlatSpec with Matchers { private val tmpPrefix = "rspace-store-" diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala index 843b311d3e2..0c03b1a0f9c 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala @@ -24,7 +24,7 @@ import 
org.scalatest.matchers.should.Matchers // import coop.rchain.shared.RChainScheduler._ // val outcomeCh = "ret" // -// private def execute[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( +// private def execute[F[_]: Async: Parallel: Metrics: Span: Log]( // source: String // ): F[Either[InterpreterError, BigInt]] = // mkRuntime[F]("rholang-bigint") diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala index 902cd13e014..e3e1c20bce7 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala @@ -24,6 +24,7 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import cats.effect.Ref +import cats.effect.unsafe.implicits.global class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqualsSupport { diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala index 87e80d04c52..8378e8688ee 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.crypto.hash.{Blake2b256, Blake2b512Random, Keccak256, Sha256} import coop.rchain.crypto.signatures.{Ed25519, Secp256k1} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala index 2c20e51bc14..ee14d28d781 100644 --- 
a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.{BindPattern, ListParWithRandom, Par, TaggedContinuation} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala index 3f23b42d86a..0a19b41452f 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import cats.syntax.all._ import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala index 7b2b259f0c4..136a6f5176d 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala index 20f80a2aaa5..0967935645c 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala @@ -1,6 +1,7 @@ 
package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.Resources.mkRuntime diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala index 41322e59477..19e38cdfd63 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.Expr.ExprInstance.GString diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala index 3650870c40a..dfc7299a13f 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala @@ -2,6 +2,7 @@ package coop.rchain.rholang.interpreter.accounting import cats._ import cats.effect._ +import cats.effect.unsafe.implicits.global import cats.syntax.all._ import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -80,7 +81,7 @@ object CostAccountingPropertyTest { tasks.toList .sequence[IO, A] .map { _.sliding(2).forall { case List(r1, r2) => r1 == r2 } } - .unsafeRunTimed(duration) + .unsafeRunTimed(FiniteDuration(duration._1, duration._2)) .get def execute[F[_]: Sync](runtime: RhoRuntime[F], p: Proc): F[Long] = diff --git 
a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala index 5969cfcb4e0..596e149b066 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala @@ -3,6 +3,7 @@ package coop.rchain.rholang.interpreter.accounting import cats.Parallel import cats.data.Chain import cats.effect._ +import cats.effect.unsafe.implicits.global import cats.mtl.FunctorTell import cats.syntax.all._ import coop.rchain.crypto.hash.Blake2b512Random @@ -64,7 +65,7 @@ class CostAccountingSpec }.unsafeRunSync } - private def createRuntimesWithCostLog[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( + private def createRuntimesWithCostLog[F[_]: Async: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], costLog: FunctorTell[F, Chain[Cost]], initRegistry: Boolean = false, diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala index c91c8c79273..ee62312ca12 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter.accounting import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala index 0d55f9ead94..f24b89287c5 100644 --- 
a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/package.scala @@ -12,7 +12,7 @@ package object utils { def costLog[M[_]: Sync](): M[FunctorListen[M, Chain[Cost]]] = for { - ref <- Ref.of(Chain.empty[Cost]) + ref <- Ref[M].of(Chain.empty[Cost]) } yield (new DefaultFunctorListen[M, Chain[Cost]] { override val functor: Functor[M] = implicitly[Functor[M]] def tell(l: Chain[Cost]): M[Unit] = ref.modify(c => (c.concat(l), ())) diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala index 6cdb8ec2d8d..5b310886b54 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatchTest.scala @@ -1,9 +1,9 @@ package coop.rchain.rholang.interpreter.matcher -import cats.effect.Async.catsStateTAsync import cats.effect._ import cats.mtl.implicits._ import cats.Eval +import cats.effect.unsafe.implicits.global import com.google.protobuf.ByteString import coop.rchain.catscontrib.MonadError_._ import coop.rchain.models.Connective.ConnectiveInstance._ diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala index 3cb5d549851..dcb389da778 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/MatcherMonadSpec.scala @@ -1,6 +1,7 @@ package coop.rchain.rholang.interpreter.matcher import cats.effect._ +import cats.effect.unsafe.implicits.global import cats.mtl.implicits._ import cats.syntax.all._ import cats.{Alternative, Foldable, MonoidK, SemigroupK} @@ -16,7 +17,6 @@ import org.scalatest.matchers.should.Matchers class 
MatcherMonadSpec extends AnyFlatSpec with Matchers { implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO] implicit val ms: Metrics.Source = Metrics.BaseSource - import coop.rchain.shared.RChainScheduler._ type F[A] = MatcherMonadT[IO, A] diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala index 7823774ad63..d8da177c533 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/storage/ChargingRSpaceTest.scala @@ -1,5 +1,6 @@ package coop.rchain.rholang.interpreter.storage +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import com.google.protobuf.ByteString import coop.rchain.crypto.hash.Blake2b512Random @@ -306,7 +307,6 @@ class ChargingRSpaceTest extends FixtureAnyFlatSpec with TripleEqualsSupport wit override type FixtureParam = TestFixture protected override def withFixture(test: OneArgTest): Outcome = { - import coop.rchain.shared.RChainScheduler._ val cost: _cost[IO] = CostAccounting.emptyCost[IO].unsafeRunSync implicit val span = NoopSpan[IO] implicit val kvm = InMemoryStoreManager[IO] diff --git a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala index 5bb03995338..68a36378012 100644 --- a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala +++ b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala @@ -1,6 +1,7 @@ package rholang.rosette import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.Resources.mkRuntime diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala index 
055a807a195..0f66bc13864 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala @@ -1,6 +1,7 @@ package coop.rchain.rspace.bench -import cats.effect.Sync +import cats.effect.unsafe.implicits.global +import cats.effect.{IO, Sync} import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -13,8 +14,6 @@ import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.{Match, RSpace, _} import coop.rchain.shared.Log import coop.rchain.shared.PathOps.RichPath -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global import org.openjdk.jmh.annotations.{State => _, _} import org.openjdk.jmh.infra.Blackhole import org.scalacheck.Gen.Parameters @@ -46,22 +45,22 @@ class BasicBench { state.tc.head, false ) - .runSyncUnsafe() + .unsafeRunSync() assert(c1.isEmpty) bh.consume(c1) val r2 = - space.produce(state.channels(i), state.data(i), false).runSyncUnsafe() + space.produce(state.channels(i), state.data(i), false).unsafeRunSync() assert(r2.nonEmpty) bh.consume(r2) if (state.debug) { - assert(space.toMap.runSyncUnsafe().isEmpty) + assert(space.toMap.unsafeRunSync().isEmpty) } } if (state.debug) { - assert(space.createCheckpoint().runSyncUnsafe().log.size == 303) + assert(space.createCheckpoint().unsafeRunSync().log.size == 303) } } @@ -71,7 +70,7 @@ class BasicBench { val space = state.testSpace for (i <- 0 to 100) { val r2 = - space.produce(state.channels(i), state.data(i), false).runSyncUnsafe() + space.produce(state.channels(i), state.data(i), false).unsafeRunSync assert(r2.isEmpty) bh.consume(r2) @@ -83,16 +82,16 @@ class BasicBench { state.tc.head, false ) - .runSyncUnsafe() + .unsafeRunSync() assert(c1.nonEmpty) bh.consume(c1) if (state.debug) { - assert(space.toMap.runSyncUnsafe().isEmpty) + assert(space.toMap.unsafeRunSync().isEmpty) } } if 
(state.debug) { - assert(space.createCheckpoint().runSyncUnsafe().log.size == 303) + assert(space.createCheckpoint().unsafeRunSync().log.size == 303) } } } @@ -104,19 +103,17 @@ object BasicBench { val debug: Boolean = false import coop.rchain.rholang.interpreter.storage._ - implicit val syncF: Sync[Task] = Task.catsEffect - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val m: Match[Task, BindPattern, ListParWithRandom] = matchListPar[Task] - implicit val contextShiftF: ContextShift[Task] = Task.contextShift - implicit val ms: Metrics.Source = Metrics.BaseSource - private val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") - implicit val kvm = RholangCLI.mkRSpaceStoreManager[Task](dbDir).runSyncUnsafe() - val rSpaceStore = kvm.rSpaceStores.runSyncUnsafe() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val m: Match[IO, BindPattern, ListParWithRandom] = matchListPar[IO] + implicit val ms: Metrics.Source = Metrics.BaseSource + private val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") + implicit val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() + val rSpaceStore = kvm.rSpaceStores.unsafeRunSync() import coop.rchain.shared.RChainScheduler._ val testSpace: ISpace[ - Task, + IO, Par, BindPattern, ListParWithRandom, @@ -124,14 +121,14 @@ object BasicBench { ] = RSpace .create[ - Task, + IO, Par, BindPattern, ListParWithRandom, TaggedContinuation - ](rSpaceStore, rholangEC) - .runSyncUnsafe() - implicit val cost = CostAccounting.initialCost[Task](Cost.UNSAFE_MAX).runSyncUnsafe() + ](rSpaceStore) + .unsafeRunSync() + implicit val cost = CostAccounting.initialCost[IO](Cost.UNSAFE_MAX).unsafeRunSync() val initSeed = 123456789L diff 
--git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala index 251d0f680ff..10385e86fcb 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBenchStateBase.scala @@ -1,6 +1,7 @@ package coop.rchain.rspace.bench -import coop.rchain.catscontrib.TaskContrib._ +import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -10,24 +11,22 @@ import coop.rchain.rholang.interpreter.{ParBuilderUtil, RhoRuntime, RholangCLI} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.shared.Log -import monix.eval.{Coeval, Task} -import monix.execution.Scheduler.Implicits.global import org.openjdk.jmh.annotations.{Setup, TearDown} import java.io.{FileNotFoundException, InputStreamReader} import java.nio.file.{Files, Path} trait EvalBenchStateBase { - private lazy val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") - implicit val logF: Log[Task] = new Log.NOPLog[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val kvm = RholangCLI.mkRSpaceStoreManager[Task](dbDir).runSyncUnsafe() + private lazy val dbDir: Path = Files.createTempDirectory("rchain-storage-test-") + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val rhoScriptSource: String - val store = kvm.rSpaceStores.runSyncUnsafe() - lazy val spaces = 
Resources.createRuntimes[Task](store).runSyncUnsafe() + val store = kvm.rSpaceStores.unsafeRunSync() + lazy val spaces = Resources.createRuntimes[IO](store).unsafeRunSync() val (runtime, replayRuntime, _) = spaces val rand: Blake2b512Random = Blake2b512Random.defaultRandom @@ -37,10 +36,11 @@ trait EvalBenchStateBase { def doSetup(): Unit = { deleteOldStorage(dbDir) - term = Compiler[Coeval].sourceToADT(resourceFileReader(rhoScriptSource)).runAttempt match { - case Right(par) => Some(par) - case Left(err) => throw err - } + term = + Compiler[IO].sourceToADT(resourceFileReader(rhoScriptSource)).attempt.unsafeRunSync() match { + case Right(par) => Some(par) + case Left(err) => throw err + } } @TearDown diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala index f66f1ce07b7..915ef3a5865 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala @@ -1,6 +1,9 @@ package coop.rchain.rspace.bench import cats.Id +import cats.effect.IO +import cats.effect.unsafe.implicits.global +import cats.effect.unsafe.{IORuntime, IORuntimeConfig} import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.interpreter.RholangCLI @@ -11,21 +14,18 @@ import coop.rchain.rspace.util._ import coop.rchain.rspace.{RSpace, _} import coop.rchain.shared.Log import coop.rchain.shared.PathOps._ -import monix.eval.Task -import monix.execution.Scheduler import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole import java.nio.file.Files -import java.util.concurrent.TimeUnit -import scala.concurrent.ExecutionContext.Implicits.global +import java.util.concurrent.{Executors, TimeUnit} import scala.concurrent.duration.Duration -import scala.concurrent.{Await, Future} +import scala.concurrent.{Await, ExecutionContext, Future} 
@org.openjdk.jmh.annotations.State(Scope.Thread) trait RSpaceBenchBase { - var space: ISpace[Id, Channel, Pattern, Entry, EntriesCaptor] = null + var space: ISpace[IO, Channel, Pattern, Entry, EntriesCaptor] = null val channel = Channel("friends#" + 1.toString) val channels = List(channel) @@ -45,24 +45,21 @@ trait RSpaceBenchBase { bh.consume(r) } - def createTask(taskIndex: Int, iterations: Int): Task[Unit] = - Task.delay { + def createIO(IOIndex: Int, iterations: Int): IO[Unit] = + IO.delay { for (_ <- 1 to iterations) { - val r1 = unpackOption(space.produce(channel, bob, persist = false)) + val r1 = unpackOption(space.produce(channel, bob, persist = false).unsafeRunSync()) runK(r1) getK(r1).results } } - val tasksCount = 200 + val IOsCount = 200 val iterationsCount = 10 - val tasks = (1 to tasksCount).map(idx => { - val task = createTask(idx, iterationsCount) - task + val IOs = (1 to IOsCount).map(idx => { + val IO = createIO(idx, iterationsCount) + IO }) - - val dupePool = Scheduler.fixedPool("dupe-pool", 3) - @Benchmark @BenchmarkMode(Array(Mode.SingleShotTime)) @OutputTimeUnit(TimeUnit.MILLISECONDS) @@ -70,6 +67,9 @@ trait RSpaceBenchBase { @Threads(1) def simulateDupe(bh: Blackhole) = { + val compute = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(3)) + val sheduler = cats.effect.unsafe.Scheduler.createDefaultScheduler() + space.consume( channels, matches, @@ -77,9 +77,11 @@ trait RSpaceBenchBase { persist = true ) - val results: IndexedSeq[Future[Unit]] = - tasks.map(f => f.executeOn(dupePool).runToFuture(dupePool)) + implicit val ior = IORuntime(compute, compute, sheduler._1, sheduler._2, IORuntimeConfig()) + + val results: IndexedSeq[Future[Unit]] = IOs.map(f => f.unsafeToFuture()(ior)) + implicit val a = scala.concurrent.ExecutionContext.global bh.consume(Await.ready(Future.sequence(results), Duration.Inf)) } } @@ -90,18 +92,20 @@ trait RSpaceBenchBase { @Measurement(iterations = 10) class RSpaceBench extends RSpaceBenchBase { - 
implicit val logF: Log[Id] = new Log.NOPLog[Id] - implicit val noopMetrics: Metrics[Id] = new metrics.Metrics.MetricsNOP[Id] - implicit val noopSpan: Span[Id] = NoopSpan[Id]() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() val dbDir = Files.createTempDirectory("rchain-rspace-bench-") - val kvm = RholangCLI.mkRSpaceStoreManager(dbDir) + val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val rspaceStores = kvm.rSpaceStores import coop.rchain.shared.RChainScheduler._ @Setup def setup() = - space = RSpace.create[Id, Channel, Pattern, Entry, EntriesCaptor](rspaceStores, rholangEC) + space = RSpace + .create[IO, Channel, Pattern, Entry, EntriesCaptor](rspaceStores.unsafeRunSync()) + .unsafeRunSync() @TearDown def tearDown() = { diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala index 627190ccb13..84d28fd1ba8 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala @@ -1,6 +1,8 @@ package coop.rchain.rspace.bench import cats.Id +import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rholang.interpreter.RholangCLI @@ -27,7 +29,7 @@ class ReplayRSpaceBench { @Fork(value = 1) @Measurement(iterations = 1) def singleProduce(bh: Blackhole, state: ProduceInMemBenchState) = { - val res = state.replaySpace.produce(state.produceChannel, bob, persist = true) + val res = state.replaySpace.produce(state.produceChannel, bob, persist = true).unsafeRunSync() assert(res.isDefined) bh.consume(res) } @@ -39,12 +41,14 @@ class ReplayRSpaceBench { @Fork(value = 1) @Measurement(iterations = 1) def 
singleConsume(bh: Blackhole, state: ConsumeInMemBenchState) = { - val res = state.replaySpace.consume( - List(state.consumeChannel), - state.matches, - state.captor, - persist = true - ) + val res = state.replaySpace + .consume( + List(state.consumeChannel), + state.matches, + state.captor, + persist = true + ) + .unsafeRunSync() assert(res.isDefined) bh.consume(res) } @@ -52,15 +56,15 @@ class ReplayRSpaceBench { object ReplayRSpaceBench { - import scala.concurrent.ExecutionContext.Implicits.global + import cats.effect.unsafe.implicits.global abstract class ReplayRSpaceBenchState { - var space: ISpace[Id, Channel, Pattern, Entry, EntriesCaptor] = null - var replaySpace: IReplaySpace[cats.Id, Channel, Pattern, Entry, EntriesCaptor] = + var space: ISpace[IO, Channel, Pattern, Entry, EntriesCaptor] = null + var replaySpace: IReplaySpace[IO, Channel, Pattern, Entry, EntriesCaptor] = null - implicit val logF: Log[Id] = new Log.NOPLog[Id] - implicit val noopMetrics: Metrics[Id] = new metrics.Metrics.MetricsNOP[Id] - implicit val noopSpan: Span[Id] = NoopSpan[Id]() + implicit val logF: Log[IO] = new Log.NOPLog[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() val consumeChannel = Channel("consume") val produceChannel = Channel("produce") val matches = List(CityMatch(city = "Crystal Lake")) @@ -68,7 +72,7 @@ object ReplayRSpaceBench { def initSpace() = { val rigPoint = space.createCheckpoint() - replaySpace.rigAndReset(rigPoint.root, rigPoint.log) + replaySpace.rigAndReset(rigPoint.unsafeRunSync().root, rigPoint.unsafeRunSync().log) } private var dbDir: Path = null @@ -77,10 +81,12 @@ object ReplayRSpaceBench { def setup() = { import coop.rchain.shared.RChainScheduler._ dbDir = Files.createTempDirectory("replay-rspace-bench-") - val kvm = RholangCLI.mkRSpaceStoreManager[Id](dbDir) + val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val store = kvm.rSpaceStores val (space, 
replaySpace) = - RSpace.createWithReplay[Id, Channel, Pattern, Entry, EntriesCaptor](store, rholangEC) + RSpace + .createWithReplay[IO, Channel, Pattern, Entry, EntriesCaptor](store.unsafeRunSync()) + .unsafeRunSync() this.space = space this.replaySpace = replaySpace } @@ -113,7 +119,7 @@ object ReplayRSpaceBench { override def setup() = { super.setup() prepareConsume() - initSpace + initSpace.unsafeRunSync() } } @@ -137,7 +143,7 @@ object ReplayRSpaceBench { override def setup() = { super.setup() prepareProduce() - initSpace + initSpace.unsafeRunSync() } } } diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala index 8bd29ccc7f7..355f0e0bb02 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoBenchBaseState.scala @@ -1,7 +1,8 @@ package coop.rchain.rspace.bench +import cats.effect.IO +import cats.effect.unsafe.implicits.global import coop.rchain.rholang.interpreter.{ReplayRhoRuntime, RhoRuntime, RholangCLI} -import coop.rchain.catscontrib.TaskContrib._ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} @@ -10,7 +11,6 @@ import coop.rchain.rholang.Resources import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rholang.interpreter.compiler.Compiler import coop.rchain.shared.Log -import monix.eval.{Coeval, Task} import monix.execution.Scheduler import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole @@ -28,33 +28,33 @@ abstract class RhoBenchBaseState { val r = (for { result <- runTask _ <- runtime.createCheckpoint - } yield result).runSyncUnsafe() + } yield result).unsafeRunSync() bh.consume(r) } implicit val scheduler: Scheduler = Scheduler.fixedPool(name = "rho-1", poolSize = 100) lazy val dbDir: Path = 
Files.createTempDirectory(BenchStorageDirPrefix) - var runtime: RhoRuntime[Task] = null - var replayRuntime: ReplayRhoRuntime[Task] = null - var setupTerm: Option[Par] = None - var term: Par = _ - var randSetup: Blake2b512Random = null - var randRun: Blake2b512Random = null + var runtime: RhoRuntime[IO] = null + var replayRuntime: ReplayRhoRuntime[IO] = null + var setupTerm: Option[Par] = None + var term: Par = _ + var randSetup: Blake2b512Random = null + var randRun: Blake2b512Random = null - var runTask: Task[Unit] = null + var runTask: IO[Unit] = null - implicit val logF: Log[Task] = Log.log[Task] - implicit val noopMetrics: Metrics[Task] = new metrics.Metrics.MetricsNOP[Task] - implicit val noopSpan: Span[Task] = NoopSpan[Task]() - implicit val ms: Metrics.Source = Metrics.BaseSource - def rand: Blake2b512Random = Blake2b512Random.defaultRandom + implicit val logF: Log[IO] = Log.log[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + implicit val ms: Metrics.Source = Metrics.BaseSource + def rand: Blake2b512Random = Blake2b512Random.defaultRandom def createRuntime = for { - kvm <- RholangCLI.mkRSpaceStoreManager[Task](dbDir) + kvm <- RholangCLI.mkRSpaceStoreManager[IO](dbDir) store <- kvm.rSpaceStores - spaces <- Resources.createRuntimes[Task](store) + spaces <- Resources.createRuntimes[IO](store) (runtime, replayRuntime, _) = spaces } yield (runtime, replayRuntime) @@ -62,25 +62,25 @@ abstract class RhoBenchBaseState { def doSetup(): Unit = { deleteOldStorage(dbDir) setupTerm = setupRho.flatMap { p => - Compiler[Coeval].sourceToADT(p).runAttempt match { + Compiler[IO].sourceToADT(p).attempt.unsafeRunSync() match { case Right(par) => Some(par) case Left(err) => throw err } } - term = Compiler[Coeval].sourceToADT(testedRho).runAttempt match { + term = Compiler[IO].sourceToADT(testedRho).attempt.unsafeRunSync() match { case Right(par) => par case Left(err) => throw err } - val 
runtimes = createRuntime.runSyncUnsafe() + val runtimes = createRuntime.unsafeRunSync() runtime = runtimes._1 replayRuntime = runtimes._2 randSetup = rand randRun = rand Await .result( - createTest(setupTerm)(runtime, randSetup).runToFuture, + createTest(setupTerm)(runtime, randSetup).unsafeToFuture(), Duration.Inf ) runTask = createTest(Some(term))(runtime, randRun) diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala index 5f842492bf5..9708fc3975a 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RhoReplayBenchBaseState.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace.bench +import cats.effect.unsafe.implicits.global import coop.rchain.catscontrib.TaskContrib._ import org.openjdk.jmh.annotations.{Level, Setup} import org.openjdk.jmh.infra.Blackhole @@ -10,7 +11,7 @@ abstract class RhoReplayBenchBaseState extends RhoBenchBaseState { val r = (for { result <- runTask _ <- replayRuntime.createCheckpoint - } yield result).runSyncUnsafe() + } yield result).unsafeRunSync() bh.consume(r) } @@ -18,13 +19,13 @@ abstract class RhoReplayBenchBaseState extends RhoBenchBaseState { override def doSetup(): Unit = { super.doSetup() - runTask.runSyncUnsafe() + runTask.unsafeRunSync() (for { executionCheckpoint <- replayRuntime.createCheckpoint _ <- replayRuntime.rig(executionCheckpoint.log) _ <- replayRuntime.reset(executionCheckpoint.root) _ <- createTest(setupTerm)(replayRuntime, randSetup) _ = runTask = createTest(Some(term))(replayRuntime, randRun) - } yield ()).runSyncUnsafe() + } yield ()).unsafeRunSync() } } diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/package.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/package.scala index be6f7af061b..1af8954ffb1 100644 --- 
a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/package.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/package.scala @@ -1,8 +1,9 @@ package coop.rchain.rspace +import cats.effect.IO + import java.io.{FileNotFoundException, InputStreamReader} import java.nio.file.Path - import com.google.common.io.CharStreams import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.models.Par @@ -10,7 +11,6 @@ import coop.rchain.rholang.interpreter.{Reduce, RhoRuntime} import scala.collection.immutable.Seq import coop.rchain.shared.PathOps.RichPath -import monix.eval.Task import org.scalacheck._ import org.scalacheck.rng.Seed import org.scalacheck.Gen.Parameters @@ -74,11 +74,11 @@ package object bench { } def createTest(t: Option[Par])( - implicit runtime: RhoRuntime[Task], + implicit runtime: RhoRuntime[IO], rand: Blake2b512Random - ): Task[Unit] = + ): IO[Unit] = t match { case Some(par) => runtime.inj(par) - case None => Task.delay(()) + case None => IO.delay(()) } } diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala index 3a0cb7a3b92..93707f42019 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala @@ -18,7 +18,7 @@ import monix.execution.atomic.AtomicAny import scala.collection.SortedSet import scala.concurrent.ExecutionContext -class RSpace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( +class RSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext @@ -235,7 +235,7 @@ object RSpace { /** * Creates [[RSpace]] from [[HistoryRepository]] and [[HotStore]]. 
*/ - def apply[F[_]: Async: ContextShift: Span: Metrics: Log, C, P, A, K]( + def apply[F[_]: Async: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K], rholangEC: ExecutionContext @@ -252,7 +252,7 @@ object RSpace { /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ - def create[F[_]: Async: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( + def create[F[_]: Async: Parallel: Span: Metrics: Log, C, P, A, K]( store: RSpaceStore[F], rholangEC: ExecutionContext )( @@ -272,7 +272,7 @@ object RSpace { /** * Creates [[RSpace]] and [[ReplayRSpace]] from [[KeyValueStore]]'s. */ - def createWithReplay[F[_]: Async: Parallel: ContextShift: Span: Metrics: Log, C, P, A, K]( + def createWithReplay[F[_]: Async: Parallel: Span: Metrics: Log, C, P, A, K]( store: RSpaceStore[F], rholangEC: ExecutionContext )( diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala index 7347344df8a..cc60b9be851 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala @@ -21,7 +21,7 @@ import scala.concurrent.{ExecutionContext, SyncVar} import scala.util.Random import cats.effect.Ref -abstract class RSpaceOps[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( +abstract class RSpaceOps[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], val storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala index ef34510e4b4..e30e403e820 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala @@ -18,7 +18,7 @@ import scala.collection.JavaConverters._ import scala.collection.{immutable, SortedSet} import 
scala.concurrent.ExecutionContext -class ReplayRSpace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( +class ReplayRSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]], rholangEC: ExecutionContext @@ -317,7 +317,7 @@ object ReplayRSpace { /** * Creates [[ReplayRSpace]] from [[HistoryRepository]] and [[HotStore]]. */ - def apply[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( + def apply[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K], rholangEC: ExecutionContext diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala index 6c1d64852ec..b0d80f3ae0b 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReportingRspace.scala @@ -15,13 +15,11 @@ import coop.rchain.rspace.ReportingRspace.{ import coop.rchain.rspace.history.HistoryRepository import coop.rchain.rspace.internal._ import coop.rchain.rspace.trace._ -import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.shared.{Log, Serialize} import coop.rchain.store.KeyValueStore import monix.execution.atomic.AtomicAny import scala.collection.SortedSet -import scala.concurrent.ExecutionContext import cats.effect.Ref /** @@ -54,7 +52,7 @@ object ReportingRspace { /** * Creates [[ReportingRspace]] from [[HistoryRepository]] and [[HotStore]]. 
*/ - def apply[F[_]: Async: ContextShift: Span: Metrics: Log, C, P, A, K]( + def apply[F[_]: Async: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], store: HotStore[F, C, P, A, K] )( @@ -72,7 +70,7 @@ object ReportingRspace { /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ - def create[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span, C, P, A, K]( + def create[F[_]: Async: Parallel: Log: Metrics: Span, C, P, A, K]( store: RSpaceStore[F] )( implicit sc: Serialize[C], @@ -88,7 +86,7 @@ object ReportingRspace { } yield reportingRSpace } -class ReportingRspace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K]( +class ReportingRspace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], storeAtom: AtomicAny[HotStore[F, C, P, A, K]] )( @@ -98,7 +96,7 @@ class ReportingRspace[F[_]: Async: ContextShift: Log: Metrics: Span, C, P, A, K] serializeA: Serialize[A], serializeK: Serialize[K], m: Match[F, P, A] -) extends ReplayRSpace[F, C, P, A, K](historyRepository, storeAtom, rholangEC) { +) extends ReplayRSpace[F, C, P, A, K](historyRepository, storeAtom) { protected[this] override val logger: Logger = Logger[this.type] diff --git a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala index 2ecda12611b..b45ea994749 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala @@ -1,7 +1,9 @@ package coop.rchain.rspace.examples +import cats.effect.kernel.Concurrent +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO} -import cats.{Applicative, Id} +import cats.Applicative import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.util.{runKs, unpackOption, unpackSeq} 
@@ -81,14 +83,6 @@ object AddressBookExample { object implicits { - implicit val concurrentF: Async[Id] = coop.rchain.catscontrib.effect.implicits.concurrentId - - implicit val contextShiftId: ContextShift[Id] = - new ContextShift[Id] { - def shift: Id[Unit] = ??? - def evalOn[A](ec: ExecutionContext)(fa: Id[A]): Id[A] = fa - } - /* Now I will troll Greg... */ /* Serialize instances */ @@ -175,14 +169,14 @@ object AddressBookExample { // Let's define some Entries val alice = Entry( name = Name("Alice", "Lincoln"), - address = Address("777 Ford St.", "Crystal Lake", "Idaho", "223322"), + address = Address("777 Ford St.", "Crystal Lake", "IOaho", "223322"), email = "alicel@ringworld.net", phone = "787-555-1212" ) val bob = Entry( name = Name("Bob", "Lahblah"), - address = Address("1000 Main St", "Crystal Lake", "Idaho", "223322"), + address = Address("1000 Main St", "Crystal Lake", "IOaho", "223322"), email = "blablah@tenex.net", phone = "698-555-1212" ) @@ -195,14 +189,15 @@ object AddressBookExample { ) def exampleOne(): Unit = { - implicit val log: Log[Id] = Log.log - implicit val metricsF: Metrics[Id] = new Metrics.MetricsNOP[Id]() - implicit val spanF: Span[Id] = NoopSpan[Id]() - implicit val keyValueStoreManager = InMemoryStoreManager[Id] + implicit val log: Log[IO] = Log.log + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val keyValueStoreManager = InMemoryStoreManager[IO] // Let's define our store val store = keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store, rholangEC) + val space = + RSpace.create[IO, Channel, Pattern, Entry, Printer](store.unsafeRunSync()).unsafeRunSync() Console.printf("\nExample One: Let's consume and then produce...\n") @@ -213,13 +208,14 @@ object AddressBookExample { Seq(CityMatch(city = "Crystal Lake")), new Printer, persist = true - ) // it should be fine to do that -- type of left side is Nothing 
(no invalid states) + ) + .unsafeRunSync() // it should be fine to do that -- type of left sIOe is Nothing (no invalIO states) assert(cres.isEmpty) - val pres1 = space.produce(Channel("friends"), alice, persist = false) - val pres2 = space.produce(Channel("friends"), bob, persist = false) - val pres3 = space.produce(Channel("friends"), carol, persist = false) + val pres1 = space.produce(Channel("friends"), alice, persist = false).unsafeRunSync() + val pres2 = space.produce(Channel("friends"), bob, persist = false).unsafeRunSync() + val pres3 = space.produce(Channel("friends"), carol, persist = false).unsafeRunSync() assert(pres1.nonEmpty) assert(pres2.nonEmpty) @@ -230,20 +226,21 @@ object AddressBookExample { def exampleTwo(): Unit = { - implicit val log: Log[Id] = Log.log - implicit val metricsF: Metrics[Id] = new Metrics.MetricsNOP[Id]() - implicit val spanF: Span[Id] = NoopSpan[Id]() - implicit val keyValueStoreManager = InMemoryStoreManager[Id] + implicit val log: Log[IO] = Log.log + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val keyValueStoreManager = InMemoryStoreManager[IO] // Let's define our store val store = keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store, rholangEC) + val space = + RSpace.create[IO, Channel, Pattern, Entry, Printer](store.unsafeRunSync()).unsafeRunSync() Console.printf("\nExample Two: Let's produce and then consume...\n") - val pres1 = space.produce(Channel("friends"), alice, persist = false) - val pres2 = space.produce(Channel("friends"), bob, persist = false) - val pres3 = space.produce(Channel("friends"), carol, persist = false) + val pres1 = space.produce(Channel("friends"), alice, persist = false).unsafeRunSync() + val pres2 = space.produce(Channel("friends"), bob, persist = false).unsafeRunSync() + val pres3 = space.produce(Channel("friends"), carol, persist = false).unsafeRunSync() 
assert(pres1.isEmpty) assert(pres2.isEmpty) @@ -258,9 +255,9 @@ object AddressBookExample { persist = false ) - val cres1 = consumer() - val cres2 = consumer() - val cres3 = consumer() + val cres1 = consumer().unsafeRunSync() + val cres2 = consumer().unsafeRunSync() + val cres3 = consumer().unsafeRunSync() assert(cres1.isDefined) assert(cres2.isDefined) @@ -282,14 +279,15 @@ object AddressBookExample { new Printer, persist = false ) + .unsafeRunSync() assert(cres.isEmpty) println("Rollback example: And create a checkpoint...") - val checkpointHash = space.createCheckpoint().root + val checkpointHash = space.createCheckpoint().unsafeRunSync().root def produceAlice(): Option[(Printer, Seq[Entry])] = - unpackOption(space.produce(Channel("friends"), alice, persist = false)) + unpackOption(space.produce(Channel("friends"), alice, persist = false).unsafeRunSync()) println("Rollback example: First produce result should return some data") assert(produceAlice.isDefined) @@ -303,7 +301,7 @@ object AddressBookExample { println( "Rollback example: Let's reset RSpace to the state from before running the produce operations" ) - space.reset(checkpointHash) + space.reset(checkpointHash).unsafeRunSync() println("Rollback example: Again, first produce result should return some data") assert(produceAlice.isDefined) @@ -314,17 +312,18 @@ object AddressBookExample { } private[this] def withSpace( - f: ISpace[Id, Channel, Pattern, Entry, Printer] => Unit + f: ISpace[IO, Channel, Pattern, Entry, Printer] => Unit ) = { - implicit val log: Log[Id] = Log.log - implicit val metricsF: Metrics[Id] = new Metrics.MetricsNOP[Id]() - implicit val spanF: Span[Id] = NoopSpan[Id]() - implicit val keyValueStoreManager = InMemoryStoreManager[Id] + implicit val log: Log[IO] = Log.log + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val keyValueStoreManager = InMemoryStoreManager[IO] // Let's define our store val store = 
keyValueStoreManager.rSpaceStores - val space = RSpace.create[Id, Channel, Pattern, Entry, Printer](store, rholangEC) + val space = + RSpace.create[IO, Channel, Pattern, Entry, Printer](store.unsafeRunSync()).unsafeRunSync() try { f(space) } finally { diff --git a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala index badc277bf9c..5c285b3b6ca 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/merger/StateChange.scala @@ -125,7 +125,7 @@ object StateChange { // compute all changes allChanges = (computeProduceChanges ++ computeConsumeChanges ++ computeJoinsMap) .map(Stream.eval) - _ <- fs2.Stream.fromIterator(allChanges.iterator).parJoinProcBounded.compile.drain + _ <- fs2.Stream.fromIterator(allChanges.iterator, 1).parJoinProcBounded.compile.drain produceChanges <- datumsDiffRef.get _ <- new Exception("State change compute logic error: empty channel change for produce.") .raiseError[F, StateChange] diff --git a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala index 5252b087f44..d50dbe735b7 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala @@ -17,7 +17,6 @@ import monix.execution.atomic.AtomicAny import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scodec.bits.ByteVector -import coop.rchain.shared.RChainScheduler._ import cats.effect.Ref class ExportImportTests @@ -247,7 +246,7 @@ class ExportImportTests } trait InMemoryExportImportTestsBase[C, P, A, K] { - import SchedulerPools.global + import cats.effect.unsafe.implicits.global def fixture[S]( f: ( ISpace[IO, C, P, A, K], diff --git a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala 
b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala index 567fa571a4e..ab557610c2d 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala @@ -20,6 +20,7 @@ import scala.collection.SortedSet import scala.concurrent.duration._ import scala.util.Random import cats.effect.Ref +import cats.effect.unsafe.implicits.global trait HotStoreSpec[F[_]] extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks { @@ -1118,7 +1119,7 @@ trait InMemHotStoreSpec extends HotStoreSpec[IO] { import coop.rchain.shared.RChainScheduler._ protected type F[A] = IO[A] implicit override val S: Sync[F] = implicitly[Async[IO]] - implicit override val P: Parallel[IO] = IO.ioParallel + implicit override val P: Parallel[IO] = IO.parallelForIO def C( c: HotStoreState[String, Pattern, String, StringsCaptor] = HotStoreState() ): F[Ref[F, HotStoreState[String, Pattern, String, StringsCaptor]]] diff --git a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala index f59da743f09..a49401e59fc 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala @@ -26,17 +26,13 @@ import scala.collection.SortedSet import scala.util.Random import scala.util.Random.shuffle import cats.effect.Ref - -object SchedulerPools { - implicit val global = Scheduler.fixedPool("GlobalPool", 20) - val rspacePool = Scheduler.fixedPool("RSpacePool", 5) -} +import cats.effect.unsafe.implicits.global //noinspectTaskn ZeroIndexToHead,NameBooleanParameters trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, String] { import coop.rchain.shared.RChainScheduler._ - implicit val pIO = IO.ioParallel + implicit val pIO = IO.parallelForIO implicit val log: Log[IO] = new Log.NOPLog[IO] val arbitraryRangeSize: Gen[Int] = Gen.chooseNum[Int](1, 10) @@ 
-1256,7 +1252,6 @@ trait ReplayRSpaceTestsBase[C, P, A, K] } trait InMemoryReplayRSpaceTestsBase[C, P, A, K] extends ReplayRSpaceTestsBase[C, P, A, K] { - import SchedulerPools.global override def fixture[S]( f: ( AtomicAny[HotStore[IO, C, P, A, K]], diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala index b7188991feb..6e4d6c79934 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageActionsTests.scala @@ -1188,5 +1188,5 @@ class InMemoryHotStoreStorageActionsTests with TaskTests[String, Pattern, Nothing, String, StringsCaptor] with StorageActionsTests[IO] with StorageTestsBase[IO, String, Pattern, String, StringsCaptor] { - implicit val parF: Parallel[IO] = IO.ioParallel + implicit val parF: Parallel[IO] = IO.parallelForIO } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala index 5e75b0bd4d7..bfa628a8f14 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala @@ -13,7 +13,7 @@ import coop.rchain.shared.RChainScheduler import monix.execution.atomic.AtomicAny import scodec.Codec -import scala.concurrent.ExecutionContext.Implicits.global +import cats.effect.unsafe.implicits.global trait StorageExamplesTests[F[_]] extends StorageTestsBase[F, Channel, Pattern, Entry, EntriesCaptor] { @@ -289,5 +289,5 @@ class InMemoryHotStoreStorageExamplesTests extends InMemoryHotStoreStorageExamplesTestsBase[IO] with TaskTests[Channel, Pattern, Entry, Entry, EntriesCaptor] with StorageExamplesTests[IO] { - implicit val parF: Parallel[IO] = IO.ioParallel + implicit val parF: Parallel[IO] = IO.parallelForIO } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala 
b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala index 9577c784b88..d6563319dce 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala @@ -1,6 +1,5 @@ package coop.rchain.rspace -import cats.effect._ import cats.syntax.all._ import cats.{Parallel, _} import com.typesafe.scalalogging.Logger @@ -17,9 +16,8 @@ import monix.execution.atomic.AtomicAny import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - -import scala.concurrent.ExecutionContext.Implicits.global -import cats.effect.Ref +import cats.effect.{Async, IO, Ref} +import cats.effect.unsafe.implicits.global trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with OptionValues { type T = ISpace[F, C, P, A, K] @@ -33,7 +31,6 @@ trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with implicit def metricsF: Metrics[F] implicit def spanF: Span[F] implicit def monadF: Monad[F] - implicit def contextShiftF: ContextShift[F] val logger: Logger = Logger(this.getClass.getName.stripSuffix("$")) @@ -89,13 +86,12 @@ trait StorageTestsBase[F[_], C, P, A, K] extends AnyFlatSpec with Matchers with } trait TaskTests[C, P, A, R, K] extends StorageTestsBase[IO, C, P, R, K] { - implicit val logF: Log[IO] = Log.log[IO] - implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() - implicit val spanF: Span[IO] = NoopSpan[IO]() - implicit val contextShiftF: ContextShift[IO] = coop.rchain.shared.RChainScheduler.csIO - implicit val concurrentF: Async[IO] = Async[IO] - implicit val monadF: Monad[IO] = Monad[IO] - override def run[RES](f: IO[RES]): RES = f.unsafeRunSync + implicit val logF: Log[IO] = Log.log[IO] + implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() + implicit val spanF: Span[IO] = NoopSpan[IO]() + implicit val concurrentF: Async[IO] = Async[IO] + implicit val monadF: Monad[IO] = Monad[IO] + 
override def run[RES](f: IO[RES]): RES = f.unsafeRunSync } abstract class InMemoryHotStoreTestsBase[F[_]] diff --git a/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala b/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala index 3670080f839..5d4c54eb19b 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/TestImplicitHelpers.scala @@ -1,14 +1,16 @@ package coop.rchain.rspace import cats.Id +import cats.effect.IO +import cats.effect.unsafe.implicits.global import org.scalatest.enablers.Definition //noinspectTaskn ConvertExpressionToSAM trait TestImplicitHelpers { // Some helpers for usage only in the tests -- save us A LOT of explicit casting from Either to Option // it is safe because left type of `Either` is `Nothing` -- we don't expect any invalid states from the matcher - implicit def eitherDefinitionScalatest[E, A]: Definition[Id[Either[E, Option[A]]]] = - new Definition[Id[Either[E, Option[A]]]] { - override def isDefined(thing: Id[Either[E, Option[A]]]): Boolean = - thing.right.get.isDefined + implicit def eitherDefinitionScalatest[E, A]: Definition[IO[Either[E, Option[A]]]] = + new Definition[IO[Either[E, Option[A]]]] { + override def isDefined(thing: IO[Either[E, Option[A]]]): Boolean = + thing.unsafeRunSync().right.get.isDefined } } diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala index b224bc3c6ff..bbdbde4d5a3 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace.concurrent +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers diff --git 
a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala index 854345770be..e44b61df832 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace.concurrent +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import coop.rchain.metrics.Metrics import org.scalatest.flatspec.AnyFlatSpec diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala index 3f69e696902..a3c7b90ca99 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala @@ -1,6 +1,7 @@ package coop.rchain.rspace.history import cats.effect.IO +import cats.effect.unsafe.implicits.global import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.history.TestData._ diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala index df3a1de6d98..4e00a1c79cf 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala @@ -18,6 +18,7 @@ import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks +import cats.effect.unsafe.implicits.global import java.nio.file.{Files, Path} import scala.concurrent.duration._ diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala 
b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala index 221ac74b4ff..f0a6633d63d 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala @@ -21,6 +21,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.OptionValues import scodec.bits.ByteVector +import cats.effect.unsafe.implicits.global import java.nio.ByteBuffer import scala.collection.SortedSet diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala index c53efe46400..a7488a01bd4 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.rspace.history +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.rspace.hashing.Blake2b256Hash diff --git a/shared/src/main/scala/coop/rchain/shared/Log.scala b/shared/src/main/scala/coop/rchain/shared/Log.scala index 812e1c8fbe2..e600e9ef8f8 100644 --- a/shared/src/main/scala/coop/rchain/shared/Log.scala +++ b/shared/src/main/scala/coop/rchain/shared/Log.scala @@ -99,6 +99,4 @@ sealed abstract class LogInstances { def error(msg: => String, cause: Throwable)(implicit ev: LogSource): F[Unit] = Sync[F].delay(Logger(ev.clazz).error(msg, cause)) } - - val logId: Log[Id] = log } diff --git a/shared/src/main/scala/coop/rchain/shared/Time.scala b/shared/src/main/scala/coop/rchain/shared/Time.scala deleted file mode 100644 index 681e59e184f..00000000000 --- a/shared/src/main/scala/coop/rchain/shared/Time.scala +++ /dev/null @@ -1,47 +0,0 @@ -package coop.rchain.shared - -import cats.Monad -import cats.data.EitherT -import cats.tagless._ -import coop.rchain.catscontrib.Catscontrib._ -import 
coop.rchain.catscontrib._ - -import scala.concurrent.duration.{FiniteDuration, MILLISECONDS, NANOSECONDS} -import cats.effect.Temporal - -// TODO: there is no reason for custom Timer definition, remove it -// - for testing TestScheduler (monix) ot TestContext (cats-laws) (TestControl cats.effect 3) should be used -@autoFunctorK -@autoSemigroupalK -@autoProductNK -trait Time[F[_]] { - def currentMillis: F[Long] - def nanoTime: F[Long] - def sleep(duration: FiniteDuration): F[Unit] -} - -object Time extends TimeInstances { - def apply[F[_]](implicit L: Time[F]): Time[F] = L - - def forTrans[F[_]: Monad, T[_[_], _]: MonadTrans](implicit TM: Time[F]): Time[T[F, *]] = - new Time[T[F, *]] { - def currentMillis: T[F, Long] = TM.currentMillis.liftM[T] - def nanoTime: T[F, Long] = TM.nanoTime.liftM[T] - def sleep(duration: FiniteDuration): T[F, Unit] = TM.sleep(duration).liftM[T] - } - - /** - * Default implementation from cats [[Timer]] - */ - def fromTimer[F[_]](implicit timer: Temporal[F]): Time[F] = - new Time[F] { - def currentMillis: F[Long] = timer.clock.realTime(MILLISECONDS) - def nanoTime: F[Long] = timer.clock.monotonic(NANOSECONDS) - def sleep(duration: FiniteDuration): F[Unit] = timer.sleep(duration) - } -} - -sealed abstract class TimeInstances { - implicit def eitherTTime[E, F[_]: Monad: Time[*[_]]]: Time[EitherT[F, E, *]] = - Time.forTrans[F, EitherT[*[_], E, *]] -} diff --git a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala index 0c28022009a..89c5d520a85 100644 --- a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala @@ -11,6 +11,7 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.util.Success import RChainScheduler._ +import cats.effect.unsafe.implicits.global import cats.effect.{Ref, Temporal} class 
Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { @@ -18,9 +19,9 @@ class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { /** * Creates a Stream of 2 elements creating String "11", if timeout occurs it will insert zeroes e.g. "101" */ - def test[F[_]: Async: Temporal](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { st => + def test[F[_]: Async](timeout: FiniteDuration): F[String] = Ref.of("") flatMap { st => val addOne = Stream.eval(st.updateAndGet(_ + "1")) - val pause = Stream.sleep(1.second)(Temporal[F]).drain + val pause = Stream.sleep[F](1.second).drain val addZero = st.update(_ + "0") (addOne ++ pause ++ addOne).evalOnIdle(addZero, timeout).compile.lastOrError diff --git a/shared/src/test/scala/coop/rchain/shared/StreamTSpec.scala b/shared/src/test/scala/coop/rchain/shared/StreamTSpec.scala index a1f6f2aef34..a5b89a0614d 100644 --- a/shared/src/test/scala/coop/rchain/shared/StreamTSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/StreamTSpec.scala @@ -1,8 +1,8 @@ package coop.rchain.shared import cats._ +import cats.effect.IO import cats.syntax.all._ - import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks @@ -15,7 +15,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty describe("StreamT") { it("should be able to be constructed from lists") { forAll { (list: List[Int]) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.toList[Int] shouldBe list } @@ -24,7 +24,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should correctly compute heads") { forAll { (list: List[Int]) => whenever(list.nonEmpty) { - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.head[Int](mErrId) 
shouldBe list.head } @@ -34,7 +34,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should correctly compute tails") { forAll { (list: List[Int]) => whenever(list.nonEmpty) { - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.tail(mErrId).toList[Int] shouldBe list.tail } @@ -43,8 +43,8 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should be able to zip with other StreamTs") { forAll { (listA: List[Int], listB: List[String]) => - val streamA = StreamT.fromList[Id, Int](listA) - val streamB = StreamT.fromList[Id, String](listB) + val streamA = StreamT.fromList[IO, Int](listA.pure[IO]) + val streamB = StreamT.fromList[IO, String](listB.pure[IO]) listA.zip(listB) shouldBe streamA.zip(streamB).toList } @@ -52,7 +52,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should allow taking a finite number of terms") { forAll { (list: List[Int], n: Int) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.take(n).toList[Int] shouldBe list.take(n) } @@ -63,7 +63,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty "should allow taking the longest prefix of this StreamT whose elements satisfy the predicate" ) { forAll { list: List[Int] => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.takeWhile(_ < 100).toList[Int] shouldBe list.takeWhile(_ < 100) } @@ -72,7 +72,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should allow dropping a finite number of terms") { forAll { (list: List[Int], n: Int) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = 
StreamT.fromList[IO, Int](list.pure[IO]) stream.drop(n).toList[Int] shouldBe list.drop(n) } @@ -81,7 +81,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should allow dropping a finite number of terms until a term doesn't satisfy the predicate") { forAll { list: List[Int] => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.dropWhile(_ < 100).toList[Int] shouldBe list.dropWhile(_ < 100) } @@ -89,7 +89,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should find elements properly in") { forAll { (list: List[Int]) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.find[Int](_ % 2 == 0) shouldBe list.find(_ % 2 == 0) } @@ -97,7 +97,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should foldLeft properly in") { forAll { (list: List[Int]) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.foldLeft[Int](0)(_ + _) shouldBe list.sum } @@ -105,7 +105,7 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should map properly in") { forAll { (list: List[Int]) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream.map(_ * 2).toList[Int] shouldBe list.map(_ * 2) } @@ -113,15 +113,15 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty it("should flatMap properly in") { forAll { (list: List[Int]) => - val stream: StreamT[Id, Int] = StreamT.fromList[Id, Int](list) + val stream: StreamT[IO, Int] = StreamT.fromList[IO, Int](list.pure[IO]) stream - .flatMap(i => StreamT.fromList[Id, Int](List(i + 1, i + 2, i + 3))) + 
.flatMap(i => StreamT.fromList[IO, Int](List(i + 1, i + 2, i + 3).pure[IO])) .toList[Int] shouldBe list.flatMap(i => List(i + 1, i + 2, i + 3)) } } it("should be able lazily construct infinite sequences") { - lazy val fibs: StreamT[Id, Long] = + lazy val fibs: StreamT[IO, Long] = StreamT.cons( 0L, Eval.now(pure(StreamT.cons(1L, Eval.later(pure(fibs.zip(fibs.tail(mErrId)).map { @@ -133,11 +133,11 @@ class StreamTSpec extends AnyFunSpec with Matchers with ScalaCheckDrivenProperty } } - private def pure[A](value: A): Id[A] = Applicative[Id].pure(value) + private def pure[A](value: A): IO[A] = Applicative[IO].pure(value) } object StreamTSpec { - val mErrId = unsafeMErr[Id] + val mErrId = unsafeMErr[IO] def unsafeMErr[F[_]: Monad]: MonadError[F, Throwable] = new MonadError[F, Throwable] { diff --git a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala index a694953dd3d..6f15f783b48 100644 --- a/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala +++ b/shared/src/test/scala/coop/rchain/shared/scalatestcontrib.scala @@ -2,6 +2,7 @@ package coop.rchain.shared import cats.Functor import cats.effect.IO +import cats.effect.unsafe.implicits.global import cats.syntax.functor._ import monix.execution.Scheduler import org.scalatest.Assertion diff --git a/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala b/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala index 7fdd7c4b217..6b202ac60d1 100644 --- a/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala +++ b/shared/src/test/scala/coop/rchain/store/InMemoryKeyValueStoreSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.store +import cats.effect.unsafe.implicits.global import cats.effect.{IO, Sync} import cats.syntax.all._ import coop.rchain.shared.syntax._ From 99c67e365493d70030cc71691e962232b3cf7033 Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: 
Sat, 8 Apr 2023 08:42:14 +0400 Subject: [PATCH 15/17] Remove special exectior for RSpace --- .../merging/MergeNumberChannelSpec.scala | 2 - .../rchain/casper/rholang/DeployIdTest.scala | 1 - .../casper/rholang/DeployerIdTest.scala | 1 - .../casper/rholang/InterpreterUtilTest.scala | 1 - .../rchain/casper/rholang/Resources.scala | 4 +- .../rchain/casper/rholang/RuntimeSpec.scala | 7 +-- .../sync/BlockRetrieverRequesAllSpec.scala | 1 - .../casper/sync/BlockRetrieverSpec.scala | 2 +- .../rchain/casper/util/GenesisBuilder.scala | 5 +- .../util/scalatest/Fs2StreamMatchers.scala | 1 - .../comm/transport/GrpcTransportClient.scala | 5 +- .../comm/discovery/GrpcKademliaRPCSpec.scala | 8 +-- .../comm/transport/GrpcTransportSpec.scala | 1 - .../node/revvaultexport/StateBalances.scala | 4 +- .../mainnet1/StateBalanceMain.scala | 2 +- .../mainnet1/reporting/MergeBalanceMain.scala | 4 +- .../reporting/TransactionBalanceMain.scala | 1 - .../reporting/TransactionBalances.scala | 4 +- .../coop/rchain/node/runtime/NodeMain.scala | 2 - .../rchain/node/runtime/NodeRuntime.scala | 23 +------ .../coop/rchain/node/runtime/Setup.scala | 7 +-- .../coop/rchain/node/TransactionAPISpec.scala | 2 +- .../node/mergeablity/MergeabilityRules.scala | 2 +- .../TreeHashMapMergeabilitySpec.scala | 3 +- .../rchain/node/perf/HistoryGenKeySpec.scala | 1 - .../revvaultexport/RhoTrieTraverserTest.scala | 1 - .../rholang/interpreter/RhoRuntime.scala | 4 +- .../rholang/interpreter/RholangCLI.scala | 3 +- .../scala/coop/rchain/rholang/PeekSpec.scala | 1 - .../coop/rchain/rholang/StackSafetySpec.scala | 2 +- .../interpreter/BigIntNormalizerSpec.scala | 2 +- .../CostAccountingReducerTest.scala | 6 +- .../interpreter/CryptoChannelsSpec.scala | 1 - .../interpreter/PersistentStoreTester.scala | 3 +- .../rholang/interpreter/ReduceSpec.scala | 1 - .../rholang/interpreter/ReplaySpec.scala | 1 - .../rholang/interpreter/RuntimeSpec.scala | 4 +- .../interpreter/ShortCircuitBooleanSpec.scala | 2 +- 
.../CostAccountingPropertyTest.scala | 2 +- .../accounting/CostAccountingSpec.scala | 4 +- .../accounting/RholangMethodsCostsSpec.scala | 4 +- .../merging/RholangMergingLogicSpec.scala | 1 - .../scala/rholang/rosette/CompilerTests.scala | 4 +- .../coop/rchain/rspace/bench/BasicBench.scala | 2 +- .../rchain/rspace/bench/RSpaceBench.scala | 1 - .../rspace/bench/ReplayRSpaceBench.scala | 2 +- .../scala/coop/rchain/rspace/RSpace.scala | 24 +++---- .../scala/coop/rchain/rspace/RSpaceOps.scala | 63 +++++++++---------- .../coop/rchain/rspace/ReplayRSpace.scala | 12 ++-- .../rspace/examples/AddressBookExample.scala | 2 - .../rchain/rspace/ExportImportTests.scala | 6 +- .../coop/rchain/rspace/HotStoreSpec.scala | 1 - .../rchain/rspace/ReplayRSpaceTests.scala | 9 +-- .../rchain/rspace/StorageExamplesTests.scala | 4 +- .../coop/rchain/rspace/StorageTestsBase.scala | 5 +- .../rspace/concurrent/MultiLockTest.scala | 1 - .../rspace/concurrent/TwoStepLockTest.scala | 1 - .../rspace/history/HistoryActionTests.scala | 2 - .../HistoryRepositoryGenerativeSpec.scala | 1 - .../history/HistoryRepositorySpec.scala | 1 - .../rchain/rspace/history/RadixTreeSpec.scala | 2 +- .../coop/rchain/shared/RChainScheduler.scala | 20 ------ .../rchain/shared/Fs2ExtensionsSpec.scala | 3 +- 63 files changed, 90 insertions(+), 212 deletions(-) delete mode 100644 shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index 6cd27843b6b..858e51a3ba6 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -305,8 +305,6 @@ class MergeNumberChannelSpec extends AnyFlatSpec { implicit val logEff = Log.log[IO] implicit val spanEff = Span.noop[IO] - import coop.rchain.shared.RChainScheduler._ - "multiple branches" 
should "reject deploy when mergeable number channels got negative number" in effectTest { testCase[IO]( baseTerms = Seq(rhoST, rhoChange(10)), diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala index 6880b48c153..1b8d89b4620 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployIdTest.scala @@ -24,7 +24,6 @@ import org.scalatest.matchers.should.Matchers class DeployIdTest extends AnyFlatSpec with Matchers { implicit val log: Log[IO] = new Log.NOPLog[IO]() private val dummyMergeableName = BlockRandomSeed.nonNegativeMergeableTagName("dummy") - import coop.rchain.shared.RChainScheduler._ private val runtimeManager: Resource[IO, RuntimeManager[IO]] = mkRuntimeManager[IO]("deploy-id-runtime-manager-test", dummyMergeableName) diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala index 2f1e2840088..18d0d2f05cb 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala @@ -24,7 +24,6 @@ class DeployerIdTest extends AnyFlatSpec with Matchers { implicit val time = new LogicalTime[IO] implicit val log: Log[IO] = new Log.NOPLog[IO]() private val dummyMergeableName = BlockRandomSeed.nonNegativeMergeableTagName("dummy") - import coop.rchain.shared.RChainScheduler._ val runtimeManager: Resource[IO, RuntimeManager[IO]] = mkRuntimeManager[IO]("deployer-id-runtime-manager-test", dummyMergeableName) diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala index 0153ef65bc5..697365490e2 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala +++ 
b/casper/src/test/scala/coop/rchain/casper/rholang/InterpreterUtilTest.scala @@ -41,7 +41,6 @@ class InterpreterUtilTest implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val span: Span[IO] = new NoopSpan[IO] implicit val logSource: LogSource = LogSource(this.getClass) - import coop.rchain.shared.RChainScheduler._ val genesisContext = GenesisBuilder.buildGenesis() val genesis = genesisContext.genesisBlock diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala index 4c12e1e5304..d45d881becf 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/Resources.scala @@ -53,7 +53,6 @@ object Resources { implicit val log = Log.log[F] implicit val metricsEff = new metrics.Metrics.MetricsNOP[F] implicit val noopSpan: Span[F] = NoopSpan[F]() - import coop.rchain.shared.RChainScheduler._ for { rStore <- kvm.rSpaceStores @@ -62,8 +61,7 @@ object Resources { rStore, mStore, mergeableTagName, - RuntimeManager.noOpExecutionTracker[F], - rholangEC + RuntimeManager.noOpExecutionTracker[F] ) } yield runtimeManager } diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala index 43dd4e1581b..a773090deb6 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeSpec.scala @@ -15,7 +15,6 @@ import coop.rchain.shared.Log import coop.rchain.store.InMemoryStoreManager import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ // TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) //class RuntimeSpec extends AsyncFlatSpec with AsyncIOSpec with Matchers { @@ -32,8 +31,7 @@ import 
coop.rchain.shared.RChainScheduler._ // store <- kvm.rSpaceStores // runtime <- RhoRuntime.createRuntime( // store, -// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId), -// rholangEC +// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId) // ) // // /** @@ -88,8 +86,7 @@ import coop.rchain.shared.RChainScheduler._ // store <- kvm.rSpaceStores // runtime <- RhoRuntime.createRuntime( // store, -// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId), -// rholangEC +// BlockRandomSeed.nonNegativeMergeableTagName(dummyShardId) // ) // r <- runtime.evaluate(contract, Cost.UNSAFE_MAX, Map.empty, random) // _ = r.errors should be(Vector.empty) diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala index f596d3604b1..456bb9f21b6 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverRequesAllSpec.scala @@ -30,7 +30,6 @@ class BlockRetrieverRequestAllSpec extends AnyFunSpec with BeforeAndAfterEach wi val hash = ByteString.copyFrom("newHash", "utf-8") val timeout: FiniteDuration = 240.seconds val local: PeerNode = peerNode("src", 40400) - import coop.rchain.shared.RChainScheduler._ implicit val log: Log[IO] = new LogStub implicit val metrics = new Metrics.MetricsNOP[IO] diff --git a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala index 1b77d00911b..d1eb8757d2e 100644 --- a/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/sync/BlockRetrieverSpec.scala @@ -39,7 +39,7 @@ class BlockRetrieverSpec extends AnyFunSpec with BeforeAndAfterEach with Matcher Ref.unsafe[IO, Connections](List(local)) implicit val transportLayer = new TransportLayerStub[IO] implicit val rpConf = 
createRPConfAsk[IO](local) - import coop.rchain.shared.RChainScheduler._ + implicit val commUtil = CommUtil.of[IO] implicit val blockRetriever = BlockRetriever.of[IO] diff --git a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala index 9badf203092..7a63fddb100 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/GenesisBuilder.scala @@ -168,8 +168,6 @@ object GenesisBuilder { implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val spanEff = NoopSpan[IO]() - import coop.rchain.shared.RChainScheduler._ - (for { kvsManager <- mkTestRNodeStoreManager[IO](storageDirectory) rStore <- kvsManager.rSpaceStores @@ -179,8 +177,7 @@ object GenesisBuilder { rStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(parameters._3.shardId), - t, - rholangEC + t ) // First bonded validator is the creator creator = ValidatorIdentity(parameters._1.head._1) diff --git a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala index f6e87c56ce1..ae1a32f4146 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/scalatest/Fs2StreamMatchers.scala @@ -16,7 +16,6 @@ trait Fs2StreamMatchers { * @param timeout duration to wait for new elements */ class EmptyMatcher[A](timeout: FiniteDuration) extends Matcher[Stream[IO, A]] { - import coop.rchain.shared.RChainScheduler._ def apply(left: Stream[IO, A]) = { val res = left.take(1).timeout(timeout).compile.toList.attempt.unsafeRunSync diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 940f7eb667f..1277b9ff5ff 100644 --- 
a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -35,7 +35,7 @@ final case class BufferedGrpcStreamChannel[F[_]]( buferSubscriber: Stream[F, Unit] ) -class GrpcTransportClient[F[_]: Async: AsyncEffect: Log: Metrics]( +class GrpcTransportClient[F[_]: Async: Log: Metrics]( networkId: String, cert: String, key: String, @@ -47,9 +47,6 @@ class GrpcTransportClient[F[_]: Async: AsyncEffect: Log: Metrics]( val DefaultSendTimeout: FiniteDuration = 5.seconds - import coop.rchain.shared.RChainScheduler.ioScheduler - val ioEC = ExecutionContext.fromExecutorService(ioScheduler) - implicit val metricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "rp.transport") diff --git a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala index 093f7173c5a..99c17d5cf37 100644 --- a/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/discovery/GrpcKademliaRPCSpec.scala @@ -8,9 +8,8 @@ import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.comm.rp.RPConf import coop.rchain.metrics.Metrics -import coop.rchain.shared.{Log, RChainScheduler} +import coop.rchain.shared.Log import io.grpc -import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ import scala.util.Random @@ -38,7 +37,7 @@ class GrpcKademliaRPCSpec extends KademliaRPCSpec[IO, GrpcEnvironment] { RPConf(local = env.peer, null, null, null, 0, null) ) } - IO.delay(new GrpcKademliaRPC(networkId, 500.millis, RChainScheduler.mainEC)) + IO.delay(new GrpcKademliaRPC(networkId, 500.millis)) } def extract[A](fa: IO[A]): A = fa.unsafeRunSync @@ -52,8 +51,7 @@ class GrpcKademliaRPCSpec extends KademliaRPCSpec[IO, GrpcEnvironment] { networkId, env.port, pingHandler, - lookupHandler, - RChainScheduler.mainEC + 
lookupHandler ) } diff --git a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala index ea60b492db5..7261dca3292 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/GrpcTransportSpec.scala @@ -21,7 +21,6 @@ import scala.util.Random class GrpcTransportSpec extends AnyWordSpecLike with Matchers with Inside { - import coop.rchain.shared.RChainScheduler._ implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP private val networkId = "test" private val peerLocal = createPeerNode diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala index f8aa1b29797..ef5c061d9d5 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/StateBalances.scala @@ -18,7 +18,6 @@ import coop.rchain.shared.Log import coop.rchain.shared.syntax._ import java.nio.file.Path -import scala.concurrent.ExecutionContext.global object StateBalances { @@ -59,8 +58,7 @@ object StateBalances { store <- rnodeStoreManager.rSpaceStores spaces <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store, - global + store ) (rSpacePlay, rSpaceReplay) = spaces runtimes <- RhoRuntime.createRuntimes[F](rSpacePlay, rSpaceReplay, true, Seq.empty, Par()) diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala index 6e30a4c4e9b..8236000c5e3 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/StateBalanceMain.scala @@ -74,7 +74,7 @@ object StateBalanceMain { } val stateBalancesFile = 
outputDir.resolve("stateBalances.csv") - import coop.rchain.shared.RChainScheduler._ + implicit val tc = Async[IO] val task: IO[Unit] = for { diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala index d8f23701b57..b68bb0990ad 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/MergeBalanceMain.scala @@ -158,7 +158,6 @@ object MergeBalanceMain { implicit val metrics: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO]() import coop.rchain.rholang.interpreter.storage._ implicit val m: Match[IO, BindPattern, ListParWithRandom] = matchListPar[IO] - import coop.rchain.shared.RChainScheduler._ val task: IO[Vector[Account]] = for { accountMap <- getVaultMap(stateBalanceFile, transactionBalanceFile).pure[IO] @@ -167,8 +166,7 @@ object MergeBalanceMain { store <- rnodeStoreManager.rSpaceStores spaces <- RSpace .createWithReplay[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store, - rholangEC + store ) (rSpacePlay, rSpaceReplay) = spaces runtimes <- RhoRuntime.createRuntimes[IO](rSpacePlay, rSpaceReplay, true, Seq.empty, Par()) diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala index d44aa653a3d..9a68dc8e40b 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/mainnet1/reporting/TransactionBalanceMain.scala @@ -73,7 +73,6 @@ object TransactionBalanceMain { val transactionBalancesFile = outputDir.resolve("transactionBalances.csv") val historyFile = outputDir.resolve("history.csv") - import 
coop.rchain.shared.RChainScheduler._ implicit val tc = Async[IO] val task: IO[Unit] = for { diff --git a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala index 120a600d01c..c2594c25633 100644 --- a/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala +++ b/node/src/main/scala/coop/rchain/node/revvaultexport/reporting/TransactionBalances.scala @@ -32,7 +32,6 @@ import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.rspace.syntax._ import coop.rchain.rspace.{Match, RSpace} import coop.rchain.models.syntax._ -import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.shared.{Base16, Log} import coop.rchain.shared.syntax._ @@ -238,8 +237,7 @@ object TransactionBalances { store <- rnodeStoreManager.rSpaceStores spaces <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store, - rholangEC + store ) (rSpacePlay, rSpaceReplay) = spaces runtimes <- RhoRuntime diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala index 579859baa14..43117c108b7 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeMain.scala @@ -25,8 +25,6 @@ import scala.tools.jline.console.completer.StringsCompleter object NodeMain { - import coop.rchain.shared.RChainScheduler.mainEC // main execution context - /** * Starts RNode instance * diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index 2c312fc8910..22ac63f93bd 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -31,10 +31,10 @@ import cats.effect.{Ref, Temporal} object NodeRuntime { 
type LocalEnvironment[F[_]] = ApplicativeLocal[F, NodeCallCtx] - def start[F[_]: AsyncEffect: Parallel: ContextShift: Temporal: Log]( + def start[F[_]: Async: Parallel: Temporal: Log]( nodeConf: NodeConf, kamonConf: Config - )(implicit mainEC: ExecutionContext): F[Unit] = { + ): F[Unit] = { val nodeCallCtxReader: NodeCallCtxReader[F] = NodeCallCtxReader[F]() import nodeCallCtxReader._ @@ -79,24 +79,7 @@ class NodeRuntime[F[_]: Parallel: Async: LocalEnvironment: Log] private[node] ( nodeConf: NodeConf, kamonConf: Config, id: NodeIdentifier -)(implicit mainEC: ExecutionContext) { - - // TODO: revise use of schedulers for gRPC - private[this] val grpcEC = mainEC - - val ioScheduler = Executors.newCachedThreadPool(new ThreadFactory { - private val counter = new AtomicLong(0L) - - def newThread(r: Runnable) = { - val th = new Thread(r) - th.setName( - "io-thread-" + - counter.getAndIncrement.toString - ) - th.setDaemon(true) - th - } - }) +) { implicit private val logSource: LogSource = LogSource(this.getClass) diff --git a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala index 1d6d9fca860..ca696c3de4d 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/Setup.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/Setup.scala @@ -86,14 +86,12 @@ object Setup { // Runtime for `rnode eval` evalRuntime <- { implicit val sp = span - import RChainScheduler._ - storeManager.evalStores.flatMap(RhoRuntime.createRuntime[F](_, Par(), rholangEC)) + storeManager.evalStores.flatMap(RhoRuntime.createRuntime[F](_, Par())) } // Runtime manager (play and replay runtimes) runtimeManagerWithHistory <- { implicit val sp = span - import RChainScheduler._ for { rStores <- storeManager.rSpaceStores mergeStore <- RuntimeManager.mergeableStore(storeManager) @@ -102,8 +100,7 @@ object Setup { rStores, mergeStore, BlockRandomSeed.nonNegativeMergeableTagName(conf.casper.shardName), - executionTracker, - rholangEC + 
executionTracker ) } yield rm } diff --git a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala index 7c586a3f861..77741212579 100644 --- a/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala +++ b/node/src/test/scala/coop/rchain/node/TransactionAPISpec.scala @@ -25,7 +25,7 @@ class TransactionAPISpec extends AnyFlatSpec with Matchers with Inspectors { TestNode.networkEff(genesis, networkSize = 1, withReadOnlySize = 1).use { nodes => val validator = nodes(0) val readonly = nodes(1) - import coop.rchain.shared.RChainScheduler._ + import readonly._ for { kvm <- Resources.mkTestRNodeStoreManager[IO](readonly.dataDir) diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala index 98f8b1c498f..5206dee5bcd 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/MergeabilityRules.scala @@ -98,7 +98,7 @@ object OperationOn0Ch { implicit val logger: Log[IO] = Log.log[IO] implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() - import coop.rchain.shared.RChainScheduler._ + rhoRuntimeEff[IO](initRegistry = false).use { case (runtime, _, _) => for { diff --git a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala index f9e0280797d..f88b0c20a59 100644 --- a/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala +++ b/node/src/test/scala/coop/rchain/node/mergeablity/TreeHashMapMergeabilitySpec.scala @@ -16,7 +16,7 @@ import coop.rchain.node.revvaultexport.RhoTrieTraverser import coop.rchain.rholang.interpreter.RhoRuntime import coop.rchain.rholang.interpreter.accounting.Cost import 
coop.rchain.rspace.hashing.Blake2b256Hash -import coop.rchain.shared.{Log, RChainScheduler} +import coop.rchain.shared.Log import org.scalacheck.Gen import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -480,7 +480,6 @@ class TreeHashMapMergeabilitySpec implicit val metricsEff: Metrics[IO] = new Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() implicit val logger: Log[IO] = Log.log[IO] - import RChainScheduler._ val baseDeploy = ConstructDeploy.sourceDeploy(base, 1L, phloLimit = Cost.UNSAFE_MAX.value) val leftDeploy = diff --git a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala index e700e34b6aa..3ff9b2d239d 100644 --- a/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala +++ b/node/src/test/scala/coop/rchain/node/perf/HistoryGenKeySpec.scala @@ -402,7 +402,6 @@ class HistoryGenKeySpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll } it should "execute with monix" in { - import coop.rchain.shared.RChainScheduler._ implicit val log: Log.NOPLog[IO] = new Log.NOPLog[IO]() implicit val met: Metrics.MetricsNOP[IO] = new Metrics.MetricsNOP[IO]() diff --git a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala index 02301712d8f..d18757a8e7d 100644 --- a/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala +++ b/node/src/test/scala/coop/rchain/node/revvaultexport/RhoTrieTraverserTest.scala @@ -20,7 +20,6 @@ import scala.util.Random class RhoTrieTraverserTest extends AnyFlatSpec { private val SHARD_ID = "root-shard" private val registry = Registry(GenesisBuilder.defaultSystemContractPubKey) - import coop.rchain.shared.RChainScheduler._ "traverse the TreeHashMap" should "work" in { val total = 100 diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala index 2f9485786e2..563434d8c53 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RhoRuntime.scala @@ -583,7 +583,6 @@ object RhoRuntime { def createRuntime[F[_]: Async: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], mergeableTagName: Par, - rholangEC: ExecutionContext, initRegistry: Boolean = false, additionalSystemProcesses: Seq[Definition[F]] = Seq.empty ): F[RhoRuntime[F]] = { @@ -592,8 +591,7 @@ object RhoRuntime { for { space <- RSpace .create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores, - rholangEC + stores ) runtime <- createRhoRuntime[F]( space, diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala index cc06c00bd51..410d6026619 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/RholangCLI.scala @@ -13,7 +13,6 @@ import coop.rchain.rholang.interpreter.storage.StoragePrinter import coop.rchain.rholang.syntax._ import coop.rchain.rspace.syntax._ import coop.rchain.shared.Log -import coop.rchain.shared.RChainScheduler.rholangEC import coop.rchain.store.LmdbDirStoreManager.{mb, Db, LmdbEnvConfig} import coop.rchain.store.{KeyValueStoreManager, LmdbDirStoreManager} import monix.execution.{CancelableFuture, Scheduler} @@ -70,7 +69,7 @@ object RholangCLI { val runtime = (for { store <- kvm.rSpaceStores - runtime <- RhoRuntime.createRuntime[IO](store, Par(), rholangEC) + runtime <- RhoRuntime.createRuntime[IO](store, Par()) } yield runtime).unsafeRunSync val problems = try { diff --git a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala 
b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala index ef78f1150e9..90a3a0d67fd 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/PeekSpec.scala @@ -9,7 +9,6 @@ import coop.rchain.models.Expr.ExprInstance.{GInt, GString} import coop.rchain.models.rholang.implicits._ import coop.rchain.shared.Log import coop.rchain.rholang.interpreter.InterpreterUtil -import coop.rchain.shared.RChainScheduler._ import scala.concurrent.duration._ diff --git a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala index d786054fe6b..761504debfd 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala @@ -203,7 +203,7 @@ class StackSafetySpec extends AnyFlatSpec with TableDrivenPropertyChecks with Ma val ast = Compiler[Eval].sourceToADT(rho).value PrettyPrinter().buildString(ast) checkSuccess(rho) { - import coop.rchain.shared.RChainScheduler._ + mkRuntime[IO](tmpPrefix).use { runtime => runtime.evaluate(rho) } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala index 0c03b1a0f9c..761e73d5ac3 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/BigIntNormalizerSpec.scala @@ -21,7 +21,7 @@ import org.scalatest.matchers.should.Matchers // implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] // implicit val noopSpan: Span[IO] = NoopSpan[IO]() // -// import coop.rchain.shared.RChainScheduler._ +// // val outcomeCh = "ret" // // private def execute[F[_]: Async: Parallel: Metrics: Span: Log]( diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala 
b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala index e3e1c20bce7..201e5cee6a6 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CostAccountingReducerTest.scala @@ -55,7 +55,7 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu val substTerm = term(Expr(GString("1"))) val termCost = Chargeable[Par].cost(substTerm) val initCost = Cost(1000) - import coop.rchain.shared.RChainScheduler._ + (for { cost <- CostAccounting.initialCost[IO](initCost) res <- { @@ -73,7 +73,6 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu val varTerm = term(Expr(EVarBody(EVar(Var(FreeVar(0)))))) val originalTermCost = Chargeable[Par].cost(varTerm) val initCost = Cost(1000) - import coop.rchain.shared.RChainScheduler._ (for { cost <- CostAccounting.initialCost[IO](initCost) @@ -111,7 +110,7 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu (ContResult[Par, BindPattern, TaggedContinuation], Seq[Result[Par, ListParWithRandom]]) ]](OutOfPhlogistonsError) } - import coop.rchain.shared.RChainScheduler._ + implicit val rand = Blake2b512Random.defaultRandom implicit val cost = CostAccounting.initialCost[IO](Cost(1000)).unsafeRunSync val (_, chargingReducer) = createDispatcher(iSpace, Map.empty, Map.empty) @@ -136,7 +135,6 @@ class CostAccountingReducerTest extends AnyFlatSpec with Matchers with TripleEqu implicit val rand = Blake2b512Random(Array.empty[Byte]) implicit val logF: Log[IO] = Log.log[IO] implicit val kvm = InMemoryStoreManager[IO] - import coop.rchain.shared.RChainScheduler._ def testImplementation(pureRSpace: RhoISpace[IO]): IO[ ( diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala index 8378e8688ee..df7b24c8a5d 
100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/CryptoChannelsSpec.scala @@ -221,7 +221,6 @@ class CryptoChannelsSpec implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() implicit val kvm = InMemoryStoreManager[IO] - import coop.rchain.shared.RChainScheduler._ val runtime = (for { store <- kvm.rSpaceStores diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala index ee14d28d781..b7a7a7cd433 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PersistentStoreTester.scala @@ -20,7 +20,6 @@ final case class TestFixture(space: RhoISpace[IO], reducer: DebruijnInterpreter[ trait PersistentStoreTester { implicit val ms: Metrics.Source = Metrics.BaseSource - import coop.rchain.shared.RChainScheduler._ def withTestSpace[R](f: TestFixture => R): R = { implicit val logF: Log[IO] = new Log.NOPLog[IO] @@ -32,7 +31,7 @@ trait PersistentStoreTester { implicit val kvm = InMemoryStoreManager[IO] val store = kvm.rSpaceStores.unsafeRunSync val space = RSpace - .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](store, rholangEC) + .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](store) .unsafeRunSync val reducer = RholangOnlyDispatcher(space)._2 cost.set(Cost.UNSAFE_MAX).unsafeRunSync diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala index 0a19b41452f..61a06d0669f 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReduceSpec.scala @@ -874,7 +874,6 
@@ class ReduceSpec extends AnyFlatSpec with Matchers with AppendedClues with Persi val result = withTestSpace { case TestFixture(space, _) => - import coop.rchain.shared.RChainScheduler._ implicit val cost = CostAccounting.emptyCost[IO].unsafeRunSync def byteName(b: Byte): Par = GPrivate(ByteString.copyFrom(Array[Byte](b))) val reducer = RholangOnlyDispatcher(space, Map("rho:test:foo" -> byteName(42)))._2 diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala index 136a6f5176d..a53122ca29d 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ReplaySpec.scala @@ -110,7 +110,6 @@ class ReplaySpec extends AnyFlatSpec with Matchers { implicit val logF: Log[IO] = new Log.NOPLog[IO] implicit val metricsEff: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() - import coop.rchain.shared.RChainScheduler._ val resources = for { res <- Resources.mkRuntimes[IO]("cost-accounting-spec-") diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala index 0967935645c..007938706dc 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/RuntimeSpec.scala @@ -50,10 +50,8 @@ class RuntimeSpec extends AnyFlatSpec with Matchers { private def checkError(rho: String, error: String): Unit = assert(execute(rho).errors.nonEmpty, s"Expected $rho to fail - it didn't.") - private def execute(source: String): EvaluateResult = { - import coop.rchain.shared.RChainScheduler._ + private def execute(source: String): EvaluateResult = mkRuntime[IO](tmpPrefix).use { runtime => runtime.evaluate(source) }.unsafeRunSync - } } diff --git 
a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala index 19e38cdfd63..13cc4656bf8 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/ShortCircuitBooleanSpec.scala @@ -16,7 +16,7 @@ import coop.rchain.models.rholang.implicits._ import coop.rchain.rholang.interpreter.errors.{InterpreterError, ReduceError} class ShortCircuitBooleanSpec extends AnyWordSpec with Matchers { - import coop.rchain.shared.RChainScheduler._ + implicit val logF: Log[IO] = Log.log[IO] implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala index dfc7299a13f..a4ade346bfb 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingPropertyTest.scala @@ -100,7 +100,7 @@ object CostAccountingPropertyTest { } def costOfExecution(procs: Proc*): IO[Long] = { - import coop.rchain.shared.RChainScheduler._ + implicit val logF: Log[IO] = new Log.NOPLog[IO] implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] implicit val noopSpan: Span[IO] = NoopSpan[IO]() diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala index 596e149b066..fa54e5b7646 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala +++ 
b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala @@ -32,7 +32,6 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ -import coop.rchain.shared.RChainScheduler._ class CostAccountingSpec extends AnyFlatSpec @@ -78,8 +77,7 @@ class CostAccountingSpec for { hrstores <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores, - rholangEC + stores ) (space, replay) = hrstores rhoRuntime <- RhoRuntime diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala index ee62312ca12..16f681c53c3 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/RholangMethodsCostsSpec.scala @@ -24,7 +24,6 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks._ import java.nio.file.{Files, Path} import scala.collection.immutable.BitSet import scala.concurrent.duration._ -import coop.rchain.shared.RChainScheduler._ class RholangMethodsCostsSpec extends AnyWordSpec @@ -1043,14 +1042,13 @@ class RholangMethodsCostsSpec implicit val ms: Metrics.Source = Metrics.BaseSource implicit val kvm = InMemoryStoreManager[IO] val rSpaceStore = kvm.rSpaceStores.unsafeRunSync - import coop.rchain.shared.RChainScheduler._ protected override def beforeAll(): Unit = { import coop.rchain.rholang.interpreter.storage._ implicit val m: Match[IO, BindPattern, ListParWithRandom] = matchListPar[IO] dbDir = Files.createTempDirectory("rholang-interpreter-test-") space = RSpace - .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](rSpaceStore, rholangEC) + .create[IO, Par, BindPattern, ListParWithRandom, TaggedContinuation](rSpaceStore) .unsafeRunSync } 
diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala index 9f35be2b246..bc0191a8ea8 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/merging/RholangMergingLogicSpec.scala @@ -39,7 +39,6 @@ class RholangMergingLogicSpec extends AnyFlatSpec with Matchers { def getDataOnHash[F[_]: Applicative](hash: String): F[Option[Long]] = initValues.get(hash).pure[F] - import coop.rchain.shared.RChainScheduler._ RholangMergingLogic.calculateNumChannelDiff(input, getDataOnHash[IO]).map { res => res shouldBe Seq(Map(("A", 10)), Map(("B", 3)), Map(("A", -5), ("C", -10))) } diff --git a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala index 68a36378012..4a5fa3ca304 100644 --- a/rholang/src/test/scala/rholang/rosette/CompilerTests.scala +++ b/rholang/src/test/scala/rholang/rosette/CompilerTests.scala @@ -43,8 +43,7 @@ class CompilerTests extends AnyFunSuite with Matchers { } } - private def execute(file: Path): EvaluateResult = { - import coop.rchain.shared.RChainScheduler._ + private def execute(file: Path): EvaluateResult = mkRuntime[IO](tmpPrefix).use { runtime => Using.resource(Source.fromFile(file.toString))( fileContents => { @@ -52,6 +51,5 @@ class CompilerTests extends AnyFunSuite with Matchers { } ) }.unsafeRunSync - } } diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala index 0f66bc13864..7edcf542bc1 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/BasicBench.scala @@ -111,7 +111,7 @@ object BasicBench { private val dbDir: Path = 
Files.createTempDirectory("rchain-storage-test-") implicit val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val rSpaceStore = kvm.rSpaceStores.unsafeRunSync() - import coop.rchain.shared.RChainScheduler._ + val testSpace: ISpace[ IO, Par, diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala index 915ef3a5865..c7f3cdd6260 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/RSpaceBench.scala @@ -100,7 +100,6 @@ class RSpaceBench extends RSpaceBenchBase { val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val rspaceStores = kvm.rSpaceStores - import coop.rchain.shared.RChainScheduler._ @Setup def setup() = space = RSpace diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala index 84d28fd1ba8..9f60f133d2a 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/ReplayRSpaceBench.scala @@ -79,7 +79,7 @@ object ReplayRSpaceBench { @Setup def setup() = { - import coop.rchain.shared.RChainScheduler._ + dbDir = Files.createTempDirectory("replay-rspace-bench-") val kvm = RholangCLI.mkRSpaceStoreManager[IO](dbDir).unsafeRunSync() val store = kvm.rSpaceStores diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala index 93707f42019..692a33944cb 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpace.scala @@ -20,8 +20,7 @@ import scala.concurrent.ExecutionContext class RSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - storeAtom: AtomicAny[HotStore[F, C, P, A, 
K]], - rholangEC: ExecutionContext + storeAtom: AtomicAny[HotStore[F, C, P, A, K]] )( implicit serializeC: Serialize[C], @@ -29,7 +28,7 @@ class RSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( serializeA: Serialize[A], serializeK: Serialize[K], val m: Match[F, P, A] -) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom, rholangEC) +) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom) with ISpace[F, C, P, A, K] { protected[this] override val logger: Logger = Logger[this.type] @@ -215,7 +214,7 @@ class RSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( nextHistory <- historyRepo.reset(historyRepo.history.root) historyReader <- nextHistory.getHistoryReader(nextHistory.root) hotStore <- HotStore(historyReader.base) - rSpace <- RSpace(nextHistory, hotStore, rholangEC) + rSpace <- RSpace(nextHistory, hotStore) _ <- rSpace.restoreInstalls() } yield rSpace } @@ -237,8 +236,7 @@ object RSpace { */ def apply[F[_]: Async: Span: Metrics: Log, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - store: HotStore[F, C, P, A, K], - rholangEC: ExecutionContext + store: HotStore[F, C, P, A, K] )( implicit sc: Serialize[C], @@ -247,14 +245,13 @@ object RSpace { sk: Serialize[K], m: Match[F, P, A] ): F[RSpace[F, C, P, A, K]] = - Sync[F].delay(new RSpace[F, C, P, A, K](historyRepository, AtomicAny(store), rholangEC)) + Sync[F].delay(new RSpace[F, C, P, A, K](historyRepository, AtomicAny(store))) /** * Creates [[RSpace]] from [[KeyValueStore]]'s, */ def create[F[_]: Async: Parallel: Span: Metrics: Log, C, P, A, K]( - store: RSpaceStore[F], - rholangEC: ExecutionContext + store: RSpaceStore[F] )( implicit sc: Serialize[C], @@ -266,15 +263,14 @@ object RSpace { for { setup <- createHistoryRepo[F, C, P, A, K](store) (historyReader, store) = setup - space <- RSpace(historyReader, store, rholangEC) + space <- RSpace(historyReader, store) } yield space /** * Creates [[RSpace]] and [[ReplayRSpace]] from [[KeyValueStore]]'s. 
*/ def createWithReplay[F[_]: Async: Parallel: Span: Metrics: Log, C, P, A, K]( - store: RSpaceStore[F], - rholangEC: ExecutionContext + store: RSpaceStore[F] )( implicit sc: Serialize[C], sp: Serialize[P], @@ -286,11 +282,11 @@ object RSpace { setup <- createHistoryRepo[F, C, P, A, K](store) (historyRepo, store) = setup // Play - space <- RSpace(historyRepo, store, rholangEC) + space <- RSpace(historyRepo, store) // Replay historyReader <- historyRepo.getHistoryReader(historyRepo.root) replayStore <- HotStore(historyReader.base) - replay <- ReplayRSpace(historyRepo, replayStore, rholangEC) + replay <- ReplayRSpace(historyRepo, replayStore) } yield (space, replay) /** diff --git a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala index cc60b9be851..66b21466364 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/RSpaceOps.scala @@ -23,8 +23,7 @@ import cats.effect.Ref abstract class RSpaceOps[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - val storeAtom: AtomicAny[HotStore[F, C, P, A, K]], - rholangEC: ExecutionContext + val storeAtom: AtomicAny[HotStore[F, C, P, A, K]] )( implicit serializeC: Serialize[C], @@ -182,30 +181,28 @@ abstract class RSpaceOps[F[_]: Async: Log: Metrics: Span, C, P, A, K]( persist: Boolean, peeks: SortedSet[Int] = SortedSet.empty ): F[MaybeActionResult] = - ContextShift[F].evalOn(rholangEC) { - if (channels.isEmpty) { - val msg = "channels can't be empty" - Log[F].error(msg) >> Sync[F] - .raiseError[MaybeActionResult](new IllegalArgumentException(msg)) - } else if (channels.length =!= patterns.length) { - val msg = "channels.length must equal patterns.length" - Log[F].error(msg) >> Sync[F] - .raiseError[MaybeActionResult](new IllegalArgumentException(msg)) - } else - (for { - consumeRef <- Sync[F].delay(Consume(channels, patterns, continuation, persist)) - 
result <- consumeLockF(channels) { - lockedConsume( - channels, - patterns, - continuation, - persist, - peeks, - consumeRef - ) - } - } yield result).timer(consumeTimeCommLabel)(Metrics[F], MetricsSource) - } + if (channels.isEmpty) { + val msg = "channels can't be empty" + Log[F].error(msg) >> Sync[F] + .raiseError[MaybeActionResult](new IllegalArgumentException(msg)) + } else if (channels.length =!= patterns.length) { + val msg = "channels.length must equal patterns.length" + Log[F].error(msg) >> Sync[F] + .raiseError[MaybeActionResult](new IllegalArgumentException(msg)) + } else + (for { + consumeRef <- Sync[F].delay(Consume(channels, patterns, continuation, persist)) + result <- consumeLockF(channels) { + lockedConsume( + channels, + patterns, + continuation, + persist, + peeks, + consumeRef + ) + } + } yield result).timer(consumeTimeCommLabel)(Metrics[F], MetricsSource) protected[this] def lockedConsume( channels: Seq[C], @@ -221,14 +218,12 @@ abstract class RSpaceOps[F[_]: Async: Log: Metrics: Span, C, P, A, K]( data: A, persist: Boolean ): F[MaybeActionResult] = - ContextShift[F].evalOn(rholangEC) { - (for { - produceRef <- Sync[F].delay(Produce(channel, data, persist)) - result <- produceLockF(channel)( - lockedProduce(channel, data, persist, produceRef) - ) - } yield result).timer(produceTimeCommLabel)(Metrics[F], MetricsSource) - } + (for { + produceRef <- Sync[F].delay(Produce(channel, data, persist)) + result <- produceLockF(channel)( + lockedProduce(channel, data, persist, produceRef) + ) + } yield result).timer(produceTimeCommLabel)(Metrics[F], MetricsSource) protected[this] def lockedProduce( channel: C, diff --git a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala index e30e403e820..0e753a114a5 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala +++ b/rspace/src/main/scala/coop/rchain/rspace/ReplayRSpace.scala @@ -20,8 +20,7 @@ import 
scala.concurrent.ExecutionContext class ReplayRSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - storeAtom: AtomicAny[HotStore[F, C, P, A, K]], - rholangEC: ExecutionContext + storeAtom: AtomicAny[HotStore[F, C, P, A, K]] )( implicit serializeC: Serialize[C], @@ -29,7 +28,7 @@ class ReplayRSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( serializeA: Serialize[A], serializeK: Serialize[K], val m: Match[F, P, A] -) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom, rholangEC) +) extends RSpaceOps[F, C, P, A, K](historyRepository, storeAtom) with IReplaySpace[F, C, P, A, K] { protected override def logF: Log[F] = Log[F] @@ -306,7 +305,7 @@ class ReplayRSpace[F[_]: Async: Log: Metrics: Span, C, P, A, K]( nextHistory <- historyRepo.reset(historyRepo.history.root) historyReader <- nextHistory.getHistoryReader(nextHistory.root) hotStore <- HotStore(historyReader.base) - rSpaceReplay <- ReplayRSpace(nextHistory, hotStore, rholangEC) + rSpaceReplay <- ReplayRSpace(nextHistory, hotStore) _ <- rSpaceReplay.restoreInstalls() } yield rSpaceReplay } @@ -319,8 +318,7 @@ object ReplayRSpace { */ def apply[F[_]: Async: Log: Metrics: Span, C, P, A, K]( historyRepository: HistoryRepository[F, C, P, A, K], - store: HotStore[F, C, P, A, K], - rholangEC: ExecutionContext + store: HotStore[F, C, P, A, K] )( implicit sc: Serialize[C], @@ -329,7 +327,7 @@ object ReplayRSpace { sk: Serialize[K], m: Match[F, P, A] ): F[ReplayRSpace[F, C, P, A, K]] = Sync[F].delay { - new ReplayRSpace[F, C, P, A, K](historyRepository, AtomicAny(store), rholangEC) + new ReplayRSpace[F, C, P, A, K](historyRepository, AtomicAny(store)) } } diff --git a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala index b45ea994749..cf11a225b85 100644 --- a/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala +++ 
b/rspace/src/main/scala/coop/rchain/rspace/examples/AddressBookExample.scala @@ -19,8 +19,6 @@ import scala.concurrent.ExecutionContext @SuppressWarnings(Array("org.wartremover.warts.EitherProjectionPartial")) object AddressBookExample { - import coop.rchain.shared.RChainScheduler._ - /* Here we define a type for channels */ final case class Channel(name: String) diff --git a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala index d50dbe735b7..1fb853f8062 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ExportImportTests.scala @@ -286,8 +286,7 @@ trait InMemoryExportImportTestsBase[C, P, A, K] { } space1 = new RSpace[IO, C, P, A, K]( historyRepository1, - store1, - rholangEC + store1 ) exporter1 <- historyRepository1.exporter importer1 <- historyRepository1.importer @@ -308,8 +307,7 @@ trait InMemoryExportImportTestsBase[C, P, A, K] { } space2 = new RSpace[IO, C, P, A, K]( historyRepository2, - store2, - rholangEC + store2 ) exporter2 <- historyRepository2.exporter importer2 <- historyRepository2.importer diff --git a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala index ab557610c2d..da0a9c492d0 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/HotStoreSpec.scala @@ -1116,7 +1116,6 @@ class History[F[_]: Sync, C, P, A, K](R: Ref[F, HotStoreState[C, P, A, K]]) trait InMemHotStoreSpec extends HotStoreSpec[IO] { - import coop.rchain.shared.RChainScheduler._ protected type F[A] = IO[A] implicit override val S: Sync[F] = implicitly[Async[IO]] implicit override val P: Parallel[IO] = IO.parallelForIO diff --git a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala index a49401e59fc..d5cead9cd3b 
100644 --- a/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/ReplayRSpaceTests.scala @@ -31,7 +31,6 @@ import cats.effect.unsafe.implicits.global //noinspectTaskn ZeroIndexToHead,NameBooleanParameters trait ReplayRSpaceTests extends ReplayRSpaceTestsBase[String, Pattern, String, String] { - import coop.rchain.shared.RChainScheduler._ implicit val pIO = IO.parallelForIO implicit val log: Log[IO] = new Log.NOPLog[IO] @@ -1267,7 +1266,7 @@ trait InMemoryReplayRSpaceTestsBase[C, P, A, K] extends ReplayRSpaceTestsBase[C, sk: Serialize[K], m: Match[IO, P, A] ): S = { - import coop.rchain.shared.RChainScheduler._ + implicit val log: Log[IO] = Log.log[IO] implicit val metricsF: Metrics[IO] = new Metrics.MetricsNOP[IO]() implicit val spanF: Span[IO] = NoopSpan[IO]() @@ -1291,8 +1290,7 @@ trait InMemoryReplayRSpaceTestsBase[C, P, A, K] extends ReplayRSpaceTestsBase[C, space = new RSpace[IO, C, P, A, K]( historyRepository, - store, - rholangEC + store ) historyCache <- Ref[IO].of(HotStoreState[C, P, A, K]()) replayStore <- { @@ -1301,8 +1299,7 @@ trait InMemoryReplayRSpaceTestsBase[C, P, A, K] extends ReplayRSpaceTestsBase[C, } replaySpace = new ReplayRSpace[IO, C, P, A, K]( historyRepository, - replayStore, - rholangEC + replayStore ) res <- f(store, replayStore, space, replaySpace) } yield { res }).unsafeRunSync diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala index bfa628a8f14..d6afa238899 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageExamplesTests.scala @@ -9,7 +9,6 @@ import coop.rchain.rspace.examples.AddressBookExample._ import coop.rchain.rspace.examples.AddressBookExample.implicits._ import coop.rchain.rspace.test._ import coop.rchain.rspace.util.{getK, runK, unpackOption} -import coop.rchain.shared.RChainScheduler 
import monix.execution.atomic.AtomicAny import scodec.Codec @@ -276,8 +275,7 @@ abstract class InMemoryHotStoreStorageExamplesTestsBase[F[_]] val space = new RSpace[F, Channel, Pattern, Entry, EntriesCaptor]( hr, - atomicStore, - RChainScheduler.rholangEC + atomicStore ) Applicative[F].pure((ts, atomicStore, space)) } diff --git a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala index d6563319dce..cf1598c78e3 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/StorageTestsBase.scala @@ -9,7 +9,7 @@ import coop.rchain.rspace.examples.StringExamples._ import coop.rchain.rspace.examples.StringExamples.implicits._ import coop.rchain.rspace.history.{HistoryRepository, HistoryRepositoryInstances} import coop.rchain.rspace.syntax._ -import coop.rchain.shared.{Log, RChainScheduler, Serialize} +import coop.rchain.shared.{Log, Serialize} import coop.rchain.store.InMemoryStoreManager import monix.eval._ import monix.execution.atomic.AtomicAny @@ -105,8 +105,7 @@ abstract class InMemoryHotStoreTestsBase[F[_]] val space = new RSpace[F, String, Pattern, String, StringsCaptor]( hr, - atomicStore, - RChainScheduler.rholangEC + atomicStore ) Applicative[F].pure((ts, atomicStore, space)) } diff --git a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala index bbdbde4d5a3..6389836de26 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/MultiLockTest.scala @@ -9,7 +9,6 @@ import scala.collection._ import scala.collection.immutable.Seq import coop.rchain.metrics import coop.rchain.metrics.Metrics -import coop.rchain.shared.RChainScheduler._ class MultiLockTest extends AnyFlatSpec with Matchers { diff --git 
a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala index e44b61df832..499ef85e908 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/concurrent/TwoStepLockTest.scala @@ -9,7 +9,6 @@ import cats.syntax.all._ class TwoStepLockTest extends AnyFlatSpec with Matchers { - import coop.rchain.shared.RChainScheduler._ implicit val metrics = new Metrics.MetricsNOP[IO] "DefaultTwoStepLock" should "gate concurrent access to shared resources" in { diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala index a3c7b90ca99..9fe556ecf37 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryActionTests.scala @@ -276,8 +276,6 @@ class HistoryActionTests extends AnyFlatSpec with Matchers { } yield () } - import coop.rchain.shared.RChainScheduler._ - protected def withEmptyHistory(f: IO[History[IO]] => IO[Unit]): Unit = { val emptyHistory = History.create(History.emptyRootHash, InMemoryKeyValueStore[IO]) f(emptyHistory).unsafeRunTimed(1.minute) diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala index 4e00a1c79cf..bc54e1b8ba6 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositoryGenerativeSpec.scala @@ -22,7 +22,6 @@ import cats.effect.unsafe.implicits.global import java.nio.file.{Files, Path} import scala.concurrent.duration._ -import coop.rchain.shared.RChainScheduler._ class LMDBHistoryRepositoryGenerativeSpec extends 
HistoryRepositoryGenerativeDefinition diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala index f0a6633d63d..0638265b9e2 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/HistoryRepositorySpec.scala @@ -178,7 +178,6 @@ class HistoryRepositorySpec val pastRoots = rootRepository implicit val log: Log[IO] = new NOPLog() implicit val span: Span[IO] = new NoopSpan[IO]() - import coop.rchain.shared.RChainScheduler._ (for { emptyHistory <- History.create(History.emptyRootHash, InMemoryKeyValueStore[IO]) diff --git a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala index a7488a01bd4..018f1dc5fd0 100644 --- a/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala +++ b/rspace/src/test/scala/coop/rchain/rspace/history/RadixTreeSpec.scala @@ -802,7 +802,7 @@ class RadixTreeSpec extends AnyFlatSpec with Matchers with OptionValues { InMemoryKeyValueStore[IO] ) => IO[Unit] ): Unit = { - import coop.rchain.shared.RChainScheduler._ + val store = InMemoryKeyValueStore[IO] val typedStore = store.toTypedStore(RadixHistory.codecBlakeHash, scodec.codecs.bytes) val radixTreeImpl = new RadixTreeImpl[IO](typedStore) diff --git a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala b/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala deleted file mode 100644 index 6f6d74c953f..00000000000 --- a/shared/src/main/scala/coop/rchain/shared/RChainScheduler.scala +++ /dev/null @@ -1,20 +0,0 @@ -package coop.rchain.shared - -import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.{Executors, ThreadFactory} - -object RChainScheduler { - val ioScheduler = Executors.newCachedThreadPool(new ThreadFactory { - private val counter = new AtomicLong(0L) - 
- def newThread(r: Runnable) = { - val th = new Thread(r) - th.setName( - "io-thread-" + - counter.getAndIncrement.toString - ) - th.setDaemon(true) - th - } - }) -} diff --git a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala index 89c5d520a85..b0c894b73c6 100644 --- a/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala +++ b/shared/src/test/scala/coop/rchain/shared/Fs2ExtensionsSpec.scala @@ -10,9 +10,8 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.util.Success -import RChainScheduler._ import cats.effect.unsafe.implicits.global -import cats.effect.{Ref, Temporal} +import cats.effect.Ref class Fs2ExtensionsSpec extends AnyFlatSpec with Matchers { From 5f409d9a163e5866c65efc7bf7b84124ac5b083c Mon Sep 17 00:00:00 2001 From: nutzipper <1746367+nzpr@users.noreply.github.com> Date: Sat, 8 Apr 2023 09:28:47 +0400 Subject: [PATCH 16/17] WIP1 --- .../casper/dag/BlockDagKeyValueStorage.scala | 4 +- .../rchain/casper/engine/NodeSyncing.scala | 18 +- .../protocol/client/DeployService.scala | 220 +++---- .../protocol/client/ProposeService.scala | 28 +- .../casper/rholang/InterpreterUtil.scala | 8 +- .../casper/rholang/RuntimeManager.scala | 11 +- .../rchain/casper/addblock/ProposerSpec.scala | 1 - .../api/BlockQueryResponseAPITest.scala | 553 +++++++++--------- .../casper/api/BlocksResponseAPITest.scala | 2 - .../casper/api/BondedStatusAPITest.scala | 53 +- .../casper/api/ExploratoryDeployAPITest.scala | 1 - .../casper/api/LastFinalizedAPITest.scala | 1 - .../casper/api/ListeningNameAPITest.scala | 1 - .../batch1/MultiParentCasperDeploySpec.scala | 7 - .../MultiParentCasperReportingSpec.scala | 1 - .../batch2/BlockReceiverEffectsSpec.scala | 1 - .../casper/batch2/LmdbKeyValueStoreSpec.scala | 1 - .../rchain/casper/batch2/ValidateTest.scala | 5 +- .../engine/LfsBlockRequesterEffectsSpec.scala | 29 +- 
.../engine/LfsStateRequesterEffectsSpec.scala | 35 +- .../engine/RunningHandleHasBlockSpec.scala | 1 - .../rchain/casper/genesis/GenesisTest.scala | 4 +- .../helper/BlockDagStorageFixture.scala | 2 - .../coop/rchain/casper/helper/RhoSpec.scala | 2 - .../coop/rchain/casper/helper/TestNode.scala | 4 +- .../comm/discovery/GrpcKademliaRPC.scala | 36 +- .../coop/rchain/comm/discovery/package.scala | 31 +- .../comm/transport/GrpcTransportClient.scala | 49 +- .../transport/GrpcTransportReceiver.scala | 49 +- .../SslSessionClientInterceptor.scala | 36 +- .../SslSessionServerInterceptor.scala | 28 +- .../comm/transport/StreamObservable.scala | 10 +- .../transport/TcpTransportLayerSpec.scala | 6 +- .../transport/TransportLayerRuntime.scala | 11 +- .../main/scala/coop/rchain/node/Main.scala | 28 +- .../scala/coop/rchain/node/api/package.scala | 76 ++- .../diagnostics/BatchInfluxDBReporter.scala | 115 ++-- .../coop/rchain/node/effects/ReplClient.scala | 30 +- .../coop/rchain/node/effects/package.scala | 5 +- .../node/instances/ProposerInstance.scala | 7 +- .../rchain/node/runtime/NetworkServers.scala | 29 +- .../rchain/node/runtime/NodeCallCtx.scala | 113 ++-- .../rchain/node/runtime/NodeRuntime.scala | 9 +- .../rchain/node/web/https4s/RouterFix.scala | 7 +- .../scala/coop/rchain/node/web/package.scala | 25 +- project/Dependencies.scala | 40 +- project/plugins.sbt | 6 +- .../interpreter/accounting/package.scala | 15 +- .../rholang/interpreter/matcher/StreamT.scala | 39 +- .../scala/coop/rchain/rholang/Resources.scala | 22 +- .../interpreter/matcher/StreamTSpec.scala | 43 +- .../coop/rchain/rspace/bench/EvalBench.scala | 50 +- .../effect/implicits/package.scala | 98 +--- .../rchain/metrics/MetricsSemaphore.scala | 17 +- 54 files changed, 1015 insertions(+), 1008 deletions(-) diff --git a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala index 84a7360ea4d..a93252021d1 100644 --- 
a/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala +++ b/casper/src/main/scala/coop/rchain/casper/dag/BlockDagKeyValueStorage.scala @@ -124,11 +124,11 @@ final class BlockDagKeyValueStorage[F[_]: Async: Log] private ( ) } yield () - lock.withPermit( + lock.permit.use { _ => blockMetadataIndex .contains(blockMetadata.blockHash) .ifM(logAlreadyStored, doInsert) - ) + } } /** diff --git a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala index 1468f234cb5..26bf004eb4b 100644 --- a/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala +++ b/casper/src/main/scala/coop/rchain/casper/engine/NodeSyncing.scala @@ -75,17 +75,31 @@ class NodeSyncing[F[_] tupleSpaceQueue: Channel[F, StoreItemsMessage], trimState: Boolean = true ) { + @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) def handle(peer: PeerNode, msg: CasperMessage): F[Unit] = msg match { case ab: FinalizedFringe => onFinalizedFringeMessage(peer, ab) case s: StoreItemsMessage => - Log[F].info(s"Received ${s.pretty} from $peer.") *> tupleSpaceQueue.enqueue1(s) + Log[F].info(s"Received ${s.pretty} from $peer.") *> + tupleSpaceQueue + .send(s) + .map( + _.leftTraverse( + _ => new Exception("Channel received store item is closed").raiseError[F, Unit] + ).map(_.merge) + ) case b: BlockMessage => Log[F] .info(s"BlockMessage received ${PrettyPrinter.buildString(b, short = true)} from $peer.") *> - incomingBlocksQueue.enqueue1(b) + incomingBlocksQueue + .send(b) + .map( + _.leftTraverse( + _ => new Exception("Channel received block message is closed").raiseError[F, Unit] + ).map(_.merge) + ) case _ => ().pure } diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala index 25190f96ed8..63276cab6c2 100644 --- 
a/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/DeployService.scala @@ -51,152 +51,166 @@ class GrpcDeployService[F[_]: Async](host: String, port: Int, maxMessageSize: In .usePlaintext() .build - private val stub = DeployServiceFs2Grpc.stub(channel) + private val stub = Dispatcher[F].map(d => DeployServiceFs2Grpc.stub(d, channel)) def deploy(d: Signed[DeployData]): F[Either[Seq[String], String]] = - stub - .doDeploy(DeployData.toProto(d), new Metadata) - .toEitherF( - _.message.error, - _.message.result - ) + stub.use( + _.doDeploy(DeployData.toProto(d), new Metadata) + .toEitherF( + _.message.error, + _.message.result + ) + ) def deployStatus(deployId: FindDeployQuery): F[Either[Seq[String], DeployExecStatus]] = - stub - .deployStatus(deployId, new Metadata) - .toEitherF( - _.message.error, - _.message.deployExecStatus - ) + stub.use( + _.deployStatus(deployId, new Metadata) + .toEitherF( + _.message.error, + _.message.deployExecStatus + ) + ) def getBlock(q: BlockQuery): F[Either[Seq[String], String]] = - stub - .getBlock(q, new Metadata) - .toEitherF( - _.message.error, - _.message.blockInfo.map(_.toProtoString) - ) + stub.use( + _.getBlock(q, new Metadata) + .toEitherF( + _.message.error, + _.message.blockInfo.map(_.toProtoString) + ) + ) def findDeploy(q: FindDeployQuery): F[Either[Seq[String], String]] = - stub - .findDeploy(q, new Metadata) - .toEitherF( - _.message.error, - _.message.blockInfo.map(_.toProtoString) - ) + stub.use( + _.findDeploy(q, new Metadata) + .toEitherF( + _.message.error, + _.message.blockInfo.map(_.toProtoString) + ) + ) def visualizeDag(q: VisualizeDagQuery): F[Either[Seq[String], String]] = - stub - .visualizeDag(q, new Metadata) - .evalMap(_.pure[F].toEitherF(_.message.error, _.message.content)) - .compile - .toList - .map { bs => - val (l, r) = bs.partition(_.isLeft) - if (l.isEmpty) Right(r.map(_.right.get).mkString) - else 
Left(l.flatMap(_.left.get)) - } + stub.use( + _.visualizeDag(q, new Metadata) + .evalMap(_.pure[F].toEitherF(_.message.error, _.message.content)) + .compile + .toList + .map { bs => + val (l, r) = bs.partition(_.isLeft) + if (l.isEmpty) Right(r.map(_.right.get).mkString) + else Left(l.flatMap(_.left.get)) + } + ) def machineVerifiableDag(q: MachineVerifyQuery): F[Either[Seq[String], String]] = - stub - .machineVerifiableDag(q, new Metadata) - .toEitherF( - _.message.error, - _.message.content - ) + stub.use( + _.machineVerifiableDag(q, new Metadata) + .toEitherF( + _.message.error, + _.message.content + ) + ) def getBlocks(q: BlocksQuery): F[Either[Seq[String], String]] = - stub - .getBlocks(q, new Metadata) - .evalMap(_.pure[F].toEitherF(_.message.error, _.message.blockInfo)) - .map(_.map { bi => - s""" + stub.use( + _.getBlocks(q, new Metadata) + .evalMap(_.pure[F].toEitherF(_.message.error, _.message.blockInfo)) + .map(_.map { bi => + s""" |------------- block ${bi.blockNumber} --------------- |${bi.toProtoString} |----------------------------------------------------- |""".stripMargin - }) - .compile - .toList - .map { bs => - val (l, r) = bs.partition(_.isLeft) - if (l.isEmpty) { - val showLength = - s""" + }) + .compile + .toList + .map { bs => + val (l, r) = bs.partition(_.isLeft) + if (l.isEmpty) { + val showLength = + s""" |count: ${r.length} |""".stripMargin - Right(r.map(_.right.get).mkString("\n") + "\n" + showLength) - } else Left(l.flatMap(_.left.get)) - } + Right(r.map(_.right.get).mkString("\n") + "\n" + showLength) + } else Left(l.flatMap(_.left.get)) + } + ) def listenForDataAtName( request: DataAtNameQuery ): F[Either[Seq[String], Seq[DataWithBlockInfo]]] = - stub - .listenForDataAtName(request, new Metadata) - .toEitherF( - _.message.error, - _.message.payload.map(_.blockInfo) - ) + stub.use( + _.listenForDataAtName(request, new Metadata) + .toEitherF( + _.message.error, + _.message.payload.map(_.blockInfo) + ) + ) def 
listenForContinuationAtName( request: ContinuationAtNameQuery ): F[Either[Seq[String], Seq[ContinuationsWithBlockInfo]]] = - stub - .listenForContinuationAtName(request, new Metadata) - .toEitherF( - _.message.error, - _.message.payload.map(_.blockResults) - ) + stub.use( + _.listenForContinuationAtName(request, new Metadata) + .toEitherF( + _.message.error, + _.message.payload.map(_.blockResults) + ) + ) def getDataAtPar( request: DataAtNameByBlockQuery ): F[Either[Seq[String], (Seq[Par], LightBlockInfo)]] = - stub - .getDataAtName(request, new Metadata) - .toEitherF( - _.message.error, - _.message.payload.map(r => (r.par, r.block)) - ) + stub.use( + _.getDataAtName(request, new Metadata) + .toEitherF( + _.message.error, + _.message.payload.map(r => (r.par, r.block)) + ) + ) def lastFinalizedBlock: F[Either[Seq[String], String]] = - stub - .lastFinalizedBlock(LastFinalizedBlockQuery(), new Metadata) - .toEitherF( - _.message.error, - _.message.blockInfo.map(_.toProtoString) - ) + stub.use( + _.lastFinalizedBlock(LastFinalizedBlockQuery(), new Metadata) + .toEitherF( + _.message.error, + _.message.blockInfo.map(_.toProtoString) + ) + ) def isFinalized(request: IsFinalizedQuery): F[Either[Seq[String], String]] = - stub - .isFinalized(request, new Metadata) - .toEitherF(_.message.error, _.message.isFinalized) - .map( - _.ifM( - "Block is finalized".asRight, - Seq("Block is not finalized").asLeft + stub.use( + _.isFinalized(request, new Metadata) + .toEitherF(_.message.error, _.message.isFinalized) + .map( + _.ifM( + "Block is finalized".asRight, + Seq("Block is not finalized").asLeft + ) ) - ) + ) def bondStatus(request: BondStatusQuery): F[Either[Seq[String], String]] = - stub - .bondStatus(request, new Metadata) - .toEitherF(_.message.error, _.message.isBonded) - .map( - _.ifM( - "Validator is bonded".asRight, - Seq("Validator is not bonded").asLeft + stub.use( + _.bondStatus(request, new Metadata) + .toEitherF(_.message.error, _.message.isBonded) + .map( + _.ifM( 
+ "Validator is bonded".asRight, + Seq("Validator is not bonded").asLeft + ) ) - ) + ) def status: F[Either[Seq[String], String]] = - stub - .status(com.google.protobuf.empty.Empty(), new Metadata) - .toEitherF( - _.message.error, - _.message.status.map(_.toProtoString) - ) + stub.use( + _.status(com.google.protobuf.empty.Empty(), new Metadata) + .toEitherF( + _.message.error, + _.message.status.map(_.toProtoString) + ) + ) @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) override def close(): Unit = { diff --git a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala index 9a10890d695..8bff9ab11c6 100644 --- a/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala +++ b/casper/src/main/scala/coop/rchain/casper/protocol/client/ProposeService.scala @@ -31,23 +31,25 @@ class GrpcProposeService[F[_]: Async](host: String, port: Int, maxMessageSize: I .usePlaintext() .build - private val stub = ProposeServiceFs2Grpc.stub(channel) + private val stub = Dispatcher[F].map(d => ProposeServiceFs2Grpc.stub(d, channel)) def propose(isAsync: Boolean): F[Either[Seq[String], String]] = - stub - .propose(ProposeQuery(isAsync), new Metadata) - .toEitherF( - _.message.error, - _.message.result - ) + stub.use( + _.propose(ProposeQuery(isAsync), new Metadata) + .toEitherF( + _.message.error, + _.message.result + ) + ) def proposeResult: F[Either[Seq[String], String]] = - stub - .proposeResult(ProposeResultQuery(), new Metadata) - .toEitherF( - _.message.error, - _.message.result - ) + stub.use( + _.proposeResult(ProposeResultQuery(), new Metadata) + .toEitherF( + _.message.error, + _.message.result + ) + ) @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) override def close(): Unit = { diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala 
b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala index 699795457e1..afbb746a586 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/InterpreterUtil.scala @@ -140,7 +140,12 @@ object InterpreterUtil { initialStateHash: StateHash, block: BlockMessage, rand: Blake2b512Random - ): F[Either[ReplayFailure, StateHash]] = + ): F[Either[ReplayFailure, StateHash]] = { + // this is only for Retry lib, TODO use fs2 and remove + implicit val sleep = new Sleep[F] { + override def sleep(delay: FiniteDuration): F[Unit] = Temporal[F].sleep(delay) + } + Span[F].trace(ReplayBlockMetricsSource) { val internalDeploys = block.state.deploys val internalSystemDeploys = block.state.systemDeploys @@ -179,6 +184,7 @@ object InterpreterUtil { )(replayResultF) } yield replayResult } + } private def handleErrors[F[_]: Sync: Log]( tsHash: ByteString, diff --git a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala index cdcc6fa0d07..851050535f5 100644 --- a/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala +++ b/casper/src/main/scala/coop/rchain/casper/rholang/RuntimeManager.scala @@ -281,10 +281,9 @@ object RuntimeManager { store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, - executionTracker: BlockExecutionTracker[F], - rholangEC: ExecutionContext + executionTracker: BlockExecutionTracker[F] ): F[RuntimeManagerImpl[F]] = - createWithHistory(store, mergeableStore, mergeableTagName, executionTracker, rholangEC).map( + createWithHistory(store, mergeableStore, mergeableTagName, executionTracker).map( _._1 ) @@ -292,16 +291,14 @@ object RuntimeManager { store: RSpaceStore[F], mergeableStore: MergeableStore[F], mergeableTagName: Par, - executionTracker: BlockExecutionTracker[F], - rholangEC: ExecutionContext + executionTracker: BlockExecutionTracker[F] 
): F[(RuntimeManagerImpl[F], RhoHistoryRepository[F])] = { import coop.rchain.rholang.interpreter.storage._ implicit val m: rspace.Match[F, BindPattern, ListParWithRandom] = matchListPar[F] RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store, - rholangEC + store ) .flatMap { case (rSpacePlay, rSpaceReplay) => diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala index 92a37edb44b..18cc27ce8dd 100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/ProposerSpec.scala @@ -16,7 +16,6 @@ import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ import cats.effect.Deferred import cats.effect.unsafe.implicits.global diff --git a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala index bc341e7b325..629613b1126 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlockQueryResponseAPITest.scala @@ -1,6 +1,6 @@ package coop.rchain.casper.api -import cats.effect.{IO, Sync} +import cats.effect.{IO, Ref, Sync} import cats.effect.testing.scalatest.AsyncIOSpec import cats.syntax.all._ import com.google.protobuf.ByteString @@ -19,288 +19,285 @@ import coop.rchain.models.block.StateHash.StateHash import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ import coop.rchain.models.{BlockMetadata, FringeData} -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito, Mockito, 
MockitoSugar} import org.scalatest._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap -// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) -//class BlockQueryResponseAPITest -// extends AsyncFlatSpec -// with AsyncIOSpec -// with Matchers -// with EitherValues -// with BlockDagStorageFixture -// with BlockApiFixture -// with IdiomaticMockito -// with IdiomaticMockitoCats -// with ArgumentMatchersSugar { -// implicit val timeEff: Time[IO] = Time.fromTimer[IO] -// implicit val spanEff: NoopSpan[IO] = NoopSpan[IO]() -// implicit val log: Log[IO] = mock[Log[IO]] -// implicit val runtimeManager: RuntimeManager[IO] = mock[RuntimeManager[IO]] -// -// private val tooShortQuery = "12345" -// private val badTestHashQuery = "1234acd" -// private val invalidHexQuery = "No such a hash" -// private val unknownDeploy = ByteString.copyFromUtf8("asdfQwertyUiopxyzcbv") -// -// private val genesisBlock: BlockMessage = getRandomBlock(setJustifications = Seq().some) -// -// private val deployCount = 10 -// private val randomDeploys = -// (0 until deployCount).toList -// .traverse(i => ConstructDeploy.basicProcessedDeploy[IO](i)) -// .unsafeRunSync -// -// private val senderString: String = -// "3456789101112131415161718192345678910111213141516171819261718192113456789101112131415161718192345678910111213141516171819261718192" -// private val sender: ByteString = senderString.unsafeHexToByteString -// private val bondsValidator = (sender, 1L) -// -// private val secondBlock: BlockMessage = -// getRandomBlock( -// setValidator = sender.some, -// setDeploys = randomDeploys.some, -// setJustifications = List(genesisBlock.blockHash).some, -// setBonds = Map(bondsValidator).some -// ) -// -// "getBlock" should "return successful block info response" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = 
createBlockDagStorage[IO] -// -// for { -// _ <- prepareDagStorage[IO] -// blockApi <- createBlockApi[IO]("", 1) -// _ = Mockito.clearInvocations(bs, bds) -// hash = secondBlock.blockHash.toHexString -// blockQueryResponse <- blockApi.getBlock(hash) -// } yield { -// blockQueryResponse shouldBe 'right -// val blockInfo = blockQueryResponse.value -// blockInfo.deploys shouldBe randomDeploys.map(_.toDeployInfo) -// blockInfo.blockInfo shouldBe BlockApi.getLightBlockInfo(secondBlock) -// -// bs.get(Seq(secondBlock.blockHash)) wasCalled once -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasCalled twice -// bds.lookupByDeployId(*) wasNever called -// } -// } -// -// it should "return error when no block exists" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = createBlockDagStorage[IO] -// -// for { -// blockApi <- createBlockApi[IO]("", 1) -// hash = badTestHashQuery -// blockQueryResponse <- blockApi.getBlock(hash) -// } yield { -// blockQueryResponse shouldBe 'left -// blockQueryResponse.left.value shouldBe s"Error: Failure to find block with hash: $badTestHashQuery" -// -// bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) wasCalled once -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasCalled once -// bds.lookupByDeployId(*) wasNever called -// } -// } -// -// it should "return error when hash is invalid hex string" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = createBlockDagStorage[IO] -// -// for { -// blockApi <- createBlockApi[IO]("", 1) -// hash = invalidHexQuery -// blockQueryResponse <- blockApi.getBlock(hash) -// } yield { -// blockQueryResponse shouldBe 'left -// blockQueryResponse.left.value shouldBe s"Input hash value is not valid hex string: $invalidHexQuery" -// -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasNever called -// 
bds.lookupByDeployId(*) wasNever called -// } -// } -// -// it should "return error when hash is to short" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = createBlockDagStorage[IO] -// -// for { -// blockApi <- createBlockApi[IO]("", 1) -// hash = tooShortQuery -// blockQueryResponse <- blockApi.getBlock(hash) -// } yield { -// blockQueryResponse shouldBe 'left -// blockQueryResponse.left.value shouldBe s"Input hash value must be at least 6 characters: $tooShortQuery" -// -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasNever called -// bds.lookupByDeployId(*) wasNever called -// } -// } -// -// "findDeploy" should "return successful block info response when a block contains the deploy with given signature" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = createBlockDagStorage[IO] -// -// for { -// _ <- prepareDagStorage[IO] -// blockApi <- createBlockApi[IO]("", 1) -// _ = Mockito.clearInvocations(bs, bds) -// deployId = randomDeploys.head.deploy.sig -// blockQueryResponse <- blockApi.findDeploy(deployId) -// } yield { -// blockQueryResponse shouldBe 'right -// blockQueryResponse.value shouldBe BlockApi.getLightBlockInfo(secondBlock) -// -// bs.get(Seq(secondBlock.blockHash)) wasCalled once -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasNever called -// bds.lookupByDeployId(deployId) wasCalled once -// } -// } -// -// it should "return an error when no block contains the deploy with the given signature" in { -// implicit val bs = createBlockStore[IO] -// implicit val bds = createBlockDagStorage[IO] -// -// for { -// blockApi <- createBlockApi[IO]("", 1) -// blockQueryResponse <- blockApi.findDeploy(unknownDeploy) -// } yield { -// blockQueryResponse shouldBe 'left -// blockQueryResponse.left.value shouldBe -// s"Couldn't find block containing deploy with id: ${PrettyPrinter.buildStringNoLimit(unknownDeploy)}" 
-// -// verifyNoMoreInteractions(bs) -// -// bds.insert(*, *) wasNever called -// bds.getRepresentation wasNever called -// bds.lookupByDeployId(unknownDeploy) wasCalled once -// } -// } -// -// private def createBlockStore[F[_]: Sync] = { -// val bs = mock[BlockStore[F]] -// bs.put(Seq((genesisBlock.blockHash, genesisBlock))) returns ().pure -// bs.put(Seq((secondBlock.blockHash, secondBlock))) returns ().pure -// bs.get(Seq(secondBlock.blockHash)) returnsF Seq(secondBlock.some) -// bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) returnsF Seq(None) -// bs -// } -// -// private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { -// val genesisHash: ByteString = RuntimeManager.emptyStateHashFixed -// -// val state = Ref.unsafe[F, DagRepresentation]( -// DagRepresentation( -// Set(), -// Map(), -// SortedMap(), -// DagMessageState(), -// Map( -// Set(genesisHash) -> FringeData( -// FringeData.fringeHash(Set.empty), -// Set.empty, -// Set.empty, -// genesisHash.toBlake2b256Hash, -// Set.empty, -// Set.empty, -// Set.empty -// ) -// ) -// ) -// ) -// -// val bds = mock[BlockDagStorage[F]] -// -// bds.insert(*, *) answers { (bmd: BlockMetadata, b: BlockMessage) => -// state.update { s => -// val newDagSet = s.dagSet + b.blockHash -// -// val newChildMap = b.justifications.foldLeft(s.childMap) { -// case (m, h) => m + (h -> (m.getOrElse(h, Set.empty) + b.blockHash)) -// } + (b.blockHash -> Set.empty[BlockHash]) -// -// val newHeightMap = s.heightMap + (b.blockNumber -> (s.heightMap -// .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) -// -// val seen = b.justifications -// .flatMap(h => s.dagMessageState.msgMap(h).seen) -// .toSet ++ b.justifications + b.blockHash -// -// val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) -// -// val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { -// case (acc, (_, msg)) => -// acc + acc -// .find(_.sender == msg.sender) -// .map(m => if (msg.height > 
m.height) msg else m) -// .getOrElse(msg) -// } -// val newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) -// -// s.copy( -// dagSet = newDagSet, -// childMap = newChildMap, -// heightMap = newHeightMap, -// dagMessageState = newDagMessageState -// ) -// } -// } -// -// bds.getRepresentation returns state.get -// -// bds.lookupByDeployId(randomDeploys.head.deploy.sig) returnsF secondBlock.blockHash.some -// bds.lookupByDeployId(unknownDeploy) returnsF None -// -// bds -// } -// -// // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) -// def toMessage( -// m: BlockMessage, -// seen: Set[BlockHash] = Set.empty[BlockHash] -// ): Message[BlockHash, Validator] = -// Message[BlockHash, Validator]( -// m.blockHash, -// m.blockNumber, -// m.sender, -// m.seqNum, -// m.bonds, -// m.justifications.toSet, -// Set(), -// seen -// ) -// -// private def prepareDagStorage[F[_]: Sync: BlockDagStorage: BlockStore]: F[Unit] = { -// import coop.rchain.blockstorage.syntax._ -// def insertToDag(b: BlockMessage, stateHash: StateHash): F[Unit] = -// BlockDagStorage[F].insert(BlockMetadata.fromBlock(b).copy(fringeStateHash = stateHash), b) -// for { -// _ <- List(genesisBlock, secondBlock).traverse(BlockStore[F].put(_)) -// _ <- insertToDag(genesisBlock, genesisBlock.postStateHash) -// _ <- insertToDag(secondBlock, RuntimeManager.emptyStateHashFixed) -// } yield () -// } -//} +class BlockQueryResponseAPITest + extends AsyncFlatSpec + with AsyncIOSpec + with Matchers + with EitherValues + with BlockDagStorageFixture + with BlockApiFixture + with IdiomaticMockito + with IdiomaticMockitoCats + with ArgumentMatchersSugar { + implicit val spanEff: NoopSpan[IO] = NoopSpan[IO]() + implicit val log: Log[IO] = mock[Log[IO]] + implicit val runtimeManager: RuntimeManager[IO] = mock[RuntimeManager[IO]] + + private val tooShortQuery = "12345" + private val badTestHashQuery = "1234acd" + private val invalidHexQuery = "No such a 
hash" + private val unknownDeploy = ByteString.copyFromUtf8("asdfQwertyUiopxyzcbv") + + private val genesisBlock: BlockMessage = getRandomBlock(setJustifications = Seq().some) + + private val deployCount = 10 + private val randomDeploys = + (0 until deployCount).toList + .traverse(i => ConstructDeploy.basicProcessedDeploy[IO](i)) + .unsafeRunSync + + private val senderString: String = + "3456789101112131415161718192345678910111213141516171819261718192113456789101112131415161718192345678910111213141516171819261718192" + private val sender: ByteString = senderString.unsafeHexToByteString + private val bondsValidator = (sender, 1L) + + private val secondBlock: BlockMessage = + getRandomBlock( + setValidator = sender.some, + setDeploys = randomDeploys.some, + setJustifications = List(genesisBlock.blockHash).some, + setBonds = Map(bondsValidator).some + ) + + "getBlock" should "return successful block info response" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + _ <- prepareDagStorage[IO] + blockApi <- createBlockApi[IO]("", 1) + _ = Mockito.clearInvocations(bs, bds) + hash = secondBlock.blockHash.toHexString + blockQueryResponse <- blockApi.getBlock(hash) + } yield { + blockQueryResponse shouldBe 'right + val blockInfo = blockQueryResponse.value + blockInfo.deploys shouldBe randomDeploys.map(_.toDeployInfo) + blockInfo.blockInfo shouldBe BlockApi.getLightBlockInfo(secondBlock) + + bs.get(Seq(secondBlock.blockHash)) wasCalled once + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasCalled twice + bds.lookupByDeployId(*) wasNever called + } + } + + it should "return error when no block exists" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + blockApi <- createBlockApi[IO]("", 1) + hash = badTestHashQuery + blockQueryResponse <- blockApi.getBlock(hash) + } yield { + blockQueryResponse shouldBe 'left + 
blockQueryResponse.left.value shouldBe s"Error: Failure to find block with hash: $badTestHashQuery" + + bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) wasCalled once + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasCalled once + bds.lookupByDeployId(*) wasNever called + } + } + + it should "return error when hash is invalid hex string" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + blockApi <- createBlockApi[IO]("", 1) + hash = invalidHexQuery + blockQueryResponse <- blockApi.getBlock(hash) + } yield { + blockQueryResponse shouldBe 'left + blockQueryResponse.left.value shouldBe s"Input hash value is not valid hex string: $invalidHexQuery" + + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasNever called + bds.lookupByDeployId(*) wasNever called + } + } + + it should "return error when hash is to short" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + blockApi <- createBlockApi[IO]("", 1) + hash = tooShortQuery + blockQueryResponse <- blockApi.getBlock(hash) + } yield { + blockQueryResponse shouldBe 'left + blockQueryResponse.left.value shouldBe s"Input hash value must be at least 6 characters: $tooShortQuery" + + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasNever called + bds.lookupByDeployId(*) wasNever called + } + } + + "findDeploy" should "return successful block info response when a block contains the deploy with given signature" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + _ <- prepareDagStorage[IO] + blockApi <- createBlockApi[IO]("", 1) + _ = Mockito.clearInvocations(bs, bds) + deployId = randomDeploys.head.deploy.sig + blockQueryResponse <- blockApi.findDeploy(deployId) + } yield { + blockQueryResponse shouldBe 'right + blockQueryResponse.value 
shouldBe BlockApi.getLightBlockInfo(secondBlock) + + bs.get(Seq(secondBlock.blockHash)) wasCalled once + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasNever called + bds.lookupByDeployId(deployId) wasCalled once + } + } + + it should "return an error when no block contains the deploy with the given signature" in { + implicit val bs = createBlockStore[IO] + implicit val bds = createBlockDagStorage[IO] + + for { + blockApi <- createBlockApi[IO]("", 1) + blockQueryResponse <- blockApi.findDeploy(unknownDeploy) + } yield { + blockQueryResponse shouldBe 'left + blockQueryResponse.left.value shouldBe + s"Couldn't find block containing deploy with id: ${PrettyPrinter.buildStringNoLimit(unknownDeploy)}" + + verifyNoMoreInteractions(bs) + + bds.insert(*, *) wasNever called + bds.getRepresentation wasNever called + bds.lookupByDeployId(unknownDeploy) wasCalled once + } + } + + private def createBlockStore[F[_]: Sync] = { + val bs = mock[BlockStore[F]] + bs.put(Seq((genesisBlock.blockHash, genesisBlock))) returns ().pure + bs.put(Seq((secondBlock.blockHash, secondBlock))) returns ().pure + bs.get(Seq(secondBlock.blockHash)) returnsF Seq(secondBlock.some) + bs.get(Seq(badTestHashQuery.unsafeHexToByteString)) returnsF Seq(None) + bs + } + + private def createBlockDagStorage[F[_]: Sync]: BlockDagStorage[F] = { + val genesisHash: ByteString = RuntimeManager.emptyStateHashFixed + + val state = Ref.unsafe[F, DagRepresentation]( + DagRepresentation( + Set(), + Map(), + SortedMap(), + DagMessageState(), + Map( + Set(genesisHash) -> FringeData( + FringeData.fringeHash(Set.empty), + Set.empty, + Set.empty, + genesisHash.toBlake2b256Hash, + Set.empty, + Set.empty, + Set.empty + ) + ) + ) + ) + + val bds = mock[BlockDagStorage[F]] + + bds.insert(*, *) answers { (bmd: BlockMetadata, b: BlockMessage) => + state.update { s => + val newDagSet = s.dagSet + b.blockHash + + val newChildMap = b.justifications.foldLeft(s.childMap) { + case (m, h) => 
m + (h -> (m.getOrElse(h, Set.empty) + b.blockHash)) + } + (b.blockHash -> Set.empty[BlockHash]) + + val newHeightMap = s.heightMap + (b.blockNumber -> (s.heightMap + .getOrElse(b.blockNumber, Set.empty) + b.blockHash)) + + val seen = b.justifications + .flatMap(h => s.dagMessageState.msgMap(h).seen) + .toSet ++ b.justifications + b.blockHash + + val newMsgMap = s.dagMessageState.msgMap + (b.blockHash -> toMessage(b, seen)) + + val newLatestMsgs = newMsgMap.foldLeft(Set.empty[Message[BlockHash, Validator]]) { + case (acc, (_, msg)) => + acc + acc + .find(_.sender == msg.sender) + .map(m => if (msg.height > m.height) msg else m) + .getOrElse(msg) + } + val newDagMessageState = s.dagMessageState.copy(newLatestMsgs, newMsgMap) + + s.copy( + dagSet = newDagSet, + childMap = newChildMap, + heightMap = newHeightMap, + dagMessageState = newDagMessageState + ) + } + } + + bds.getRepresentation returns state.get + + bds.lookupByDeployId(randomDeploys.head.deploy.sig) returnsF secondBlock.blockHash.some + bds.lookupByDeployId(unknownDeploy) returnsF None + + bds + } + + // Default args only available for public method in Scala 2.12 (https://github.com/scala/bug/issues/12168) + def toMessage( + m: BlockMessage, + seen: Set[BlockHash] = Set.empty[BlockHash] + ): Message[BlockHash, Validator] = + Message[BlockHash, Validator]( + m.blockHash, + m.blockNumber, + m.sender, + m.seqNum, + m.bonds, + m.justifications.toSet, + Set(), + seen + ) + + private def prepareDagStorage[F[_]: Sync: BlockDagStorage: BlockStore]: F[Unit] = { + import coop.rchain.blockstorage.syntax._ + def insertToDag(b: BlockMessage, stateHash: StateHash): F[Unit] = + BlockDagStorage[F].insert(BlockMetadata.fromBlock(b).copy(fringeStateHash = stateHash), b) + for { + _ <- List(genesisBlock, secondBlock).traverse(BlockStore[F].put(_)) + _ <- insertToDag(genesisBlock, genesisBlock.postStateHash) + _ <- insertToDag(secondBlock, RuntimeManager.emptyStateHashFixed) + } yield () + } +} diff --git 
a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala index bc8d8d06801..e487d0028bd 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BlocksResponseAPITest.scala @@ -19,7 +19,6 @@ import org.scalatest.EitherValues import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers import cats.effect.testing.scalatest.AsyncIOSpec -import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap @@ -88,7 +87,6 @@ import scala.collection.immutable.SortedMap // ) // } yield List(genesis, b2, b3, b4, b5, b6, b7, b8) // -// import coop.rchain.shared.RChainScheduler._ // "getBlocks" should "return all blocks" in { // implicit val (blockStore, blockDagStorage, runtimeManager) = createMocks[IO] // diff --git a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala index cf3fb0a775e..a1e95256b73 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/BondedStatusAPITest.scala @@ -20,7 +20,7 @@ import coop.rchain.models.FringeData import coop.rchain.models.Validator.Validator import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ -import coop.rchain.shared.{Log, Time} +import coop.rchain.shared.Log import coop.rchain.shared.scalatestcontrib._ import org.mockito.cats.IdiomaticMockitoCats import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} @@ -30,18 +30,17 @@ import org.scalatest.matchers.should.Matchers import scala.collection.immutable.SortedMap -// TODO enable when CE is migrated to 3 (cats.effect.testing.scalatest is not available for CE2) -//class BondedStatusAPITest -// extends AsyncFlatSpec -// with AsyncIOSpec -// with Matchers -// with 
EitherValues -// with BlockGenerator -// with BlockDagStorageFixture -// with BlockApiFixture -// with IdiomaticMockito -// with IdiomaticMockitoCats -// with ArgumentMatchersSugar { +class BondedStatusAPITest + extends AsyncFlatSpec + with AsyncIOSpec + with Matchers + with EitherValues + with BlockGenerator + with BlockDagStorageFixture + with BlockApiFixture + with IdiomaticMockito + with IdiomaticMockitoCats + with ArgumentMatchersSugar { // // 4 nodes with 3 validators bonded // private val keys = randomValidatorKeyPairs.take(3).toList // private val initialComputeBondsResult = keys @@ -52,19 +51,18 @@ import scala.collection.immutable.SortedMap // setBonds = initialComputeBondsResult.some, // setValidator = toValidatorOpt(keys.head._1) // ) -// import coop.rchain.shared.RChainScheduler._ // // "bondStatus" should "return true for bonded validator" in { -// implicit val (c, log, bds, bs, rm, sp) = createMocks[IO] +// implicit val (log, bds, bs, rm, sp) = createMocks[IO] // // for { // v1 <- IO.delay(ValidatorIdentity(keys.head._1)) // v2 = ValidatorIdentity(keys(1)._1) // v3 = ValidatorIdentity(keys(2)._1) // -// _ <- bondedStatus(v1, v1.publicKey, gB) shouldBeF true -// _ <- bondedStatus(v2, v2.publicKey, gB) shouldBeF true -// _ <- bondedStatus(v3, v3.publicKey, gB) shouldBeF true +// _ <- bondedStatus[IO](v1, v1.publicKey, gB) shouldBeF true +// _ <- bondedStatus[IO](v2, v2.publicKey, gB) shouldBeF true +// _ <- bondedStatus[IO](v3, v3.publicKey, gB) shouldBeF true // } yield { // bs.get(Seq(gB.blockHash)) wasCalled 3.times // verifyNoMoreInteractions(bs) @@ -74,10 +72,10 @@ import scala.collection.immutable.SortedMap // } // // "bondStatus" should "return false for not bonded validators" in { -// implicit val (c, log, bds, bs, rm, sp) = createMocks[IO] -// val genesisValidator = ValidatorIdentity(keys.head._1) +// implicit val (log, bds, bs, rm, sp) = createMocks[IO] +// val genesisValidator = ValidatorIdentity(keys.head._1) // for { -// _ <- 
bondedStatus(genesisValidator, createValidator.publicKey, gB) shouldBeF false +// _ <- bondedStatus[IO](genesisValidator, createValidator.publicKey, gB) shouldBeF false // } yield { // bs.get(Seq(gB.blockHash)) wasCalled once // verifyNoMoreInteractions(bs) @@ -87,7 +85,7 @@ import scala.collection.immutable.SortedMap // } // // "bondStatus" should "return true for newly bonded validator" in { -// implicit val (c, log, bds, bs, _, sp) = createMocks[IO] +// implicit val (log, bds, bs, _, sp) = createMocks[IO] // // val genesisValidator = ValidatorIdentity(keys.head._1) // val newValidator = createValidator @@ -100,7 +98,7 @@ import scala.collection.immutable.SortedMap // // for { // _ <- BondingUtil.bondingDeploy[IO](stake, newValidator.privateKey, shardId = gB.shardId) -// _ <- bondedStatus(genesisValidator, newValidator.publicKey, gB) shouldBeF false +// _ <- bondedStatus[IO](genesisValidator, newValidator.publicKey, gB) shouldBeF false // b1 = getRandomBlock( // setJustifications = Seq(gB.blockHash).some, // setBonds = newComputeBondsResult.some, @@ -108,7 +106,7 @@ import scala.collection.immutable.SortedMap // ) // // // b1 is now finalized, hence n4 is now bonded -// _ <- bondedStatus(genesisValidator, newValidator.publicKey, b1) shouldBeF true +// _ <- bondedStatus[IO](genesisValidator, newValidator.publicKey, b1) shouldBeF true // } yield { // bs.get(Seq(gB.blockHash)) wasCalled twice // verifyNoMoreInteractions(bs) @@ -119,8 +117,7 @@ import scala.collection.immutable.SortedMap // } // // private def createMocks[F[_]: Async: Sync] -// : (Concurrent[F], Log[F], BlockDagStorage[F], BlockStore[F], RuntimeManager[F], Span[F]) = { -// val c = Concurrent[F] +// : (Log[F], BlockDagStorage[F], BlockStore[F], RuntimeManager[F], Span[F]) = { // val sp = mock[Span[F]] // // val log = mock[Log[F]] @@ -152,7 +149,7 @@ import scala.collection.immutable.SortedMap // val rm = mock[RuntimeManager[F]] // rm.computeBonds(*) returnsF initialComputeBondsResult // -// (c, log, 
bds, bs, rm, sp) +// (log, bds, bs, rm, sp) // } // // private def toValidatorOpt(pk: PrivateKey): Option[Validator] = pk.bytes.toByteString.some @@ -183,4 +180,4 @@ import scala.collection.immutable.SortedMap // val (privateKey, _) = Secp256k1.newKeyPair // ValidatorIdentity(privateKey) // } -//} +} diff --git a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala index cd9da149663..47caa3881f2 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/ExploratoryDeployAPITest.scala @@ -42,7 +42,6 @@ import scala.collection.immutable.SortedMap // with IdiomaticMockito // with IdiomaticMockitoCats // with ArgumentMatchersSugar { -// import coop.rchain.shared.RChainScheduler._ // // private val genesis = getRandomBlock() // private val b1 = getRandomBlock() diff --git a/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala index e448e094db7..197f710c7a5 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala +++ b/casper/src/test/scala/coop/rchain/casper/api/LastFinalizedAPITest.scala @@ -41,7 +41,6 @@ import scala.collection.immutable.SortedMap // // private val createValidator = ValidatorIdentity(randomValidatorKeyPairs.take(1).toList.head._1) // private val createSender = createValidator.publicKey.bytes.toByteString -// import coop.rchain.shared.RChainScheduler._ // // "isFinalized" should "return true for a block placed in the DAG" in { // implicit val (log, sp, rm, bs, bds) = createMocks[IO] diff --git a/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala b/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala index 51af60e0b79..822e694c039 100644 --- a/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala 
+++ b/casper/src/test/scala/coop/rchain/casper/api/ListeningNameAPITest.scala @@ -27,7 +27,6 @@ import org.scalatest._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers import cats.effect.testing.scalatest.AsyncIOSpec -import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala index b59433ea9af..e9261f205f7 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala @@ -18,12 +18,9 @@ class MultiParentCasperDeploySpec import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - val genesis = buildGenesis() it should "not create a block with a repeated deploy" in effectTest { - implicit val timeEff = new LogicalTime[Effect] TestNode.networkEff(genesis, networkSize = 2).use { nodes => val List(node0, node1) = nodes.toList for { @@ -38,8 +35,6 @@ class MultiParentCasperDeploySpec it should "fail when deploying with insufficient phlos" in effectTest { TestNode.standaloneEff(genesis).use { node => - implicit val timeEff = new LogicalTime[Effect] - for { deployData <- ConstructDeploy.sourceDeployNowF[Effect]("Nil", phloLimit = 1) r <- node.createBlock(deployData) @@ -50,8 +45,6 @@ class MultiParentCasperDeploySpec it should "succeed if given enough phlos for deploy" in effectTest { TestNode.standaloneEff(genesis).use { node => - implicit val timeEff = new LogicalTime[Effect] - for { deployData <- ConstructDeploy.sourceDeployNowF[Effect]("Nil", phloLimit = 100) r <- node.createBlock(deployData) diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala index 063e0e49991..cc504c09d32 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala @@ -19,7 +19,6 @@ import org.scalatest.matchers.should.Matchers class MultiParentCasperReportingSpec extends AnyFlatSpec with Matchers with Inspectors { import coop.rchain.casper.util.GenesisBuilder._ - import coop.rchain.shared.RChainScheduler._ implicit val timeEff: LogicalTime[Effect] = new LogicalTime[Effect] diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala index 2bf61d7f261..3c465f5910d 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/BlockReceiverEffectsSpec.scala @@ -22,7 +22,6 @@ import org.scalatest.Assertion import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers import cats.effect.testing.scalatest.AsyncIOSpec -import coop.rchain.shared.RChainScheduler._ import scala.collection.immutable.SortedMap diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala index fcdde639145..df40b8062b6 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LmdbKeyValueStoreSpec.scala @@ -46,7 +46,6 @@ class LmdbKeyValueStoreSpec } implicit val log: Log[IO] = new Log.NOPLog[IO]() - import coop.rchain.shared.RChainScheduler._ it should "put and get data from the store" in { forAll(genData) { expected => diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala 
b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala index 76c637e3ee4..9279354e7a1 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala @@ -51,8 +51,6 @@ class ValidateTest implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO]() implicit val s = Sync[IO] - import coop.rchain.shared.RChainScheduler._ - override def beforeEach(): Unit = { log.reset() timeEff.reset() @@ -464,8 +462,7 @@ class ValidateTest BlockRandomSeed.nonNegativeMergeableTagName( genesis.shardId ), - RuntimeManager.noOpExecutionTracker[IO], - rholangEC + RuntimeManager.noOpExecutionTracker[IO] ) result <- { implicit val rm = runtimeManager diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala index c6ad51f5b69..ebdf62acab1 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsBlockRequesterEffectsSpec.scala @@ -1,5 +1,6 @@ package coop.rchain.casper.engine +import cats.effect.unsafe.implicits.global import cats.effect.{Async, IO} import cats.syntax.all._ import com.google.protobuf.ByteString @@ -10,7 +11,7 @@ import coop.rchain.models.BlockHash.BlockHash import coop.rchain.models.blockImplicits import coop.rchain.shared.Log import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -19,8 +20,6 @@ import cats.effect.{Ref, Temporal} class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2StreamMatchers { - import coop.rchain.shared.RChainScheduler._ - def mkHash(s: String) = ByteString.copyFromUtf8(s) def getBlock(hash: BlockHash, number: Long, latestMessages: Seq[BlockHash]) = { @@ -81,7 +80,7 @@ class 
LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Async: Temporal: Log]( + def createMock[F[_]: Async: Log]( startBlock: BlockMessage, requestTimeout: FiniteDuration )(test: Mock[F] => F[Unit]): F[Unit] = { @@ -96,35 +95,33 @@ class LfsBlockRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str testState <- Ref.of[F, TestST](TestST(blocks = savedBlocks, invalid = Set())) // Queue for received blocks - responseQueue <- Queue.unbounded[F, BlockMessage] + responseQueue <- Channel.unbounded[F, BlockMessage] // Queue for requested block hashes - requestQueue <- Queue.unbounded[F, BlockHash] + requestQueue <- Channel.unbounded[F, BlockHash] // Queue for saved blocks - savedBlocksQueue <- Queue.unbounded[F, (BlockHash, BlockMessage)] + savedBlocksQueue <- Channel.unbounded[F, (BlockHash, BlockMessage)] // Queue for processing the internal state (ST) - processingStream <- LfsBlockRequester.stream( + processingStream <- LfsBlockRequester.stream[F]( finalizedFringe, - responseQueue.dequeue, + responseQueue.stream, blockHeightsBeforeFringe = 0, - requestQueue.enqueue1, + requestQueue.send(_).void, requestTimeout, hash => testState.get.map(_.blocks.contains(hash)), hash => testState.get.map(_.blocks(hash)), - savedBlocksQueue.enqueue1(_, _), + savedBlocksQueue.send(_, _).void, block => testState.get.map(!_.invalid.contains(block.blockHash)) ) mock = new Mock[F] { override def receiveBlock(blocks: BlockMessage*): F[Unit] = - responseQueue.enqueue(Stream.emits(blocks)).compile.drain + Stream.emits(blocks).evalMap(responseQueue.send).compile.drain - override val sentRequests: Stream[F, BlockHash] = - Stream.eval(requestQueue.dequeue1).repeat - override val savedBlocks: Stream[F, (BlockHash, BlockMessage)] = - Stream.eval(savedBlocksQueue.dequeue1).repeat + override val sentRequests: Stream[F, BlockHash] = requestQueue.stream + override val savedBlocks: Stream[F, (BlockHash, 
BlockMessage)] = savedBlocksQueue.stream override val setup: Ref[F, TestST] = testState diff --git a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala index d9a1c73fd10..099dcb4ee58 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/LfsStateRequesterEffectsSpec.scala @@ -11,14 +11,14 @@ import coop.rchain.rspace.hashing.Blake2b256Hash import coop.rchain.rspace.state.{RSpaceImporter, StateValidationError} import coop.rchain.shared.Log import fs2.Stream -import fs2.concurrent.Queue +import fs2.concurrent.Channel import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scodec.bits.ByteVector import java.nio.ByteBuffer import scala.concurrent.duration.{DurationInt, FiniteDuration} -import cats.effect.Temporal +import cats.effect.unsafe.implicits.global class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2StreamMatchers { @@ -70,7 +70,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str * * @param test test definition */ - def createMock[F[_]: Async: Temporal: Log](requestTimeout: FiniteDuration)( + def createMock[F[_]: Async: Log](requestTimeout: FiniteDuration)( test: Mock[F] => F[Unit] ): F[Unit] = { @@ -96,14 +96,14 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str for { // Queue for received store messages - responseQueue <- Queue.unbounded[F, StoreItemsMessage] + responseQueue <- Channel.unbounded[F, StoreItemsMessage] // Queue for requested state chunks - requestQueue <- Queue.unbounded[F, (StatePartPath, Int)] + requestQueue <- Channel.unbounded[F, (StatePartPath, Int)] // Queues for saved chunks - savedHistoryQueue <- Queue.unbounded[F, SavedStoreItems] - savedDataQueue <- Queue.unbounded[F, SavedStoreItems] + 
savedHistoryQueue <- Channel.unbounded[F, SavedStoreItems] + savedDataQueue <- Channel.unbounded[F, SavedStoreItems] importer = new RSpaceImporter[F] { override type KeyHash = Blake2b256Hash @@ -113,7 +113,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str toBuffer: Value => ByteBuffer ): F[Unit] = { val items = data.map(_.map(toBuffer andThen ByteString.copyFrom)) - savedHistoryQueue.enqueue1(items) + savedHistoryQueue.send(items).void } override def setDataItems[Value]( @@ -121,7 +121,7 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str toBuffer: Value => ByteBuffer ): F[Unit] = { val items = data.map(_.map(toBuffer andThen ByteString.copyFrom)) - savedDataQueue.enqueue1(items) + savedDataQueue.send(items).void } override def setRoot(key: KeyHash): F[Unit] = ().pure[F] @@ -131,10 +131,10 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str } // Queue for processing the internal state (ST) - processingStream <- LfsTupleSpaceRequester.stream( + processingStream <- LfsTupleSpaceRequester.stream[F]( finalizedFringe, responseQueue, - requestQueue.enqueue1(_, _), + requestQueue.send(_, _).void, requestTimeout, importer, mockValidateStateChunk @@ -142,14 +142,11 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str mock = new Mock[F] { override def receive(msgs: StoreItemsMessage*): F[Unit] = - responseQueue.enqueue(Stream.emits(msgs)).compile.drain + Stream.emits(msgs).evalMap(responseQueue.send).compile.drain - override val sentRequests: Stream[F, (StatePartPath, Int)] = - Stream.eval(requestQueue.dequeue1).repeat - override val savedHistory: Stream[F, SavedStoreItems] = - Stream.eval(savedHistoryQueue.dequeue1).repeat - override val savedData: Stream[F, SavedStoreItems] = - Stream.eval(savedDataQueue.dequeue1).repeat + override val sentRequests: Stream[F, (StatePartPath, Int)] = requestQueue.stream + override val savedHistory: Stream[F, 
SavedStoreItems] = savedHistoryQueue.stream + override val savedData: Stream[F, SavedStoreItems] = savedDataQueue.stream override val stream: Stream[F, ST[StatePartPath]] = processingStream } @@ -161,8 +158,6 @@ class LfsStateRequesterEffectsSpec extends AnyFlatSpec with Matchers with Fs2Str implicit val logEff: Log[IO] = Log.log[IO] - import coop.rchain.shared.RChainScheduler._ - /** * Test runner * diff --git a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala index 14454716bee..3e15cbe5b46 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala @@ -29,7 +29,6 @@ import cats.effect.Ref import cats.effect.unsafe.implicits.global class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { - import coop.rchain.shared.RChainScheduler._ val local: PeerNode = peerNode("src", 40400) diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala index e47d1ee1b03..fddf1203f24 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/GenesisTest.scala @@ -24,7 +24,6 @@ import coop.rchain.shared.syntax._ import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.shared.RChainScheduler._ import java.io.PrintWriter import java.nio.file.{Files, Path} @@ -294,8 +293,7 @@ object GenesisTest { rStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(rchainShardId), - t, - rholangEC + t ) result <- body(runtimeManager, genesisPath, log) _ <- Sync[F].delay { storePath.recursivelyDelete() } diff --git 
a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala index 33c3de1e0f9..6dc411f0d11 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockDagStorageFixture.scala @@ -25,7 +25,6 @@ trait BlockDagStorageFixture extends BeforeAndAfter { self: Suite => )(f: BlockStore[IO] => BlockDagStorage[IO] => RuntimeManager[IO] => IO[R]): R = { implicit val metrics = new MetricsNOP[IO]() implicit val log = Log.log[IO] - import coop.rchain.shared.RChainScheduler._ def create(dir: Path) = for { @@ -49,7 +48,6 @@ trait BlockDagStorageFixture extends BeforeAndAfter { self: Suite => def withStorage[R](f: BlockStore[IO] => BlockDagStorage[IO] => IO[R]): R = { implicit val metrics = new MetricsNOP[IO]() implicit val log = Log.log[IO] - import coop.rchain.shared.RChainScheduler._ BlockDagStorageTestFixture.withStorageF[IO].use(Function.uncurried(f).tupled).unsafeRunSync } diff --git a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala index 74fb9cd2674..d99423905c4 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/RhoSpec.scala @@ -26,7 +26,6 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.{Duration, FiniteDuration} -import coop.rchain.shared.RChainScheduler._ class RhoSpec( testObject: CompiledRholangSource[_], @@ -145,7 +144,6 @@ class RhoSpec( RhoRuntime.createRuntime( _, BlockRandomSeed.nonNegativeMergeableTagName(shardId), - rholangEC, additionalSystemProcesses = testFrameworkContracts(testResultCollector) ) ) diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index 
0330efb715d..7c394a86aa4 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -384,7 +384,6 @@ object TestNode { implicit val log = Log.log[F] implicit val metricEff = new Metrics.MetricsNOP[F] implicit val spanEff = new NoopSpan[F] - import RChainScheduler._ for { newStorageDir <- Resources.copyStorage[F](storageDir) kvm <- Resource.eval(Resources.mkTestRNodeStoreManager(newStorageDir)) @@ -398,8 +397,7 @@ object TestNode { rSpaceStore, mStore, BlockRandomSeed.nonNegativeMergeableTagName(genesis.shardId), - RuntimeManager.noOpExecutionTracker[F], - rholangEC + RuntimeManager.noOpExecutionTracker[F] ) ) diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala index e0cf03c171e..08e2a347352 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/GrpcKademliaRPC.scala @@ -1,5 +1,6 @@ package coop.rchain.comm.discovery +import cats.effect.kernel.Resource import cats.effect.std.Dispatcher import cats.effect.{Async, Sync} import cats.syntax.all._ @@ -10,15 +11,14 @@ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.metrics.Metrics import coop.rchain.metrics.implicits.MetricsSyntaxConversion import coop.rchain.shared.syntax._ +import fs2.grpc.syntax.all._ import io.grpc._ import io.grpc.netty._ -import scala.concurrent.ExecutionContext import scala.concurrent.duration._ class GrpcKademliaRPC[F[_]: Async: RPConfAsk: Metrics]( networkId: String, - timeout: FiniteDuration, - grpcEC: ExecutionContext + timeout: FiniteDuration ) extends KademliaRPC[F] { implicit private val metricsSource: Metrics.Source = @@ -52,25 +52,17 @@ class GrpcKademliaRPC[F[_]: Async: RPConfAsk: Metrics]( } } yield peers - private def withClient[A](peer: PeerNode, timeout: FiniteDuration, enforce: Boolean = false)( + 
private def withClient[A](peer: PeerNode, timeout: FiniteDuration)( f: KademliaRPCServiceFs2Grpc[F, Metadata] => F[A] - ): F[A] = - for { - channel <- clientChannel(peer, timeout) - stub = KademliaRPCServiceFs2Grpc.stub(channel) - result <- f(stub) - _ <- Sync[F].delay(channel.shutdown()) - } yield result + ): F[A] = { + val channelResource = NettyChannelBuilder + .forAddress(peer.endpoint.host, peer.endpoint.udpPort) + .idleTimeout(timeout.toMillis, MILLISECONDS) + .usePlaintext() + .resource[F] - private def clientChannel(peer: PeerNode, timeout: FiniteDuration): F[ManagedChannel] = - for { - c <- Sync[F].delay { - NettyChannelBuilder - .forAddress(peer.endpoint.host, peer.endpoint.udpPort) - .idleTimeout(timeout.toMillis, MILLISECONDS) - .executor(grpcEC.execute) - .usePlaintext() - .build() - } - } yield c + val clientResource = channelResource.flatMap(x => KademliaRPCServiceFs2Grpc.stubResource(x)) + + clientResource.use(f) + } } diff --git a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala index 5e40ecd0c00..08446a38eed 100644 --- a/comm/src/main/scala/coop/rchain/comm/discovery/package.scala +++ b/comm/src/main/scala/coop/rchain/comm/discovery/package.scala @@ -1,35 +1,36 @@ package coop.rchain.comm -import cats.effect.{AsyncEffect, Resource, Sync} +import cats.effect.std.Dispatcher +import cats.effect.{Async, Resource, Sync} import com.google.protobuf.ByteString import coop.rchain.metrics.Metrics import coop.rchain.sdk.syntax.all._ import io.grpc import io.grpc.netty.NettyServerBuilder + import scala.concurrent.ExecutionContext package object discovery { val DiscoveryMetricsSource: Metrics.Source = Metrics.Source(CommMetricsSource, "discovery.kademlia") - def acquireKademliaRPCServer[F[_]: Sync: AsyncEffect]( + def acquireKademliaRPCServer[F[_]: Async]( networkId: String, port: Int, pingHandler: PeerNode => F[Unit], - lookupHandler: (PeerNode, Array[Byte]) => F[Seq[PeerNode]], - 
grpcEC: ExecutionContext - ): Resource[F, grpc.Server] = { - val server = NettyServerBuilder - .forPort(port) - .executor(grpcEC.execute) - .addService( - KademliaRPCServiceFs2Grpc - .bindService(new GrpcKademliaRPCServer(networkId, pingHandler, lookupHandler)) - ) - .build + lookupHandler: (PeerNode, Array[Byte]) => F[Seq[PeerNode]] + ): Resource[F, grpc.Server] = + Dispatcher[F].flatMap { d => + val server = NettyServerBuilder + .forPort(port) + .addService( + KademliaRPCServiceFs2Grpc + .bindService(d, new GrpcKademliaRPCServer(networkId, pingHandler, lookupHandler)) + ) + .build - Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.void())) - } + Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.void())) + } def toPeerNode(n: Node): PeerNode = PeerNode(NodeIdentifier(n.id.toByteArray), Endpoint(n.host.toStringUtf8, n.tcpPort, n.udpPort)) diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala index 1277b9ff5ff..293f3750d36 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportClient.scala @@ -1,8 +1,10 @@ package coop.rchain.comm.transport import cats.Applicative +import cats.effect.kernel.Resource +import cats.effect.std.Dispatcher import cats.effect.syntax.all._ -import cats.effect.{Async, ConcurrentEffect, Sync} +import cats.effect.{Async, Sync} import cats.syntax.all._ import coop.rchain.comm.CommError.{protocolException, CommErr} import coop.rchain.comm._ @@ -23,6 +25,7 @@ import scala.concurrent.ExecutionContext import scala.concurrent.duration.{FiniteDuration, _} import scala.util._ import cats.effect.{Deferred, Ref} +import fs2.grpc.client.ClientOptions /** * GRPC channel with a message buffer protecting it from resource exhaustion @@ -70,17 +73,19 @@ class GrpcTransportClient[F[_]: Async: Log: Metrics]( case 
Left(t) => t.raiseError[F, SslContext] } - private def createChannel(peer: PeerNode): F[BufferedGrpcStreamChannel[F]] = + private def createChannel( + peer: PeerNode, + d: Dispatcher[F] + ): F[BufferedGrpcStreamChannel[F]] = for { _ <- Log[F].info(s"Creating new channel to peer ${peer.toAddress}") clientSslContext <- clientSslContextTask grpcChannel = NettyChannelBuilder .forAddress(peer.endpoint.host, peer.endpoint.tcpPort) - .executor(ioEC) .maxInboundMessageSize(maxMessageSize) .negotiationType(NegotiationType.TLS) .sslContext(clientSslContext) - .intercept(new SslSessionClientInterceptor(networkId)) + .intercept(new SslSessionClientInterceptor[F](networkId, d)) .overrideAuthority(peer.id.toString) .build() buffer <- StreamObservable[F](peer, clientQueueSize, cache) @@ -95,7 +100,7 @@ class GrpcTransportClient[F[_]: Async: Log: Metrics]( channel = BufferedGrpcStreamChannel(grpcChannel, buffer, buferSubscriber.interruptWhen(sig)) } yield channel - private def getChannel(peer: PeerNode): F[BufferedGrpcStreamChannel[F]] = + private def getChannel(peer: PeerNode, d: Dispatcher[F]): F[BufferedGrpcStreamChannel[F]] = for { cDefNew <- Deferred[F, BufferedGrpcStreamChannel[F]] ret <- channelsMap.modify[(Deferred[F, BufferedGrpcStreamChannel[F]], Boolean)] { chMap => @@ -107,7 +112,7 @@ class GrpcTransportClient[F[_]: Async: Log: Metrics]( } } (cDef, newChannel) = ret - _ <- Applicative[F].whenA(newChannel)(createChannel(peer) >>= cDef.complete) + _ <- Applicative[F].whenA(newChannel)(createChannel(peer, d) >>= cDef.complete) c <- cDef.get // In case underlying gRPC transport is terminated - clean resources, // remove current record and try one more time @@ -115,7 +120,7 @@ class GrpcTransportClient[F[_]: Async: Log: Metrics]( Log[F].info( s"Channel to peer ${peer.toAddress} is terminated, removing from connections map" ) >> - channelsMap.update(_ - peer) >> getChannel(peer) + channelsMap.update(_ - peer) >> getChannel(peer, d) else c.pure[F] _ <- Sync[F] 
.start(r.buferSubscriber.compile.drain) @@ -129,32 +134,40 @@ class GrpcTransportClient[F[_]: Async: Log: Metrics]( private def withClient[A](peer: PeerNode, timeout: FiniteDuration)( request: TransportLayerFs2Grpc[F, Metadata] => F[CommErr[A]] - ): F[CommErr[A]] = - (for { - channel <- getChannel(peer) - co = CallOptions.DEFAULT.withDeadlineAfter(timeout.toMillis, MILLISECONDS) - stub = TransportLayerFs2Grpc.stub(channel.grpcTransport, co) - result <- request(stub) - } yield result).attempt.map(_.fold(e => Left(protocolException(e)), identity)) + ): F[CommErr[A]] = { + val co = CallOptions.DEFAULT.withDeadlineAfter(timeout.toMillis, MILLISECONDS) + Dispatcher[F].use { d => + (for { + channel <- getChannel(peer, d) + stub = TransportLayerFs2Grpc.stub( + d, + channel.grpcTransport, + ClientOptions.default.configureCallOptions(_ => co) + ) + result <- request(stub) + } yield result).attempt.map(_.fold(e => Left(protocolException(e)), identity)) + } + } def send(peer: PeerNode, msg: Protocol): F[CommErr[Unit]] = withClient(peer, DefaultSendTimeout)(GrpcTransport.send(_, peer, msg)) def broadcast(peers: Seq[PeerNode], msg: Protocol): F[Seq[CommErr[Unit]]] = Stream - .fromIterator(peers.iterator) + .fromIterator(peers.iterator, 1) .parEvalMapUnorderedProcBounded(send(_, msg)) .compile .to(Seq) - def stream(peers: Seq[PeerNode], blob: Blob): F[Unit] = + def stream(peers: Seq[PeerNode], blob: Blob): F[Unit] = Dispatcher[F].use { d => Stream - .fromIterator(peers.iterator) + .fromIterator(peers.iterator, 1) .parEvalMapUnorderedProcBounded { peer => - getChannel(peer).flatMap(_.buffer._1(blob)) + getChannel(peer, d).flatMap(_.buffer._1(blob)) } .compile .drain + } private def streamBlobFile( key: String, diff --git a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala index bf54308c4b8..eb63911cea2 100644 --- 
a/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/GrpcTransportReceiver.scala @@ -75,7 +75,25 @@ object GrpcTransportReceiver { s"Inbound gRPC channel with ${peer.toAddress} closed because fiber has been cancelled." ) ) - } yield (tellBuffer.offer1 _, blobBuffer.offer1 _, stream) + } yield ( + (x: Send) => + tellBuffer + .trySend(x) + .flatMap( + _.leftTraverse( + _ => new Exception("Send channel is closed").raiseError[F, Boolean] + ).map(_.merge) + ), + (x: StreamMessage) => + blobBuffer + .trySend(x) + .flatMap( + _.leftTraverse( + _ => new Exception("Stream channel is closed").raiseError[F, Boolean] + ).map(_.merge) + ), + stream + ) for { bDefNew <- Deferred[F, MessageBuffers[F]] @@ -161,18 +179,21 @@ object GrpcTransportReceiver { ) } - import coop.rchain.shared.RChainScheduler.mainEC - val server = NettyServerBuilder - .forPort(port) - .executor(mainEC.execute) - .maxInboundMessageSize(maxMessageSize) - .sslContext(serverSslContext) - .addService(TransportLayerFs2Grpc.bindService(service)) - .intercept(new SslSessionServerInterceptor(networkId)) - .build - - val startF = Sync[F].delay(server.start()) - val stopF = Sync[F].delay(server.shutdown().awaitTermination()) - Resource.make(startF)(_ => stopF).map(_ => ()) + Dispatcher[F].flatMap { d => + val startF = Sync[F].delay( + NettyServerBuilder + .forPort(port) + .maxInboundMessageSize(maxMessageSize) + .sslContext(serverSslContext) + .addService(TransportLayerFs2Grpc.bindService(d, service)) + .intercept(new SslSessionServerInterceptor(networkId, d)) + .build + .start + ) + Resource + .make(startF)(server => Sync[F].delay(server.shutdown().awaitTermination())) + .map(_ => ()) + } + } } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/SslSessionClientInterceptor.scala b/comm/src/main/scala/coop/rchain/comm/transport/SslSessionClientInterceptor.scala index caa2c103b0c..92218580743 100644 --- 
a/comm/src/main/scala/coop/rchain/comm/transport/SslSessionClientInterceptor.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/SslSessionClientInterceptor.scala @@ -1,32 +1,41 @@ package coop.rchain.comm.transport +import cats.effect.Async +import cats.effect.std.Dispatcher import coop.rchain.comm.protocol.routing.{Header => RHeader, _} import coop.rchain.comm.protocol.routing.TLResponse.Payload import coop.rchain.crypto.util.CertificateHelper import coop.rchain.shared.{Log, LogSource} - import io.grpc._ + import javax.net.ssl.SSLSession -class SslSessionClientInterceptor(networkID: String) extends ClientInterceptor { +class SslSessionClientInterceptor[F[_]: Async](networkID: String, d: Dispatcher[F]) + extends ClientInterceptor { def interceptCall[ReqT, RespT]( method: MethodDescriptor[ReqT, RespT], callOptions: CallOptions, next: Channel ): ClientCall[ReqT, RespT] = - new SslSessionClientCallInterceptor(next.newCall(method, callOptions), networkID) + new SslSessionClientCallInterceptor[F, ReqT, RespT]( + next.newCall(method, callOptions), + networkID, + d + ) } /** * This wart exists because that's how grpc works */ @SuppressWarnings(Array("org.wartremover.warts.Var")) -class SslSessionClientCallInterceptor[ReqT, RespT](next: ClientCall[ReqT, RespT], networkID: String) - extends ClientCall[ReqT, RespT] { +class SslSessionClientCallInterceptor[F[_]: Async, ReqT, RespT]( + next: ClientCall[ReqT, RespT], + networkID: String, + d: Dispatcher[F] +) extends ClientCall[ReqT, RespT] { self => implicit private val logSource: LogSource = LogSource(this.getClass) - private val log = Log.logId def cancel(message: String, cause: Throwable): Unit = next.cancel(message, cause) def request(numMessages: Int): Unit = next.request(numMessages) @@ -59,7 +68,8 @@ class SslSessionClientCallInterceptor[ReqT, RespT](next: ClientCall[ReqT, RespT] self.getAttributes.get(Grpc.TRANSPORT_ATTR_SSL_SESSION) ) if (sslSession.isEmpty) { - log.warn("No TLS Session. 
Closing connection") + val logPure = Log.log[F].warn("No TLS Session. Closing connection") + d.unsafeRunSync(logPure) close(Status.UNAUTHENTICATED.withDescription("No TLS Session")) } else { sslSession.foreach { session => @@ -69,14 +79,17 @@ class SslSessionClientCallInterceptor[ReqT, RespT](next: ClientCall[ReqT, RespT] if (verified) next.onMessage(message) else { - log.warn("Certificate verification failed. Closing connection") + val logPure = + Log.log[F].warn("Certificate verification failed. Closing connection") + d.unsafeRunSync(logPure) close(Status.UNAUTHENTICATED.withDescription("Certificate verification failed")) } } } } else { - val nidStr = if (nid.isEmpty) "" else nid - log.warn(s"Wrong network id '$nidStr'. Closing connection") + val nidStr = if (nid.isEmpty) "" else nid + val logPure = Log.log[F].warn(s"Wrong network id '$nidStr'. Closing connection") + d.unsafeRunSync(logPure) close(Status.PERMISSION_DENIED.withDescription(s"Wrong network id '$nidStr'")) } @@ -84,7 +97,8 @@ class SslSessionClientCallInterceptor[ReqT, RespT](next: ClientCall[ReqT, RespT] next.onMessage(message) case TLResponse(_) => - log.warn(s"Malformed response $message") + val logPure = Log.log[F].warn(s"Malformed response $message") + d.unsafeRunSync(logPure) close(Status.INVALID_ARGUMENT.withDescription("Malformed message")) case _ => next.onMessage(message) } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/SslSessionServerInterceptor.scala b/comm/src/main/scala/coop/rchain/comm/transport/SslSessionServerInterceptor.scala index 8c7e0585a81..e547e78453e 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/SslSessionServerInterceptor.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/SslSessionServerInterceptor.scala @@ -1,18 +1,21 @@ package coop.rchain.comm.transport +import cats.effect.Async +import cats.effect.std.Dispatcher import coop.rchain.comm.protocol.routing.{Header => RHeader, _} import coop.rchain.comm.rp.ProtocolHelper import 
coop.rchain.crypto.util.CertificateHelper import coop.rchain.shared.{Log, LogSource} - import io.grpc._ + import javax.net.ssl.SSLSession /** * This wart exists because that's how gRPC works */ @SuppressWarnings(Array("org.wartremover.warts.Var")) -class SslSessionServerInterceptor(networkID: String) extends ServerInterceptor { +class SslSessionServerInterceptor[F[_]: Async](networkID: String, d: Dispatcher[F]) + extends ServerInterceptor { def interceptCall[ReqT, RespT]( call: ServerCall[ReqT, RespT], @@ -21,7 +24,6 @@ class SslSessionServerInterceptor(networkID: String) extends ServerInterceptor { ): ServerCall.Listener[ReqT] = new InterceptionListener(next.startCall(call, headers), call) implicit private val logSource: LogSource = LogSource(this.getClass) - private val log = Log.logId private class InterceptionListener[ReqT, RespT]( next: ServerCall.Listener[ReqT], @@ -42,16 +44,18 @@ class SslSessionServerInterceptor(networkID: String) extends ServerInterceptor { message match { case TLRequest(Protocol(RHeader(sender, nid), msg)) => if (nid == networkID) { - if (log.isTraceEnabled) { + if (d.unsafeRunSync(Log.log[F].isTraceEnabled)) { val peerNode = ProtocolHelper.toPeerNode(sender) val msgType = msg.getClass.toString - log.trace(s"Request [$msgType] from peer ${peerNode.toAddress}") + val logPure = Log.log[F].trace(s"Request [$msgType] from peer ${peerNode.toAddress}") + d.unsafeRunSync(logPure) } val sslSession: Option[SSLSession] = Option( call.getAttributes.get(Grpc.TRANSPORT_ATTR_SSL_SESSION) ) if (sslSession.isEmpty) { - log.warn("No TLS Session. Closing connection") + val logPure = Log.log[F].warn("No TLS Session. Closing connection") + d.unsafeRunSync(logPure) close(Status.UNAUTHENTICATED.withDescription("No TLS Session")) } else { sslSession.foreach { session => @@ -61,14 +65,17 @@ class SslSessionServerInterceptor(networkID: String) extends ServerInterceptor { if (verified) next.onMessage(message) else { - log.warn("Certificate verification failed. 
Closing connection") + val logPure = + Log.log[F].warn("Certificate verification failed. Closing connection") + d.unsafeRunSync(logPure) close(Status.UNAUTHENTICATED.withDescription("Certificate verification failed")) } } } } else { - val nidStr = if (nid.isEmpty) "" else nid - log.warn(s"Wrong network id '$nidStr'. Closing connection") + val nidStr = if (nid.isEmpty) "" else nid + val logPure = Log.log[F].warn(s"Wrong network id '$nidStr'. Closing connection") + d.unsafeRunSync(logPure) close( Status.PERMISSION_DENIED .withDescription( @@ -77,7 +84,8 @@ class SslSessionServerInterceptor(networkID: String) extends ServerInterceptor { ) } case TLRequest(_) => - log.warn(s"Malformed message $message") + val logPure = Log.log[F].warn(s"Malformed message $message") + d.unsafeRunSync(logPure) close(Status.INVALID_ARGUMENT.withDescription("Malformed message")) case _ => next.onMessage(message) } diff --git a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala index 767edb4e596..adaee888204 100644 --- a/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala +++ b/comm/src/main/scala/coop/rchain/comm/transport/StreamObservable.scala @@ -32,7 +32,15 @@ class StreamObservableClass[F[_]: Async: Log]( case Left(e) => Log[F].error(e.message) >> none.pure[F] } - def push(key: String): F[Boolean] = subject.offer1(StreamMsgId(key, blob.sender)) + def push(key: String): F[Boolean] = + subject + .trySend(StreamMsgId(key, blob.sender)) + .flatMap( + _.leftTraverse( + _ => new Exception(s"Channel is closed when trying to send.").raiseError[F, Boolean] + ).map(_.merge) + ) + def propose(key: String): F[Unit] = { val processError = Log[F].warn( s"Client stream message queue for $peer is full (${bufferSize} items). 
Dropping message.)" diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala index a783a6a42df..4b122e4ce84 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TcpTransportLayerSpec.scala @@ -1,13 +1,13 @@ package coop.rchain.comm.transport import cats.effect.{IO, Sync} -import cats.effect.concurrent.MVar +import cats.effect.std.PQueue +import cats.effect.unsafe.implicits.global import coop.rchain.comm._ import coop.rchain.comm.rp.Connect.RPConfAsk import coop.rchain.crypto.util.{CertificateHelper, CertificatePrinter} import coop.rchain.metrics.Metrics import coop.rchain.p2p.EffectsTestInstances._ -import coop.rchain.shared.RChainScheduler._ import coop.rchain.shared.{Base16, Log} import cats.effect.{Deferred, Ref} @@ -49,7 +49,7 @@ class TcpTransportLayerSpec extends TransportLayerSpec[IO, TcpTlsEnvironment] { def extract[A](fa: IO[A]): A = fa.unsafeRunSync def createDispatcherCallback: IO[DispatcherCallback[IO]] = - MVar.empty[IO, Unit].map(new DispatcherCallback(_)) + PQueue.bounded[IO, Unit](1).map(new DispatcherCallback(_)) def createTransportLayerServer(env: TcpTlsEnvironment): IO[TransportLayerServer[IO]] = IO.delay { diff --git a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala index 81d26b74752..06c8c6a7180 100644 --- a/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala +++ b/comm/src/test/scala/coop/rchain/comm/transport/TransportLayerRuntime.scala @@ -1,8 +1,7 @@ package coop.rchain.comm.transport import cats._ -import cats.effect.concurrent.MVar2 -import cats.effect.Sync +import cats.effect.{Async, Sync, Temporal} import cats.syntax.all._ import coop.rchain.catscontrib.ski._ import coop.rchain.comm.CommError.CommErr @@ -14,9 +13,9 @@ 
import java.net.ServerSocket import scala.collection.mutable import scala.concurrent.duration._ import scala.util.{Try, Using} -import cats.effect.Temporal +import cats.effect.std.PQueue -abstract class TransportLayerRuntime[F[_]: Sync: Temporal, E <: Environment] { +abstract class TransportLayerRuntime[F[_]: Async, E <: Environment] { val networkId = "test" @@ -229,8 +228,8 @@ trait Environment { def port: Int } -final class DispatcherCallback[F[_]: Functor](state: MVar2[F, Unit]) { - def notifyThatDispatched(): F[Unit] = state.tryPut(()).void +final class DispatcherCallback[F[_]: Functor](state: PQueue[F, Unit]) { + def notifyThatDispatched(): F[Unit] = state.tryOffer(()).void def waitUntilDispatched(): F[Unit] = state.take } diff --git a/node/src/main/scala/coop/rchain/node/Main.scala b/node/src/main/scala/coop/rchain/node/Main.scala index 25a5711994f..037e9143d80 100644 --- a/node/src/main/scala/coop/rchain/node/Main.scala +++ b/node/src/main/scala/coop/rchain/node/Main.scala @@ -1,44 +1,44 @@ package coop.rchain.node -import cats.effect.IO +import cats.effect.{ExitCode, IO, IOApp} import coop.rchain.node.configuration._ import coop.rchain.node.effects._ import coop.rchain.node.runtime.NodeMain import coop.rchain.shared._ -import monix.execution.Scheduler import org.slf4j.LoggerFactory -object Main { +object Main extends IOApp { /** * Main entry point * @param args input args */ @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) - def main(args: Array[String]): Unit = { + override def run(args: List[String]): IO[ExitCode] = { // Catch-all for unhandled exceptions. Use only JDK and SLF4J. 
Thread.setDefaultUncaughtExceptionHandler((thread, ex) => { LoggerFactory.getLogger(getClass).error("Unhandled exception in thread " + thread.getName, ex) }) - import cats.effect.unsafe.implicits.global - implicit val console: ConsoleIO[IO] = NodeMain.consoleIO implicit val log: Log[IO] = effects.log // Ensure terminal is restored on exit sys.addShutdownHook { - console.close.unsafeRunSync + console.close.unsafeRunSync()(runtime) } // Parse CLI options val options = commandline.Options(args) - if (options.subcommand.contains(options.run)) - // Start the node - NodeMain.startNode[IO](options).unsafeRunSync - //or - else - // Execute CLI command - NodeMain.runCLI[IO](options).unsafeRunSync + val x = + if (options.subcommand.contains(options.run)) + // Start the node + NodeMain.startNode[IO](options) + //or + else + // Execute CLI command + NodeMain.runCLI[IO](options) + + x.map(_ => ExitCode.Success) } } diff --git a/node/src/main/scala/coop/rchain/node/api/package.scala b/node/src/main/scala/coop/rchain/node/api/package.scala index 7d33636f92b..3333ae6aedd 100644 --- a/node/src/main/scala/coop/rchain/node/api/package.scala +++ b/node/src/main/scala/coop/rchain/node/api/package.scala @@ -21,7 +21,6 @@ package object api { def acquireInternalServer[F[_]: Async]( host: String, port: Int, - grpcEC: ExecutionContext, replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], proposeService: ProposeServiceFs2Grpc[F, Metadata], @@ -32,31 +31,30 @@ package object api { maxConnectionIdle: FiniteDuration, maxConnectionAge: FiniteDuration, maxConnectionAgeGrace: FiniteDuration - ): Resource[F, grpc.Server] = { - val server = NettyServerBuilder - .forAddress(new InetSocketAddress(host, port)) - .executor(grpcEC.execute) - .maxInboundMessageSize(maxMessageSize) - .addService(ReplFs2Grpc.bindService(replService)) - .addService(ProposeServiceFs2Grpc.bindService(proposeService)) - .addService(DeployServiceFs2Grpc.bindService(deployService)) - 
.keepAliveTime(keepAliveTime.length, keepAliveTime.unit) - .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) - .permitKeepAliveTime(permitKeepAliveTime.length, permitKeepAliveTime.unit) - .maxConnectionIdle(maxConnectionIdle.length, maxConnectionIdle.unit) - .maxConnectionAge(maxConnectionAge.length, maxConnectionAge.unit) - .maxConnectionAgeGrace(maxConnectionAgeGrace.length, maxConnectionAgeGrace.unit) - .addService(ProtoReflectionService.newInstance()) - .compressorRegistry(null) - .build + ): Resource[F, grpc.Server] = + Dispatcher[F].flatMap { d => + val server = NettyServerBuilder + .forAddress(new InetSocketAddress(host, port)) + .maxInboundMessageSize(maxMessageSize) + .addService(ReplFs2Grpc.bindService(d, replService)) + .addService(ProposeServiceFs2Grpc.bindService(d, proposeService)) + .addService(DeployServiceFs2Grpc.bindService(d, deployService)) + .keepAliveTime(keepAliveTime.length, keepAliveTime.unit) + .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) + .permitKeepAliveTime(permitKeepAliveTime.length, permitKeepAliveTime.unit) + .maxConnectionIdle(maxConnectionIdle.length, maxConnectionIdle.unit) + .maxConnectionAge(maxConnectionAge.length, maxConnectionAge.unit) + .maxConnectionAgeGrace(maxConnectionAgeGrace.length, maxConnectionAgeGrace.unit) + .addService(ProtoReflectionService.newInstance()) + .compressorRegistry(null) + .build - Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) - } + Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) + } - def acquireExternalServer[F[_]: Async: AsyncEffect: Log]( + def acquireExternalServer[F[_]: Async: Log]( host: String, port: Int, - grpcEC: ExecutionContext, deployGrpcService: DeployServiceFs2Grpc[F, Metadata], maxMessageSize: Int, keepAliveTime: FiniteDuration, @@ -65,22 +63,22 @@ package object api { maxConnectionIdle: FiniteDuration, maxConnectionAge: FiniteDuration, 
maxConnectionAgeGrace: FiniteDuration - ): Resource[F, grpc.Server] = { - val server = NettyServerBuilder - .forAddress(new InetSocketAddress(host, port)) - .executor(grpcEC.execute) - .maxInboundMessageSize(maxMessageSize) - .addService(DeployServiceFs2Grpc.bindService(deployGrpcService)) - .compressorRegistry(null) - .keepAliveTime(keepAliveTime.length, keepAliveTime.unit) - .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) - .permitKeepAliveTime(permitKeepAliveTime.length, permitKeepAliveTime.unit) - .maxConnectionIdle(maxConnectionIdle.length, maxConnectionIdle.unit) - .maxConnectionAge(maxConnectionAge.length, maxConnectionAge.unit) - .maxConnectionAgeGrace(maxConnectionAgeGrace.length, maxConnectionAgeGrace.unit) - .addService(ProtoReflectionService.newInstance()) - .build + ): Resource[F, grpc.Server] = + Dispatcher[F].flatMap { d => + val server = NettyServerBuilder + .forAddress(new InetSocketAddress(host, port)) + .maxInboundMessageSize(maxMessageSize) + .addService(DeployServiceFs2Grpc.bindService(d, deployGrpcService)) + .compressorRegistry(null) + .keepAliveTime(keepAliveTime.length, keepAliveTime.unit) + .keepAliveTimeout(keepAliveTimeout.length, keepAliveTimeout.unit) + .permitKeepAliveTime(permitKeepAliveTime.length, permitKeepAliveTime.unit) + .maxConnectionIdle(maxConnectionIdle.length, maxConnectionIdle.unit) + .maxConnectionAge(maxConnectionAge.length, maxConnectionAge.unit) + .maxConnectionAgeGrace(maxConnectionAgeGrace.length, maxConnectionAgeGrace.unit) + .addService(ProtoReflectionService.newInstance()) + .build - Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) - } + Resource.make(Sync[F].delay(server.start))(s => Sync[F].delay(s.shutdown.awaitTermination())) + } } diff --git a/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala b/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala index ef591b34700..4fd805f8a5d 100644 --- 
a/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala +++ b/node/src/main/scala/coop/rchain/node/diagnostics/BatchInfluxDBReporter.scala @@ -1,47 +1,37 @@ package coop.rchain.node.diagnostics -import java.io.IOException -import java.util.concurrent.atomic.AtomicReference -import scala.concurrent.duration._ -import scala.util.Try -import coop.rchain.node.diagnostics.BatchInfluxDBReporter.Settings +import cats.effect.IO +import cats.effect.unsafe.implicits.global +import cats.implicits.catsSyntaxOptionId import com.typesafe.config.Config -import kamon.{Kamon, MetricReporter} +import coop.rchain.node.diagnostics.BatchInfluxDBReporter.Settings +import fs2.concurrent.Channel import kamon.metric._ import kamon.util.EnvironmentTagBuilder -import monix.eval.Task -import monix.execution.Cancelable -import monix.execution.Scheduler.Implicits.global -import monix.reactive.subjects._ +import kamon.{Kamon, MetricReporter} import okhttp3._ import org.slf4j.LoggerFactory +import java.io.IOException +import scala.concurrent.duration._ +import scala.util.Try + // TODO get rid of monix @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) class BatchInfluxDBReporter(config: Config = Kamon.config()) extends MetricReporter { private val logger = LoggerFactory.getLogger(classOf[BatchInfluxDBReporter]) @SuppressWarnings(Array("org.wartremover.warts.Var")) - private var settings = readSettings(config) - private val client = buildClient(settings) - private val subject = PublishSubject[String] - private val subscription = new AtomicReference(Option.empty[Cancelable]) - - override def start(): Unit = { - subscription.getAndSet(None).foreach(_.cancel()) - val s = - Some( - subject - .bufferTimed(settings.batchInterval) - .mapEval(postMetrics) - .subscribe() - ) - - if (!subscription.compareAndSet(None, s)) - s.get.cancel() - } - - override def stop(): Unit = - subscription.getAndSet(None).foreach(_.cancel()) + private var settings = readSettings(config) 
+ private val client = buildClient(settings) + private val subject = Channel.unbounded[IO, Option[Seq[String]]].unsafeRunSync() + override def start(): Unit = + subject.stream.unNoneTerminate + .evalMap(postMetrics) + .compile + .drain + .unsafeRunSync() + + override def stop(): Unit = subject.send(None) // finish stream override def reconfigure(config: Config): Unit = { stop() @@ -77,41 +67,40 @@ class BatchInfluxDBReporter(config: Config = Kamon.config()) extends MetricRepor } override def reportPeriodSnapshot(snapshot: PeriodSnapshot): Unit = - subject.onNext(translateToLineProtocol(snapshot)) - - private def postMetrics(metrics: Seq[String]): Task[Unit] = - Task.create { (_, cb) => - val body = RequestBody.create(MediaType.parse("text/plain"), metrics.mkString) - val request = new Request.Builder() - .url(settings.url) - .post(body) - .build() - - client - .newCall(request) - .enqueue( - new Callback { - def onFailure(call: Call, e: IOException): Unit = { - logger.error("Failed to POST metrics to InfluxDB", e) - cb.onSuccess(()) - } + subject.send(Seq(translateToLineProtocol(snapshot)).some).unsafeRunSync() + + private def postMetrics(metrics: Seq[String]): IO[Unit] = + IO.async_ { + case cb => + val body = RequestBody.create(MediaType.parse("text/plain"), metrics.mkString) + val request = new Request.Builder() + .url(settings.url) + .post(body) + .build() + + client + .newCall(request) + .enqueue( + new Callback { + def onFailure(call: Call, e: IOException): Unit = { + logger.error("Failed to POST metrics to InfluxDB", e) + cb(Right(())) + } - def onResponse(call: Call, response: Response): Unit = { - if (response.isSuccessful) - logger.trace("Successfully sent metrics to InfluxDB") - else { - logger.error( - "Metrics POST to InfluxDB failed with status code [{}], response body: {}", - response.code(), - response.body().string() - ) + def onResponse(call: Call, response: Response): Unit = { + if (response.isSuccessful) + logger.trace("Successfully sent metrics 
to InfluxDB") + else { + logger.error( + "Metrics POST to InfluxDB failed with status code [{}], response body: {}", + response.code(), + response.body().string() + ) + } + cb(Right(())) } - cb.onSuccess(()) } - } - ) - - Cancelable.empty + ) } private def translateToLineProtocol(periodSnapshot: PeriodSnapshot): String = { diff --git a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala index 0ae19c77850..c232e508870 100644 --- a/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala +++ b/node/src/main/scala/coop/rchain/node/effects/ReplClient.scala @@ -1,6 +1,8 @@ package coop.rchain.node.effects -import cats.effect.{AsyncEffect, Sync} +import cats.effect.Sync +import cats.effect.kernel.Async +import cats.effect.std.Dispatcher import cats.syntax.all._ import coop.rchain.node.model._ import io.grpc.netty.NettyChannelBuilder @@ -24,7 +26,7 @@ object ReplClient { def apply[F[_]](implicit ev: ReplClient[F]): ReplClient[F] = ev } -class GrpcReplClient[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessageSize: Int) +class GrpcReplClient[F[_]: Async](host: String, port: Int, maxMessageSize: Int) extends ReplClient[F] with Closeable { @@ -35,14 +37,15 @@ class GrpcReplClient[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessag .usePlaintext() .build - private val stub = ReplFs2Grpc.stub(channel) + private val stub = Dispatcher[F].map(ReplFs2Grpc.stub(_, channel)) def run(line: String): F[Either[Throwable, String]] = - stub - .run(CmdRequest(line), new Metadata()) - .map(_.output) - .attempt - .map(_.leftMap(processError)) + stub.use( + _.run(CmdRequest(line), new Metadata()) + .map(_.output) + .attempt + .map(_.leftMap(processError)) + ) def eval( fileNames: List[String], @@ -53,11 +56,12 @@ class GrpcReplClient[F[_]: Sync: AsyncEffect](host: String, port: Int, maxMessag def eval(fileName: String, printUnmatchedSendsOnly: Boolean): F[Either[Throwable, String]] = { val filePath 
= Paths.get(fileName) if (Files.exists(filePath)) - stub - .eval(EvalRequest(readContent(filePath), printUnmatchedSendsOnly), new Metadata()) - .map(_.output) - .attempt - .map(_.leftMap(processError)) + stub.use( + _.eval(EvalRequest(readContent(filePath), printUnmatchedSendsOnly), new Metadata()) + .map(_.output) + .attempt + .map(_.leftMap(processError)) + ) else Sync[F].delay(new FileNotFoundException("File not found").asLeft) } diff --git a/node/src/main/scala/coop/rchain/node/effects/package.scala b/node/src/main/scala/coop/rchain/node/effects/package.scala index e8764db741a..1cc0454e9c8 100644 --- a/node/src/main/scala/coop/rchain/node/effects/package.scala +++ b/node/src/main/scala/coop/rchain/node/effects/package.scala @@ -34,10 +34,9 @@ package object effects { def kademliaRPC[F[_]: Async: RPConfAsk: Metrics]( networkId: String, - timeout: FiniteDuration, - grpcEC: ExecutionContext + timeout: FiniteDuration ): KademliaRPC[F] = - new GrpcKademliaRPC(networkId, timeout, grpcEC) + new GrpcKademliaRPC(networkId, timeout) def transportClient[F[_]: Async: Parallel: Log: Metrics]( networkId: String, diff --git a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala index 06b102d6940..5e6b811b568 100644 --- a/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala +++ b/node/src/main/scala/coop/rchain/node/instances/ProposerInstance.scala @@ -1,7 +1,6 @@ package coop.rchain.node.instances import cats.effect.Async -import cats.effect.concurrent.MVar import cats.syntax.all._ import coop.rchain.casper.PrettyPrinter import coop.rchain.casper.blocks.proposer._ @@ -30,7 +29,7 @@ object ProposerInstance { // propose permit .eval(for { lock <- Semaphore[F](1) - trigger <- MVar[F].of(()) + trigger <- PQueue.bounded[F, Int](1) // initial position for propose trigger - inactive _ <- trigger.take } yield (lock, trigger)) @@ -44,7 +43,7 @@ object ProposerInstance { // if 
propose is in progress - resolve proposeID to ProposerEmpty result and stop here. // Cock the trigger, so propose is called again after the one that occupies the lock finishes. .evalFilter { v => - (trigger.tryPut(()) >> proposeIDDef.complete(ProposerResult.empty)) + (trigger.tryOffer(1) >> proposeIDDef.complete(ProposerResult.empty)) .unlessA(v) .as(v) } @@ -80,7 +79,7 @@ object ProposerInstance { _ <- trigger.tryTake.flatMap { case Some(_) => Deferred[F, ProposerResult] >>= { d => - proposeRequestsQueue.enqueue1(false, d) + proposeRequestsQueue.send(false, d).void } case None => ().pure[F] } diff --git a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala index bcb01539e76..baa65fc7408 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NetworkServers.scala @@ -30,11 +30,7 @@ import io.grpc.{Metadata, Server} import kamon.Kamon import kamon.system.SystemMetrics import kamon.zipkin.ZipkinReporter -import monix.execution.Scheduler import org.http4s.server -import coop.rchain.shared.RChainScheduler._ - -import scala.concurrent.ExecutionContext import scala.util.{Failure, Success} import cats.effect.Temporal @@ -55,25 +51,24 @@ object NetworkServers { adminWebApi: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F], nodeConf: NodeConf, - kamonConf: Config, - grpcEC: ExecutionContext + kamonConf: Config ): Resource[F, Unit] = { val GrpcServices(deploySrv, proposeSrv, replSrv) = grpcServices val host = nodeConf.apiServer.host for { nodeAddress <- Resource.eval(RPConfAsk[F].ask.map(_.local.toAddress)) - intServer <- internalServer(nodeConf, replSrv, deploySrv, proposeSrv, grpcEC) + intServer <- internalServer(nodeConf, replSrv, deploySrv, proposeSrv) _ <- Resource.eval(Log[F].info(s"Internal API server started at $host:${intServer.getPort}.")) - extServer <- externalServer(nodeConf, deploySrv, grpcEC) + 
extServer <- externalServer(nodeConf, deploySrv) extServerMsg = s"External API server started at $host:${extServer.getPort}." _ <- Resource.eval(Log[F].info(extServerMsg)) _ <- protocolServer(nodeConf, routingMessageQueue) _ <- Resource.eval(Log[F].info(s"Listening for traffic on $nodeAddress.")) - discovery <- discoveryServer(nodeConf, grpcEC) + discovery <- discoveryServer(nodeConf) _ <- Resource.eval(Log[F].info(s"Kademlia RPC server started at $host:${discovery.getPort}.")) prometheusRep = new NewPrometheusReporter() @@ -94,13 +89,11 @@ object NetworkServers { nodeConf: NodeConf, replService: ReplFs2Grpc[F, Metadata], deployService: DeployServiceFs2Grpc[F, Metadata], - proposeService: ProposeServiceFs2Grpc[F, Metadata], - grpcEC: ExecutionContext + proposeService: ProposeServiceFs2Grpc[F, Metadata] ): Resource[F, Server] = api.acquireInternalServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portGrpcInternal, - grpcEC, replService, deployService, proposeService, @@ -115,13 +108,11 @@ object NetworkServers { def externalServer[F[_]: Async: Log]( nodeConf: NodeConf, - deployService: v1.DeployServiceFs2Grpc[F, Metadata], - grpcEC: ExecutionContext + deployService: v1.DeployServiceFs2Grpc[F, Metadata] ): Resource[F, Server] = api.acquireExternalServer[F]( nodeConf.apiServer.host, nodeConf.apiServer.portGrpcExternal, - grpcEC, deployService, nodeConf.apiServer.grpcMaxRecvMessageSize.toInt, nodeConf.apiServer.keepAliveTime, @@ -152,16 +143,14 @@ object NetworkServers { ) } - def discoveryServer[F[_]: Async: AsyncEffect: KademliaStore: Log: Metrics]( - nodeConf: NodeConf, - grpcEC: ExecutionContext + def discoveryServer[F[_]: Async: KademliaStore: Log: Metrics]( + nodeConf: NodeConf ): Resource[F, Server] = discovery.acquireKademliaRPCServer( nodeConf.protocolServer.networkId, nodeConf.peersDiscovery.port, KademliaHandleRPC.handlePing[F], - KademliaHandleRPC.handleLookup[F], - grpcEC + KademliaHandleRPC.handleLookup[F] ) def webApiServer[F[_]: Async: 
NodeDiscovery: ConnectionsCell: RPConfAsk: Log]( diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala index d8e59700f06..2958d1e2ad8 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeCallCtx.scala @@ -1,8 +1,7 @@ package coop.rchain.node.runtime import cats.data.ReaderT -import cats.effect.kernel.Async -import cats.effect.{CancelToken, ConcurrentEffect, ExitCase, Fiber, IO, SyncIO} +import cats.effect.Async import cats.~> import coop.rchain.node.diagnostics.Trace import coop.rchain.node.diagnostics.Trace.TraceId @@ -31,60 +30,60 @@ object NodeCallCtx { implicit val localEnvironment = cats.mtl.instances.all.localReader[F, NodeCallCtx] - /** - * Implementation for ConcurrentEffect for ReaderT cannot be constructed automatically so it's - * wired up here from existing [[Concurrent]] and [[ConcurrentEffect]] implementations. - * - * `runCancelable` and `runAsync` are newly provided. 
- */ - implicit val concurrentReaderNodeCallCtx = new ConcurrentEffect[ReaderNodeCallCtx] { - val c = Async[ReaderNodeCallCtx] - val t = ConcurrentEffect[F] - - // ConcurrentEffect - override def runCancelable[A](fa: ReaderNodeCallCtx[A])( - cb: Either[Throwable, A] => IO[Unit] - ): SyncIO[CancelToken[ReaderNodeCallCtx]] = - t.runCancelable(envToEff(fa))(cb).map(effToEnv(_)) - override def runAsync[A]( - fa: ReaderNodeCallCtx[A] - )(cb: Either[Throwable, A] => IO[Unit]): SyncIO[Unit] = - t.runAsync(envToEff(fa))(cb) - // Async - override def async[A](k: (Either[Throwable, A] => Unit) => Unit): ReaderNodeCallCtx[A] = - c.async_(k) - override def asyncF[A]( - k: (Either[Throwable, A] => Unit) => ReaderNodeCallCtx[Unit] - ): ReaderNodeCallCtx[A] = - c.asyncF(k) - // Concurrent - override def start[A]( - fa: ReaderNodeCallCtx[A] - ): ReaderNodeCallCtx[Fiber[ReaderNodeCallCtx, A]] = c.start(fa) - override def racePair[A, B]( - fa: ReaderNodeCallCtx[A], - fb: ReaderNodeCallCtx[B] - ): ReaderNodeCallCtx[ - Either[(A, Fiber[ReaderNodeCallCtx, B]), (Fiber[ReaderNodeCallCtx, A], B)] - ] = c.racePair(fa, fb) - override def suspend[A](thunk: => ReaderNodeCallCtx[A]): ReaderNodeCallCtx[A] = c.defer(thunk) - override def bracketCase[A, B](acquire: ReaderNodeCallCtx[A])(use: A => ReaderNodeCallCtx[B])( - release: (A, ExitCase[Throwable]) => ReaderNodeCallCtx[Unit] - ): ReaderNodeCallCtx[B] = c.bracketCase(acquire)(use)(release) - override def raiseError[A](e: Throwable): ReaderNodeCallCtx[A] = c.raiseError(e) - override def handleErrorWith[A]( - fa: ReaderNodeCallCtx[A] - )(f: Throwable => ReaderNodeCallCtx[A]): ReaderNodeCallCtx[A] = - c.handleErrorWith(fa)(f) - override def flatMap[A, B]( - fa: ReaderNodeCallCtx[A] - )(f: A => ReaderNodeCallCtx[B]): ReaderNodeCallCtx[B] = - c.flatMap(fa)(f) - override def tailRecM[A, B]( - a: A - )(f: A => ReaderNodeCallCtx[Either[A, B]]): ReaderNodeCallCtx[B] = - c.tailRecM(a)(f) - override def pure[A](x: A): ReaderNodeCallCtx[A] = c.pure(x) - 
} +// /** +// * Implementation for ConcurrentEffect for ReaderT cannot be constructed automatically so it's +// * wired up here from existing [[Concurrent]] and [[ConcurrentEffect]] implementations. +// * +// * `runCancelable` and `runAsync` are newly provided. +// */ +// implicit val concurrentReaderNodeCallCtx = new ConcurrentEffect[ReaderNodeCallCtx] { +// val c = Async[ReaderNodeCallCtx] +// val t = ConcurrentEffect[F] +// +// // ConcurrentEffect +// override def runCancelable[A](fa: ReaderNodeCallCtx[A])( +// cb: Either[Throwable, A] => IO[Unit] +// ): SyncIO[CancelToken[ReaderNodeCallCtx]] = +// t.runCancelable(envToEff(fa))(cb).map(effToEnv(_)) +// override def runAsync[A]( +// fa: ReaderNodeCallCtx[A] +// )(cb: Either[Throwable, A] => IO[Unit]): SyncIO[Unit] = +// t.runAsync(envToEff(fa))(cb) +// // Async +// override def async[A](k: (Either[Throwable, A] => Unit) => Unit): ReaderNodeCallCtx[A] = +// c.async_(k) +// override def asyncF[A]( +// k: (Either[Throwable, A] => Unit) => ReaderNodeCallCtx[Unit] +// ): ReaderNodeCallCtx[A] = +// c.asyncF(k) +// // Concurrent +// override def start[A]( +// fa: ReaderNodeCallCtx[A] +// ): ReaderNodeCallCtx[Fiber[ReaderNodeCallCtx, A]] = c.start(fa) +// override def racePair[A, B]( +// fa: ReaderNodeCallCtx[A], +// fb: ReaderNodeCallCtx[B] +// ): ReaderNodeCallCtx[ +// Either[(A, Fiber[ReaderNodeCallCtx, B]), (Fiber[ReaderNodeCallCtx, A], B)] +// ] = c.racePair(fa, fb) +// override def suspend[A](thunk: => ReaderNodeCallCtx[A]): ReaderNodeCallCtx[A] = c.defer(thunk) +// override def bracketCase[A, B](acquire: ReaderNodeCallCtx[A])(use: A => ReaderNodeCallCtx[B])( +// release: (A, ExitCase[Throwable]) => ReaderNodeCallCtx[Unit] +// ): ReaderNodeCallCtx[B] = c.bracketCase(acquire)(use)(release) +// override def raiseError[A](e: Throwable): ReaderNodeCallCtx[A] = c.raiseError(e) +// override def handleErrorWith[A]( +// fa: ReaderNodeCallCtx[A] +// )(f: Throwable => ReaderNodeCallCtx[A]): ReaderNodeCallCtx[A] = +// 
c.handleErrorWith(fa)(f) +// override def flatMap[A, B]( +// fa: ReaderNodeCallCtx[A] +// )(f: A => ReaderNodeCallCtx[B]): ReaderNodeCallCtx[B] = +// c.flatMap(fa)(f) +// override def tailRecM[A, B]( +// a: A +// )(f: A => ReaderNodeCallCtx[Either[A, B]]): ReaderNodeCallCtx[B] = +// c.tailRecM(a)(f) +// override def pure[A](x: A): ReaderNodeCallCtx[A] = c.pure(x) +// } } } diff --git a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala index 22ac63f93bd..7a9dd5a80dc 100644 --- a/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala +++ b/node/src/main/scala/coop/rchain/node/runtime/NodeRuntime.scala @@ -44,8 +44,7 @@ object NodeRuntime { * although they can be generated with cats.tagless @autoFunctorK macros but support is missing for IntelliJ. * https://github.com/typelevel/cats-tagless/issues/60 (Cheers, Marcin!!) */ - implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) - implicit val tm: Temporal[ReaderNodeCallCtx] = Temporal[F].mapK(effToEnv) + implicit val lg: Log[ReaderNodeCallCtx] = Log[F].mapK(effToEnv) for { id <- NodeEnvironment.create[F](nodeConf) @@ -139,8 +138,7 @@ class NodeRuntime[F[_]: Parallel: Async: LocalEnvironment: Log] private[node] ( implicit val (p, m) = (rpConfAsk, metrics) effects.kademliaRPC( nodeConf.protocolServer.networkId, - nodeConf.protocolClient.networkTimeout, - grpcEC + nodeConf.protocolClient.networkTimeout ) } @@ -208,8 +206,7 @@ class NodeRuntime[F[_]: Parallel: Async: LocalEnvironment: Log] private[node] ( adminWebApi, reportRoutes, nodeConf, - kamonConf, - grpcEC + kamonConf ) // Return node launch stream } yield nodeLaunch diff --git a/node/src/main/scala/coop/rchain/node/web/https4s/RouterFix.scala b/node/src/main/scala/coop/rchain/node/web/https4s/RouterFix.scala index ff427bd2771..9a8ec66b539 100644 --- a/node/src/main/scala/coop/rchain/node/web/https4s/RouterFix.scala +++ 
b/node/src/main/scala/coop/rchain/node/web/https4s/RouterFix.scala @@ -3,6 +3,7 @@ package coop.rchain.node.web.https4s import cats.data.Kleisli import cats.syntax.all._ import cats.{Functor, Monad} +import org.http4s.Uri.Path import org.http4s.server.Router import org.http4s.{HttpRoutes, Request} @@ -39,7 +40,7 @@ object RouterFix { else Kleisli { req => ( - if (toSegments(req.pathInfo).startsWith(prefixSegments)) + if (toSegments(req.pathInfo.renderString).startsWith(prefixSegments)) routes.local(translate(prefix)) <+> acc else acc @@ -52,8 +53,8 @@ object RouterFix { /** * Difference from original http4s [[Router]] is here, prefix is removed from request uri. */ - val path = req.uri.path.replaceAll(s"^$prefix", "") - req.withUri(req.uri.copy(path = path)) + val path = req.uri.path.renderString.replaceAll(s"^$prefix", "") + req.withUri(req.uri.copy(path = Path.unsafeFromString(path))) } private def toSegments(path: String): List[String] = diff --git a/node/src/main/scala/coop/rchain/node/web/package.scala b/node/src/main/scala/coop/rchain/node/web/package.scala index f5c6acbf1f4..c25d34be891 100644 --- a/node/src/main/scala/coop/rchain/node/web/package.scala +++ b/node/src/main/scala/coop/rchain/node/web/package.scala @@ -1,6 +1,6 @@ package coop.rchain.node -import cats.effect.{AsyncEffect, Resource, Sync} +import cats.effect.{Async, Resource, Sync, Temporal} import cats.syntax.all._ import coop.rchain.comm.discovery.NodeDiscovery import coop.rchain.comm.rp.Connect.{ConnectionsCell, RPConfAsk} @@ -9,23 +9,18 @@ import coop.rchain.node.diagnostics.NewPrometheusReporter import coop.rchain.node.web.ReportingRoutes.ReportingHttpRoutes import coop.rchain.node.web.https4s.RouterFix import coop.rchain.shared.Log -import monix.execution.Scheduler import org.http4s.HttpRoutes +import org.http4s.blaze.server.BlazeServerBuilder import org.http4s.implicits._ import org.http4s.server.Server -import org.http4s.server.blaze.BlazeServerBuilder import 
org.http4s.server.middleware.CORS import scala.concurrent.duration.{DurationInt, FiniteDuration} -import cats.effect.Temporal package object web { - // https://github.com/http4s/http4s/security/advisories/GHSA-52cf-226f-rhr6 - // val corsPolicy = CORS.policy.withAllowCredentials(false) // after http4s v0.22.x - def corsPolicy[F[_]: Sync](routes: HttpRoutes[F]) = - CORS(routes, CORS.DefaultCORSConfig.copy(allowCredentials = false)) + def corsPolicy = CORS.policy.withAllowCredentials(false) - def acquireHttpServer[F[_]: ContextShift: AsyncEffect: Temporal: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( + def acquireHttpServer[F[_]: Async: RPConfAsk: NodeDiscovery: ConnectionsCell: Log]( reporting: Boolean, host: String = "0.0.0.0", httpPort: Int, @@ -33,7 +28,7 @@ package object web { connectionIdleTimeout: FiniteDuration, webApi: WebApi[F], reportingRoutes: ReportingHttpRoutes[F] - ): Resource[F, Server[F]] = { + ): Resource[F, Server] = { val reportingRoutesOpt = if (reporting) reportingRoutes else HttpRoutes.empty val baseRoutes = Map( "/metrics" -> corsPolicy(NewPrometheusReporter.service[F](prometheusReporter)), @@ -51,8 +46,7 @@ package object web { Map.empty val allRoutes = baseRoutes ++ extraRoutes - import coop.rchain.shared.RChainScheduler._ - BlazeServerBuilder[F](mainEC) + BlazeServerBuilder[F] .bindHttp(httpPort, host) .withHttpApp(RouterFix(allRoutes.toList: _*).orNotFound) .withIdleTimeout(connectionIdleTimeout) @@ -60,21 +54,20 @@ package object web { .resource } - def acquireAdminHttpServer[F[_]: ContextShift: AsyncEffect: Temporal: Log]( + def acquireAdminHttpServer[F[_]: Async: Log]( host: String = "0.0.0.0", httpPort: Int, connectionIdleTimeout: FiniteDuration, webApi: WebApi[F], adminWebApiRoutes: AdminWebApi[F], reportingRoutes: ReportingHttpRoutes[F] - ): Resource[F, Server[F]] = { + ): Resource[F, Server] = { val baseRoutes = Map( "/api" -> corsPolicy(AdminWebApiRoutes.service[F](adminWebApiRoutes) <+> reportingRoutes), // Web API v1 (admin) 
with OpenAPI schema "/api/v1" -> corsPolicy(WebApiRoutesV1.createAdmin[F](webApi, adminWebApiRoutes)) ) - import coop.rchain.shared.RChainScheduler._ - BlazeServerBuilder[F](mainEC) + BlazeServerBuilder[F] .bindHttp(httpPort, host) .withHttpApp(RouterFix(baseRoutes.toList: _*).orNotFound) .withResponseHeaderTimeout(connectionIdleTimeout - 1.second) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index de9d60f5aca..768046c0fbd 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -9,11 +9,11 @@ object Dependencies { val catsMtlVersion = "0.7.1" val fs2Version = "3.6.1" val monixVersion = "3.4.0" - val http4sVersion = "0.21.24" - val endpointsVersion = "1.4.0" - val circeVersion = "0.13.0" + val http4sVersion = "0.23.6" + val endpointsVersion = "1.9.0" + val circeVersion = "0.14.3" val enumeratumVersion = "1.5.13" - val slf4jVersion = "1.7.30" + val slf4jVersion = "2.0.7" val kamonVersion = "1.1.6" // format: off @@ -23,6 +23,8 @@ object Dependencies { val catsLawsTest = "org.typelevel" %% "cats-laws" % catsVersion % "test" val catsLawsTestkitTest = "org.typelevel" %% "cats-testkit" % catsVersion % "test" val catsEffect = "org.typelevel" %% "cats-effect" % catsEffectVersion + val catsEffectStd = "org.typelevel" %% "cats-effect-std" % catsEffectVersion + val catsEffectKernel = "org.typelevel" %% "cats-effect-kernel" % catsEffectVersion val catsEffectLawsTest = "org.typelevel" %% "cats-effect-laws" % catsEffectVersion % "test" val catsMtl = "org.typelevel" %% "cats-mtl-core" % catsMtlVersion val catsMtlLawsTest = "org.typelevel" %% "cats-mtl-laws" % catsMtlVersion % "test" @@ -32,22 +34,22 @@ object Dependencies { val circeGeneric = "io.circe" %% "circe-generic" % circeVersion val circeGenericExtras = "io.circe" %% "circe-generic-extras" % circeVersion val circeLiteral = "io.circe" %% "circe-literal" % circeVersion - val circeParser = "io.circe" %% "circe-parser" % circeVersion + val circeParser = "io.circe" %% "circe-parser" 
% "0.14.1" val disciplineCore = "org.typelevel" %% "discipline-core" % "1.4.0" val enumeratum = "com.beachape" %% "enumeratum" % enumeratumVersion val endpoints = "org.endpoints4s" %% "algebra" % endpointsVersion - val endpointsAlgCirce = "org.endpoints4s" %% "algebra-circe" % endpointsVersion + val endpointsAlgCirce = "org.endpoints4s" %% "algebra-circe" % "2.3.0" val endpointsAlgJson = "org.endpoints4s" %% "algebra-json-schema" % endpointsVersion val endpointsGeneric = "org.endpoints4s" %% "json-schema-generic" % endpointsVersion - val endpointsCirce = "org.endpoints4s" %% "json-schema-circe" % endpointsVersion - val endpointsHttp4s = "org.endpoints4s" %% "http4s-server" % "6.0.0" - val endpointsOpenApi = "org.endpoints4s" %% "openapi" % "3.0.0" + val endpointsCirce = "org.endpoints4s" %% "json-schema-circe" % "2.3.0" + val endpointsHttp4s = "org.endpoints4s" %% "http4s-server" % "10.1.0" + val endpointsOpenApi = "org.endpoints4s" %% "openapi" % "4.3.0" val fs2Core = "co.fs2" %% "fs2-core" % fs2Version val fs2Io = "co.fs2" %% "fs2-io" % fs2Version val guava = "com.google.guava" % "guava" % "31.1-jre" val hasher = "com.roundeights" %% "hasher" % "1.2.0" - val http4sBlazeClient = "org.http4s" %% "http4s-blaze-client" % http4sVersion - val http4sBlazeServer = "org.http4s" %% "http4s-blaze-server" % http4sVersion + val http4sBlazeClient = "org.http4s" %% "http4s-blaze-client" % "0.23.14" + val http4sBlazeServer = "org.http4s" %% "http4s-blaze-server" % "0.23.14" val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion val http4sDSL = "org.http4s" %% "http4s-dsl" % http4sVersion val jaxb = "javax.xml.bind" % "jaxb-api" % "2.3.1" @@ -76,7 +78,7 @@ object Dependencies { val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.9.4" val scalaUri = "io.lemonlabs" %% "scala-uri" % "3.0.0" val scalacheck = "org.scalacheck" %% "scalacheck" % "1.15.0" - val scalacheckShapeless = "com.github.alexarchambault" %% "scalacheck-shapeless_1.15" % "1.3.0" % 
"test" + val scalacheckShapeless = "com.github.alexarchambault" %% "scalacheck-shapeless_1.16" % "1.3.1" % "test" val scalactic = "org.scalactic" %% "scalactic" % "3.2.13" % "test" val scalapbCompiler = "com.thesamet.scalapb" %% "compilerplugin" % scalapb.compiler.Version.scalapbVersion val scalapbRuntime = "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf" @@ -84,10 +86,10 @@ object Dependencies { val scalapbRuntimegGrpc = "com.thesamet.scalapb" %% "scalapb-runtime-grpc" % scalapb.compiler.Version.scalapbVersion val grpcNetty = "io.grpc" % "grpc-netty" % scalapb.compiler.Version.grpcJavaVersion val grpcServices = "io.grpc" % "grpc-services" % scalapb.compiler.Version.grpcJavaVersion - val nettyBoringSsl = "io.netty" % "netty-tcnative-boringssl-static" % "2.0.59.Final" - val nettyTcnative = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier osClassifier - val nettyTcnativeLinux = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64" - val nettyTcnativeFedora = "io.netty" % "netty-tcnative" % "2.0.59.Final" classifier "linux-x86_64-fedora" + val nettyBoringSsl = "io.netty" % "netty-tcnative-boringssl-static" % "2.0.46.Final" + val nettyTcnative = "io.netty" % "netty-tcnative" % "2.0.46.Final" classifier osClassifier + val nettyTcnativeLinux = "io.netty" % "netty-tcnative" % "2.0.46.Final" classifier "linux-x86_64" + val nettyTcnativeFedora = "io.netty" % "netty-tcnative" % "2.0.46.Final" classifier "linux-x86_64-fedora" val scalaCompat = "org.scala-lang.modules" %% "scala-collection-compat" % "2.6.0" val scalatest = "org.scalatest" %% "scalatest" % "3.2.13" % "test" val scalatestPlus = "org.scalatestplus" %% "scalacheck-1-16" % "3.2.13.0" % "test" @@ -109,6 +111,8 @@ object Dependencies { catsCore, catsEffect, catsLawsTest, + catsEffectStd, + catsEffectKernel, fs2Core, fs2Io, guava, @@ -133,6 +137,10 @@ object Dependencies { "com.google.errorprone" % "error_prone_annotations" % "2.18.0", 
"io.perfmark" % "perfmark-api" % "0.23.0", "org.codehaus.mojo" % "animal-sniffer-annotations" % "1.19", + "org.http4s" %% "http4s-core" % "0.23.6", + "io.circe" %% "circe-jawn" % "0.14.1", + "io.circe" %% "circe-core" % "0.14.1", + "com.comcast" %% "ip4s-core" % "3.0.4", // Strange version conflict, it requires the same version but in square brackets (range?). // e.g. io.grpc:grpc-core:1.37.0 ([1.37.0] wanted) // https://stackoverflow.com/questions/59423185/strange-versions-conflict-in-sbt-strict-mode diff --git a/project/plugins.sbt b/project/plugins.sbt index 9bfdd232364..d617903aa88 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -18,7 +18,5 @@ addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.6") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.1") -// TODO replace with addSbtPlugin("org.typelevel" % "sbt-fs2-grpc" % "") -// when migrated top CE3 since latest fs2-grpc is not available for CE2 -addSbtPlugin("org.lyranthe.fs2-grpc" % "sbt-java-gen" % "0.11.2") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") +addSbtPlugin("org.typelevel" % "sbt-fs2-grpc" % "2.5.11") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala index e3032ac58d2..e02e6e803f6 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/accounting/package.scala @@ -3,9 +3,9 @@ package coop.rchain.rholang.interpreter import cats._ import cats.data._ import cats.effect.Sync +import cats.effect.kernel.{MonadCancel, Resource} import cats.syntax.all._ import cats.mtl._ - import coop.rchain.catscontrib.ski.kp import 
coop.rchain.rholang.interpreter.errors.OutOfPhlogistonsError import cats.effect.std.Semaphore @@ -37,19 +37,21 @@ package object accounting extends Costs { override def count: F[Long] = semaphore.count override def releaseN(n: Long): F[Unit] = semaphore.releaseN(n) override def tryAcquireN(n: Long): F[Boolean] = semaphore.tryAcquireN(n) - override def withPermit[A](t: F[A]): F[A] = semaphore.withPermit(t) + override def permit: Resource[F, Unit] = semaphore.permit + override def mapK[G[_]](f: F ~> G)(implicit G: MonadCancel[G, _]): Semaphore[G] = + semaphore.mapK(f) } def charge[F[_]: Monad]( amount: Cost )(implicit cost: _cost[F], error: _error[F]): F[Unit] = - cost.withPermit( + cost.permit.use { _ => cost.get.flatMap { c => if (c.value < 0) error.raiseError[Unit](OutOfPhlogistonsError) else cost.tell(Chain.one(amount)) >> cost.set(c - amount) } - ) >> error.ensure(cost.get)(OutOfPhlogistonsError)(_.value >= 0).void + } >> error.ensure(cost.get)(OutOfPhlogistonsError)(_.value >= 0).void // TODO: Remove global (dummy) implicit! 
implicit def noOpCostLog[M[_]: Applicative]: FunctorTell[M, Chain[Cost]] = @@ -78,8 +80,9 @@ package object accounting extends Costs { override def count: G[Long] = nt(C.count) override def releaseN(n: Long): G[Unit] = nt(C.releaseN(n)) override def tryAcquireN(n: Long): G[Boolean] = nt(C.tryAcquireN(n)) - override def withPermit[A](t: G[A]): G[A] = - Sync[G].bracket[Unit, A](acquire)(kp(t))(kp(release)) + override def permit: Resource[G, Unit] = Resource.make(().pure[G])(_ => ().pure[G]) + + override def mapK[K[_]](f: G ~> K)(implicit G: MonadCancel[K, _]): Semaphore[K] = this.mapK(f) } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala index b38d02505b6..2bf6a19c864 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/matcher/StreamT.scala @@ -2,14 +2,15 @@ package coop.rchain.rholang.interpreter.matcher import cats.mtl.lifting.MonadLayerControl import cats.{~>, Alternative, Applicative, Functor, FunctorFilter, Monad, MonadError, MonoidK} import cats.data.OptionT -import cats.effect.Sync +import cats.effect.kernel.{CancelScope, Poll} +import cats.effect.{Outcome, Ref, Sync} import coop.rchain.catscontrib.MonadTrans import coop.rchain.rholang.interpreter.matcher.StreamT.{SCons, SNil, Step} import scala.collection.immutable.Stream import scala.collection.immutable.Stream.Cons +import scala.concurrent.duration.FiniteDuration import scala.util.{Left, Right} -import cats.effect.Ref /** * Shamelessly transcribed minimal version of Gabriel Gonzalez's beginner-friendly ListT @@ -202,6 +203,23 @@ trait StreamTInstances2 { implicit val F = F0 implicit val M = M0 implicit val AL = AL0 + + // TODO + override def suspend[A](hint: Sync.Type)(thunk: => A): StreamT[F, A] = ??? + + override def rootCancelScope: CancelScope = ??? 
+ + override def forceR[A, B](fa: StreamT[F, A])(fb: StreamT[F, B]): StreamT[F, B] = ??? + + override def uncancelable[A](body: Poll[StreamT[F, *]] => StreamT[F, A]): StreamT[F, A] = ??? + + override def canceled: StreamT[F, Unit] = ??? + + override def onCancel[A](fa: StreamT[F, A], fin: StreamT[F, Unit]): StreamT[F, A] = ??? + + override def monotonic: StreamT[F, FiniteDuration] = ??? + + override def realTime: StreamT[F, FiniteDuration] = ??? } } @@ -210,27 +228,26 @@ private trait StreamTSync[F[_]] extends Sync[StreamT[F, *]] with StreamTMonadErr implicit def M: Monad[StreamT[F, *]] implicit def AL: Alternative[StreamT[F, *]] - import cats.effect.ExitCase + import cats.effect.Resource.ExitCase def bracketCase[A, B](acquire: StreamT[F, A])(use: A => StreamT[F, B])( - release: (A, ExitCase[Throwable]) => StreamT[F, Unit] + release: (A, ExitCase) => StreamT[F, Unit] ): StreamT[F, B] = flatMap(StreamT.liftF(Ref.of[F, Boolean](false))) { ref => StreamT(F.flatMap(F.bracketCase[Step[F, A], Step[F, B]](acquire.next) { - case SNil() => F.pure(SNil()) - case SCons(head, tail) => { - AL.combineK(use(head), M.flatMap(tail)(use)).next - } + case SNil() => F.pure(SNil()) + case SCons(head, tail) => AL.combineK(use(head), M.flatMap(tail)(use)).next + } { case (SNil(), _) => F.pure(()) - case (SCons(head, _), ExitCase.Completed) => { - F.flatMap(release(head, ExitCase.Completed).next) { + case (SCons(head, _), Outcome.Succeeded(_)) => { + F.flatMap(release(head, ExitCase.Succeeded).next) { case SNil() => ref.set(true) case SCons(_, _) => F.unit } } case (SCons(head, _), ec) => { - F.map(release(head, ec).next)(_ => ()) + F.map(release(head, ExitCase.fromOutcome(ec)).next)(_ => ()) } }) { case s @ SCons(_, _) => F.map(ref.get)(b => if (b) SNil() else s) diff --git a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala index ed8c67f36aa..9a36a8ff070 100644 --- 
a/rholang/src/test/scala/coop/rchain/rholang/Resources.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/Resources.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang import cats.Parallel -import cats.effect.ExitCase.Error +import cats.effect.kernel.Resource.ExitCase import cats.effect.{Async, Resource, Sync} import cats.syntax.all._ import com.typesafe.scalalogging.Logger @@ -14,7 +14,7 @@ import coop.rchain.rspace import coop.rchain.rspace.RSpace.RSpaceStore import coop.rchain.rspace.syntax.rspaceSyntaxKeyValueStoreManager import coop.rchain.rspace.{Match, RSpace} -import coop.rchain.shared.{Log, RChainScheduler} +import coop.rchain.shared.Log import coop.rchain.store.KeyValueStoreManager import monix.execution.Scheduler @@ -29,7 +29,7 @@ object Resources { Resource.makeCase(Sync[F].delay(Files.createTempDirectory(prefix)))( (path, exitCase) => Sync[F].delay(exitCase match { - case Error(ex) => + case ExitCase.Errored(ex) => logger .error( s"Exception thrown while using the tempDir '$path'. 
Temporary dir NOT deleted.", @@ -39,7 +39,7 @@ object Resources { }) ) - def mkRhoISpace[F[_]: Async: Parallel: ContextShift: KeyValueStoreManager: Metrics: Span: Log] + def mkRhoISpace[F[_]: Async: Parallel: KeyValueStoreManager: Metrics: Span: Log] : F[RhoISpace[F]] = { import coop.rchain.rholang.interpreter.storage._ @@ -48,21 +48,20 @@ object Resources { for { store <- KeyValueStoreManager[F].rSpaceStores space <- RSpace.create[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - store, - RChainScheduler.rholangEC + store ) } yield space } - def mkRuntime[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( + def mkRuntime[F[_]: Async: Parallel: Metrics: Span: Log]( prefix: String ): Resource[F, RhoRuntime[F]] = mkTempDir(prefix) .evalMap(RholangCLI.mkRSpaceStoreManager[F](_)) .evalMap(_.rSpaceStores) - .evalMap(RhoRuntime.createRuntime(_, Par(), RChainScheduler.rholangEC)) + .evalMap(RhoRuntime.createRuntime(_, Par())) - def mkRuntimes[F[_]: Async: Parallel: ContextShift: Metrics: Span: Log]( + def mkRuntimes[F[_]: Async: Parallel: Metrics: Span: Log]( prefix: String, initRegistry: Boolean = false ): Resource[F, (RhoRuntime[F], ReplayRhoRuntime[F], RhoHistoryRepository[F])] = @@ -71,7 +70,7 @@ object Resources { .evalMap(_.rSpaceStores) .evalMap(createRuntimes(_, initRegistry = initRegistry)) - def createRuntimes[F[_]: Async: ContextShift: Parallel: Log: Metrics: Span]( + def createRuntimes[F[_]: Async: Parallel: Log: Metrics: Span]( stores: RSpaceStore[F], initRegistry: Boolean = false, additionalSystemProcesses: Seq[Definition[F]] = Seq.empty @@ -81,8 +80,7 @@ object Resources { for { hrstores <- RSpace .createWithReplay[F, Par, BindPattern, ListParWithRandom, TaggedContinuation]( - stores, - RChainScheduler.rholangEC + stores ) (space, replay) = hrstores runtimes <- RhoRuntime diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/StreamTSpec.scala 
b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/StreamTSpec.scala index dfab5059d31..5b8ad705cd8 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/StreamTSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/matcher/StreamTSpec.scala @@ -3,15 +3,15 @@ package coop.rchain.rholang.interpreter.matcher import cats.arrow.FunctionK import cats.data.{EitherT, WriterT} import cats.syntax.all._ -import cats.effect.laws.discipline.{BracketTests, SyncTests} +import cats.effect.laws._ import cats.laws.discipline.{AlternativeTests, MonadErrorTests, MonadTests} import cats.mtl.laws.discipline.MonadLayerControlTests -import cats.{~>, Eq, Monad} +import cats.{~>, effect, Eq, Eval, Monad} import coop.rchain.catscontrib.laws.discipline.MonadTransTests import coop.rchain.rholang.StackSafetySpec import coop.rchain.rholang.interpreter.matcher.StreamT.{SCons, Step} -import cats.Eval import cats.effect.Sync +import cats.effect.kernel.Sync.Type import org.scalacheck.{Arbitrary, Gen, Prop} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.funsuite.AnyFunSuite @@ -19,6 +19,7 @@ import org.scalatest.matchers.should.Matchers import cats.instances.AllInstances import cats.syntax.AllSyntax import coop.rchain.catscontrib.effect.implicits.sEval +import org.scalacheck.ScalacheckShapeless.arbitrarySingletonType class StreamTSpec extends AnyFlatSpec with Matchers { @@ -80,6 +81,8 @@ class StreamTSpec extends AnyFlatSpec with Matchers { case _ => n.pure[F].flatMap(x => hugeFlatMap[F](x - 1)) } + implicit val x: Monad[StreamT[Eval, *]] = + coop.rchain.rholang.interpreter.matcher.StreamT.streamTMonad[Eval] val huge = hugeFlatMap[StreamT[Eval, *]](maxDepth) assert(StreamT.run(huge).value == Stream(0)) @@ -151,7 +154,7 @@ class StreamTLawsSpec ) ) - implicit def eqEff[A: Eq]: Eq[Effect[A]] = Eq.by(x => x.value.attempt) + implicit def eqEff[A: Eq]: Eq[Effect[A]] = Eq.by(x => x.value.value.attempt) implicit def eqFA[A: Eq]: 
Eq[StreamTEffect[A]] = Eq.by(StreamT.run[Effect, A]) implicit def eqT: Eq[Throwable] = Eq.allEqual @@ -188,21 +191,27 @@ class StreamTLawsSpec checkProps(MonadErrorTests[StreamTEffect, Unit].monadError[Int, Int, String].props) } + implicit val a: Arbitrary[Sync.Type] = Arbitrary[Sync.Type]( + Gen.oneOf(Seq(Type.Delay, Type.Blocking, Type.InterruptibleMany, Type.InterruptibleOnce)) + ) + + implicit val b: StreamTEffect[Boolean] => Prop = ??? + test("StreamT.SyncLaws") { checkProps(SyncTests[StreamTEffect].sync[Int, Int, String].props) } - test("StreamT.SyncLaws.from") { - val fromEffect = - λ[Effect ~> StreamTEffect[*]]( - e => StreamT.liftF(e) - ) - checkProps( - BracketTests[StreamTEffect[*], Throwable] - .bracketTrans[Effect, Int, Int]( - fromEffect - ) - .props - ) - } +// test("StreamT.SyncLaws.from") { +// val fromEffect = +// λ[Effect ~> StreamTEffect[*]]( +// e => StreamT.liftF(e) +// ) +// checkProps( +// BracketTests[StreamTEffect[*], Throwable] +// .bracketTrans[Effect, Int, Int]( +// fromEffect +// ) +// .props +// ) +// } } trait LowPriorityDerivations { diff --git a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBench.scala b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBench.scala index 2b2aea0ad01..60af8da4d17 100644 --- a/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBench.scala +++ b/rspace-bench/src/test/scala/coop/rchain/rspace/bench/EvalBench.scala @@ -1,6 +1,6 @@ package coop.rchain.rspace.bench -import monix.eval.Task +import cats.effect.IO import monix.execution.schedulers.{CanBlock, TrampolineScheduler} import monix.execution.{ExecutionModel, Scheduler} import org.openjdk.jmh.annotations._ @@ -14,30 +14,30 @@ import scala.concurrent.duration.Duration //for example //java -jar target/scala-2.12/rspacebench_2.12-0.1.0-SNAPSHOT.jar EvalBench -i 10 -wi 5 -f 2 -t 2 class EvalBench { - - import EvalBench._ - - def createTest(state: EvalBenchStateBase): Task[Unit] = { - val par = state.term.getOrElse(throw new 
Error("Failed to prepare executable rholang term")) - state.runtime.inj(par)(state.rand) - } - - //if we run multiple tests on a single-threaded scheduler - //they will compete on single execution queue - //therefore this test always limited to one thread - @Benchmark - @Threads(1) - def reduceMVCEPPST(state: MVCEPPBenchState): Unit = { - val runTask = createTest(state).executeOn(state.singleThreadedScheduler, forceAsync = false) - runTask.runSyncUnsafe(Duration.Inf)(state.singleThreadedScheduler, CanBlock.permit) - } - - @Benchmark - def reduceMVCEPPMT(state: MVCEPPBenchState): Unit = { - implicit val scheduler: Scheduler = monix.execution.Scheduler.Implicits.global - val runTask = createTest(state) - Await.result(runTask.runToFuture, Duration.Inf) - } +// TODO enable back +// import EvalBench._ +// +// def createTest(state: EvalBenchStateBase): IO[Unit] = { +// val par = state.term.getOrElse(throw new Error("Failed to prepare executable rholang term")) +// state.runtime.inj(par)(state.rand) +// } +// +// //if we run multiple tests on a single-threaded scheduler +// //they will compete on single execution queue +// //therefore this test always limited to one thread +// @Benchmark +// @Threads(1) +// def reduceMVCEPPST(state: MVCEPPBenchState): Unit = { +// val runIO = createTest(state).executeOn(state.singleThreadedScheduler, forceAsync = false) +// runIO.runSyncUnsafe(Duration.Inf)(state.singleThreadedScheduler, CanBlock.permit) +// } +// +// @Benchmark +// def reduceMVCEPPMT(state: MVCEPPBenchState): Unit = { +// implicit val scheduler: Scheduler = monix.execution.Scheduler.Implicits.global +// val runIO = createTest(state) +// Await.result(runIO.runToFuture, Duration.Inf) +// } } object EvalBench { diff --git a/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala b/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala index 02b08e486da..21313d340d9 100644 --- 
a/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala +++ b/shared/src/main/scala/coop/rchain/catscontrib/effect/implicits/package.scala @@ -1,100 +1,56 @@ package coop.rchain.catscontrib.effect import cats._ +import cats.arrow.FunctionK import cats.effect._ +import cats.effect.kernel.CancelScope import cats.syntax.all._ +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.{FiniteDuration, MILLISECONDS} import scala.util.{Failure, Success, Try} import scala.util.control.NonFatal package object implicits { + // Sync typeclass implementation for cats.Eval datatype is required to use cats Eval for stack safe serialization of + // Rholang types. This replaces (as part of attempt to abstract from concrete effect type) + // monix.Сoeval that was used for this purpose before. + implicit val sEval = new Sync[Eval] { + override def suspend[A](hint: Sync.Type)(thunk: => A): Eval[A] = Eval.later(thunk) - // this is for testing purposes, do not use in production code! - implicit val concurrentId: Async[Id] = - new Concurrent[Id] { - override def start[A](fa: Id[A]): Id[Fiber[Id, A]] = ??? - override def racePair[A, B]( - fa: Id[A], - fb: Id[B] - ): Id[Either[(A, Fiber[Id, B]), (Fiber[Id, A], B)]] = ??? - override def async[A](k: (Either[Throwable, A] => Unit) => Unit): Id[A] = ??? - override def asyncF[A](k: (Either[Throwable, A] => Unit) => Id[Unit]): Id[A] = ??? 
- override def suspend[A](thunk: => Id[A]): Id[A] = syncId.defer(thunk) - override def bracketCase[A, B](acquire: Id[A])(use: A => Id[B])( - release: (A, ExitCase[Throwable]) => Id[Unit] - ): Id[B] = syncId.bracketCase(acquire)(use)(release) - override def flatMap[A, B](fa: Id[A])(f: A => Id[B]): Id[B] = syncId.flatMap(fa)(f) - override def tailRecM[A, B](a: A)(f: A => Id[Either[A, B]]): Id[B] = syncId.tailRecM(a)(f) - override def raiseError[A](e: Throwable): Id[A] = syncId.raiseError(e) - override def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]): Id[A] = - syncId.handleErrorWith(fa)(f) - override def pure[A](x: A): Id[A] = syncId.pure(x) - } - - implicit val syncId: Sync[Id] = - new Sync[Id] { - def pure[A](x: A): cats.Id[A] = x - - def handleErrorWith[A](fa: cats.Id[A])(f: Throwable => cats.Id[A]): cats.Id[A] = - try { - fa - } catch { - case NonFatal(e) => f(e) - } - - @SuppressWarnings(Array("org.wartremover.warts.Throw")) - def raiseError[A](e: Throwable): cats.Id[A] = throw e - - def flatMap[A, B](fa: cats.Id[A])(f: A => cats.Id[B]): cats.Id[B] = - catsInstancesForId.flatMap(fa)(f) - - def tailRecM[A, B](a: A)(f: A => cats.Id[Either[A, B]]): cats.Id[B] = - catsInstancesForId.tailRecM(a)(f) - - @SuppressWarnings(Array("org.wartremover.warts.Throw")) - def bracketCase[A, B](acquire: A)(use: A => B)(release: (A, ExitCase[Throwable]) => Unit): B = - Try(use(acquire)) match { - case Success(result) => - release(acquire, ExitCase.Completed) - result + override def rootCancelScope: CancelScope = CancelScope.Cancelable - case Failure(e) => - release(acquire, ExitCase.error(e)) - throw e - } + override def forceR[A, B](fa: Eval[A])(fb: Eval[B]): Eval[B] = fa.flatMap(_ => fb) - def suspend[A](thunk: => A): A = thunk + override def uncancelable[A](body: Poll[Eval] => Eval[A]): Eval[A] = { + val poll: Poll[Eval] = FunctionK.id[Eval].asInstanceOf[Poll[Eval]] + body(poll) } - // Sync typeclass implementation for cats.Eval datatype is required to use cats Eval for 
stack safe serialization of - // Rholang types. This replaces (as part of attempt to abstract from concrete effect type) - // monix.Сoeval that was used for this purpose before. - implicit val sEval = new Sync[Eval] { - override def suspend[A](thunk: => Eval[A]): Eval[A] = Eval.defer(thunk) + override def canceled: Eval[Unit] = Eval.Unit - @SuppressWarnings(Array("org.wartremover.warts.Throw")) - override def bracketCase[A, B]( - acquire: Eval[A] - )(use: A => Eval[B])(release: (A, ExitCase[Throwable]) => Eval[Unit]): Eval[B] = - Try(use(acquire.value)) match { - case Success(result) => - release(acquire.value, ExitCase.Completed).flatMap(_ => result) - case Failure(e) => - release(acquire.value, ExitCase.error(e)).map(_ => throw e) - } + override def onCancel[A](fa: Eval[A], fin: Eval[Unit]): Eval[A] = fin.map(_.asInstanceOf[A]) override def flatMap[A, B](fa: Eval[A])(f: A => Eval[B]): Eval[B] = fa.flatMap(f) override def tailRecM[A, B](a: A)(f: A => Eval[Either[A, B]]): Eval[B] = a.tailRecM(f) + override def pure[A](x: A): Eval[A] = Eval.now(x) + + override def monotonic: Eval[FiniteDuration] = + Eval.always(FiniteDuration(java.lang.System.nanoTime, MILLISECONDS)) + + override def realTime: Eval[FiniteDuration] = + Eval.always(FiniteDuration(java.lang.System.currentTimeMillis, MILLISECONDS)) + @SuppressWarnings(Array("org.wartremover.warts.Throw")) override def raiseError[A](e: Throwable): Eval[A] = Eval.later(throw e) override def handleErrorWith[A](fa: Eval[A])(f: Throwable => Eval[A]): Eval[A] = try { Eval.now(fa.value) - } catch { case NonFatal(e) => f(e) } - - override def pure[A](x: A): Eval[A] = Eval.now(x) + } catch { + case NonFatal(e) => f(e) + } } } diff --git a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala index a34b79052e7..bd09dadf0a8 100644 --- a/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala +++ 
b/shared/src/main/scala/coop/rchain/metrics/MetricsSemaphore.scala @@ -2,9 +2,10 @@ package coop.rchain.metrics import cats.effect._ import cats.syntax.all._ - import coop.rchain.catscontrib.ski.kp import cats.effect.std.Semaphore +import cats.~> +import coop.rchain.metrics.implicits.MetricsSyntaxConversion class MetricsSemaphore[F[_]: Sync: Metrics]( underlying: Semaphore[F] @@ -23,12 +24,14 @@ class MetricsSemaphore[F[_]: Sync: Metrics]( def tryAcquireN(n: Long): F[Boolean] = underlying.tryAcquireN(n) def releaseN(n: Long): F[Unit] = underlying.releaseN(n) - def withPermit[A](t: F[A]): F[A] = - for { - _ <- Metrics[F].incrementGauge("lock.permit") - result <- Sync[F].bracket(acquire)(kp(t))(kp(release)) - _ <- Metrics[F].decrementGauge("lock.permit") - } yield result + def withPermit[A](t: F[A]): F[A] = permit.use(_ => t) + + override def permit: Resource[F, Unit] = + Resource.make(Metrics[F].incrementGauge("lock.permit"))( + _ => Metrics[F].decrementGauge("lock.permit") + ) + + override def mapK[G[_]](f: F ~> G)(implicit G: MonadCancel[G, _]): Semaphore[G] = ??? 
} object MetricsSemaphore { From c35c70c5f10460e217238e1032952b98fc6817be Mon Sep 17 00:00:00 2001 From: stanislavlyalin Date: Sat, 8 Apr 2023 13:16:27 +0300 Subject: [PATCH 17/17] Removed LogicalTime class and usages --- .../HashSetCasperSpecification.scala | 1 + .../MultiParentCasperAddBlockSpec.scala | 3 --- .../MultiParentCasperCommunicationSpec.scala | 3 --- .../batch1/MultiParentCasperDeploySpec.scala | 1 - .../MultiParentCasperFinalizationSpec.scala | 3 --- .../batch1/MultiParentCasperMergeSpec.scala | 19 +++++-------------- .../MultiParentCasperReportingSpec.scala | 3 --- .../batch1/MultiParentCasperRholangSpec.scala | 3 --- .../batch1/MultiParentCasperSmokeSpec.scala | 3 --- .../batch2/LimitedParentDepthSpec.scala | 2 -- .../batch2/SingleParentCasperSpec.scala | 3 --- .../rchain/casper/batch2/ValidateTest.scala | 5 +---- .../engine/RunningHandleHasBlockSpec.scala | 9 +-------- .../casper/genesis/AuthKeyUpdateSpec.scala | 4 +--- .../genesis/PosMultiSigTransferSpec.scala | 4 +--- .../rchain/casper/genesis/PosUpdateSpec.scala | 2 -- .../casper/genesis/RegistryUpdateSpec.scala | 3 --- .../rchain/casper/helper/BlockGenerator.scala | 2 -- .../coop/rchain/casper/helper/TestNode.scala | 13 ++++--------- .../merging/MergeNumberChannelSpec.scala | 2 -- .../rchain/casper/merging/MergingCases.scala | 1 - .../casper/rholang/DeployerIdTest.scala | 2 -- .../casper/rholang/RuntimeManagerTest.scala | 1 - .../casper/util/comm/CommUtilSpec.scala | 3 +-- .../rchain/comm/rp/ClearConnectionsSpec.scala | 3 +-- .../coop/rchain/comm/rp/ConnectSpec.scala | 1 - .../rchain/comm/rp/FindAndConnectSpec.scala | 1 - .../rchain/p2p/EffectsTestInstances.scala | 18 ------------------ 28 files changed, 16 insertions(+), 102 deletions(-) diff --git a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala index b0dcc02756f..b08fbd39533 100644 --- 
a/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala +++ b/casper/src/slowcooker/scala/coop.rchain.casper/HashSetCasperSpecification.scala @@ -60,6 +60,7 @@ object HashSetCasperActions { ConstructDeploy.sourceDeploy(s"new x in { x!(0) }", ts, shardId = "root") implicit class EffectOps[A](f: Effect[A]) { + import cats.effect.unsafe.implicits.global def result: A = f.unsafeRunSync } } diff --git a/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala index c59ba2ab111..4fc49fd44a9 100644 --- a/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/addblock/MultiParentCasperAddBlockSpec.scala @@ -14,7 +14,6 @@ import coop.rchain.comm.rp.ProtocolHelper.packet import coop.rchain.crypto.signatures.{Secp256k1, Signed} import coop.rchain.models.PCost import coop.rchain.models.syntax._ -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.scalatestcontrib._ import coop.rchain.shared.syntax._ @@ -31,7 +30,6 @@ class MultiParentCasperAddBlockSpec extends AnyFlatSpec with Matchers with Inspe import ValidBlock._ import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] implicit val s = Sync[IO] val genesis = buildGenesis() private val SHARD_ID = genesis.genesisBlock.shardId @@ -164,7 +162,6 @@ class MultiParentCasperAddBlockSpec extends AnyFlatSpec with Matchers with Inspe /* it should "reject unsigned blocks" in effectTest { TestNode.standaloneEff(genesis).use { node => - implicit val timeEff = new LogicalTime[Effect] for { basicDeployData <- ConstructDeploy.basicDeployData[Effect](0) diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala index 22288f270cb..0eccf9de226 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperCommunicationSpec.scala @@ -8,7 +8,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.protocol._ import coop.rchain.casper.util.ConstructDeploy import coop.rchain.crypto.signatures.Signed -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors @@ -18,8 +17,6 @@ class MultiParentCasperCommunicationSpec extends AnyFlatSpec with Matchers with import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - val genesis = buildGenesis() // TODO: this test is meaningless as a unit test diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala index e9261f205f7..1211040c060 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperDeploySpec.scala @@ -4,7 +4,6 @@ import coop.rchain.casper.blocks.proposer.{Created, NoNewDeploys} import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.helper.{BlockApiFixture, TestNode} import coop.rchain.casper.util.ConstructDeploy -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala index c64d428b165..5b4afacf31a 100644 --- 
a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperFinalizationSpec.scala @@ -5,7 +5,6 @@ import coop.rchain.casper.helper.TestNode import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.protocol.BlockMessage import coop.rchain.casper.util.ConstructDeploy -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors @@ -16,8 +15,6 @@ class MultiParentCasperFinalizationSpec extends AnyFlatSpec with Matchers with I import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - val genesis = buildGenesis( buildGenesisParametersFromBonds(List(10L, 10L, 10L, 10L)) ) diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala index c347334489e..331e64d4141 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperMergeSpec.scala @@ -1,9 +1,9 @@ package coop.rchain.casper.batch1 +import cats.effect.Sync import coop.rchain.casper.helper.TestNode import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.{ConstructDeploy, RSpaceUtil} -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors @@ -14,8 +14,6 @@ class MultiParentCasperMergeSpec extends AnyFlatSpec with Matchers with Inspecto import RSpaceUtil._ import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - val genesisParams = buildGenesisParametersSize(3) val genesis = buildGenesis(genesisParams) @@ -167,17 +165,10 @@ class MultiParentCasperMergeSpec 
extends AnyFlatSpec with Matchers with Inspecto it should "not merge blocks that touch the same channel involving joins" ignore effectTest { TestNode.networkEff(genesis, networkSize = 2).use { nodes => for { - current0 <- timeEff.currentMillis - deploy0 = ConstructDeploy.sourceDeploy( - "@1!(47)", - current0, - sec = ConstructDeploy.defaultSec2 - ) - current1 <- timeEff.currentMillis - deploy1 = ConstructDeploy.sourceDeploy( - "for(@x <- @1 & @y <- @2){ @1!(x) }", - current1 - ) + deploy0 <- Sync[Effect].delay( + ConstructDeploy.sourceDeploy("@1!(47)", 1L, sec = ConstructDeploy.defaultSec2) + ) + deploy1 = ConstructDeploy.sourceDeploy("for(@x <- @1 & @y <- @2){ @1!(x) }", 2L) deploy2 <- ConstructDeploy.basicDeployData[Effect](2) deploys = Vector( deploy0, diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala index cc504c09d32..4cd7957684d 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperReportingSpec.scala @@ -7,7 +7,6 @@ import coop.rchain.casper.rholang.Resources import coop.rchain.casper.util.ConstructDeploy import coop.rchain.casper.reporting.{ReportStore, ReportingCasper} import coop.rchain.models.{BindPattern, ListParWithRandom, Par, TaggedContinuation} -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rspace.ReportingRspace.ReportingComm import coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.store.InMemoryStoreManager @@ -20,8 +19,6 @@ class MultiParentCasperReportingSpec extends AnyFlatSpec with Matchers with Insp import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff: LogicalTime[Effect] = new LogicalTime[Effect] - val genesis: GenesisContext = buildGenesis() "ReportingCasper" should "behave the same way as MultiParentCasper" in effectTest { diff 
--git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala index 0b19cc9d2d8..798b75c1263 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperRholangSpec.scala @@ -6,7 +6,6 @@ import coop.rchain.casper.protocol.BlockMessage import coop.rchain.casper.rholang.{BlockRandomSeed, RuntimeManager, Tools} import coop.rchain.casper.util.{ConstructDeploy, ProtoUtil, RSpaceUtil} import coop.rchain.crypto.signatures.Secp256k1 -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.models.syntax._ import coop.rchain.shared.Base16 @@ -20,8 +19,6 @@ class MultiParentCasperRholangSpec extends AnyFlatSpec with Matchers with Inspec import RSpaceUtil._ import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff: LogicalTime[Effect] = new LogicalTime[Effect] - val genesis = buildGenesis() "MultiParentCasper" should "create blocks based on deploys" in effectTest { diff --git a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala index ae515f1b1b3..40bad99553b 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch1/MultiParentCasperSmokeSpec.scala @@ -4,7 +4,6 @@ import cats.syntax.all._ import coop.rchain.casper.helper.TestNode import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.ConstructDeploy -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors @@ -14,8 +13,6 @@ class MultiParentCasperSmokeSpec extends AnyFlatSpec with 
Matchers with Inspecto import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - private val genesis = buildGenesis() it should "perform the most basic deploy successfully" in effectTest { diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala index f04097274d7..309f7237515 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/LimitedParentDepthSpec.scala @@ -6,14 +6,12 @@ import cats.syntax.traverse._ import coop.rchain.casper.helper.TestNode import coop.rchain.casper.util.ConstructDeploy.basicDeployData import coop.rchain.casper.util.GenesisBuilder.buildGenesis -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import monix.execution.Scheduler import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class LimitedParentDepthSpec extends AnyFlatSpec with Matchers { implicit val scheduler = Scheduler.fixedPool("limited-parent-depth-scheduler", 2) - implicit val timeEff = new LogicalTime[IO] val genesisContext = buildGenesis() diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala b/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala index 07f52a93bce..dc2b778cab3 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/SingleParentCasperSpec.scala @@ -7,7 +7,6 @@ import coop.rchain.casper.protocol.DeployData import coop.rchain.casper.util.ConstructDeploy import coop.rchain.casper.util.GenesisBuilder.buildGenesis import coop.rchain.crypto.signatures.Signed -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib.effectTest import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inspectors @@ -15,8 
+14,6 @@ import org.scalatest.matchers.should.Matchers // TODO Reenable after new finalizer is implemented. class SingleParentCasperSpec extends AnyFlatSpec with Matchers with Inspectors { - implicit val timeEff = new LogicalTime[Effect] - val genesis = buildGenesis() "SingleParentCasper" should "create blocks with a single parent" ignore effectTest { diff --git a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala index 9279354e7a1..e87ba1073ad 100644 --- a/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/batch2/ValidateTest.scala @@ -51,10 +51,7 @@ class ValidateTest implicit val metrics: Metrics[IO] = new Metrics.MetricsNOP[IO]() implicit val s = Sync[IO] - override def beforeEach(): Unit = { - log.reset() - timeEff.reset() - } + override def beforeEach(): Unit = log.reset() def createChain[F[_]: Async: BlockStore: BlockDagStorage]( length: Int, diff --git a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala index 3e15cbe5b46..f8a735af0ac 100644 --- a/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/engine/RunningHandleHasBlockSpec.scala @@ -15,12 +15,7 @@ import coop.rchain.comm.rp.RPConf import coop.rchain.comm.{Endpoint, NodeIdentifier, PeerNode} import coop.rchain.metrics.Metrics import coop.rchain.models.BlockHash.BlockHash -import coop.rchain.p2p.EffectsTestInstances.{ - createRPConfAsk, - LogStub, - LogicalTime, - TransportLayerStub -} +import coop.rchain.p2p.EffectsTestInstances.{createRPConfAsk, LogStub, TransportLayerStub} import coop.rchain.shared.Log import org.scalatest._ import org.scalatest.funspec.AnyFunSpec @@ -40,7 +35,6 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with 
Ref.unsafe[IO, Connections](List(local)) implicit val transportLayer = new TransportLayerStub[IO] implicit val rpConf = createRPConfAsk[IO](local) - implicit val time = new LogicalTime[IO] implicit val commUtil = CommUtil.of[IO] implicit val blockRetriever = BlockRetriever.of[IO] @@ -59,7 +53,6 @@ class RunningHandleHasBlockSpec extends AnyFunSpec with BeforeAndAfterEach with override def beforeEach(): Unit = { transportLayer.reset() transportLayer.setResponses(alwaysSuccess) - time.reset() } describe("BlockRetriever") { diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala index 19260da1bcc..0953b038227 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/AuthKeyUpdateSpec.scala @@ -9,7 +9,6 @@ import coop.rchain.models.GDeployId import coop.rchain.models.rholang.RhoType.{RhoBoolean, RhoString, RhoTuple2} import coop.rchain.models.rholang.implicits._ import coop.rchain.models.syntax._ -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.shared.scalatestcontrib._ import org.scalatest.Inspectors @@ -22,8 +21,7 @@ class AuthKeyUpdateSpec extends AnyFlatSpec with Matchers with Inspectors { import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - private val shardId = "root" + private val shardId = "root" private val p1 = PrivateKey("fc743bd08a822d544bfbe05a5663fc325039a44c8f0c7fbea95a85517da5c36b".unsafeDecodeHex) private val pub1 = Secp256k1.toPublic(p1) diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala index 569df117604..118467f0544 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala +++ 
b/casper/src/test/scala/coop/rchain/casper/genesis/PosMultiSigTransferSpec.scala @@ -7,7 +7,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.ConstructDeploy import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.Secp256k1 -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import coop.rchain.models.syntax._ import coop.rchain.rholang.build.CompiledRholangTemplate @@ -20,8 +19,7 @@ class PosMultiSigTransferSpec extends AnyFlatSpec with Matchers with Inspectors import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - val shardId = "root" + val shardId = "root" val p1 = PrivateKey("fc743bd08a822d544bfbe05a5663fc325039a44c8f0c7fbea95a85517da5c36b".unsafeDecodeHex) val pub1 = Secp256k1.toPublic(p1) diff --git a/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala index 1889352eec3..b9b598bf65b 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/PosUpdateSpec.scala @@ -9,7 +9,6 @@ import coop.rchain.models.GDeployId import coop.rchain.models.rholang.RhoType.{RhoBoolean, RhoNumber, RhoString, RhoTuple2} import coop.rchain.models.rholang.implicits._ import coop.rchain.models.syntax._ -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.util.RevAddress import coop.rchain.shared.scalatestcontrib._ import org.scalatest.Inside.inside @@ -23,7 +22,6 @@ class PosUpdateSpec extends AnyFlatSpec with Matchers with Inspectors { import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] private val shardId = "root" private val hexP1 = "fc743bd08a822d544bfbe05a5663fc325039a44c8f0c7fbea95a85517da5c36b" private val hexP2 = "6e88cf274735f3f7f73ec3d7f0362c439ab508427682b5bd788007aca665d810" diff --git 
a/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala b/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala index c411a9ee227..82438061f5d 100644 --- a/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/genesis/RegistryUpdateSpec.scala @@ -6,7 +6,6 @@ import coop.rchain.casper.helper.TestNode._ import coop.rchain.casper.util.ConstructDeploy import coop.rchain.crypto.PrivateKey import coop.rchain.crypto.signatures.Secp256k1 -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib._ import coop.rchain.models.syntax._ import coop.rchain.rholang.interpreter.util.RevAddress @@ -20,8 +19,6 @@ class RegistryUpdateSpec extends AnyFlatSpec with Matchers with Inspectors { import coop.rchain.casper.util.GenesisBuilder._ - implicit val timeEff = new LogicalTime[Effect] - it should "update the testLib right" in effectTest { val shardId = "root" val p1 = diff --git a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala index 15ce2dbbc41..f3fc423458a 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/BlockGenerator.scala @@ -22,7 +22,6 @@ import coop.rchain.models.Validator.Validator import coop.rchain.models.block.StateHash._ import coop.rchain.models.blockImplicits.getRandomBlock import coop.rchain.models.syntax._ -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.shared.syntax._ import coop.rchain.shared.{Log, LogSource} @@ -31,7 +30,6 @@ object BlockGenerator { private[this] val GenerateBlockMetricsSource = Metrics.Source(CasperMetricsSource, "generate-block") - implicit val timeEff = new LogicalTime[IO] implicit val logSource: LogSource = LogSource(this.getClass) 
// Dummy empty Casper snapshot diff --git a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala index 7c394a86aa4..57194d1b550 100644 --- a/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala +++ b/casper/src/test/scala/coop/rchain/casper/helper/TestNode.scala @@ -48,7 +48,6 @@ case class TestNode[F[_]: Async]( tls: TransportLayerServerTestImpl[F], genesis: BlockMessage, validatorIdOpt: Option[ValidatorIdentity], - logicalTime: LogicalTime[F], synchronyConstraintThreshold: Double, dataDir: Path, maxNumberOfParents: Int = Int.MaxValue, @@ -318,11 +317,10 @@ object TestNode { maxParentDepth: Option[Int], withReadOnlySize: Int ): Resource[F, IndexedSeq[TestNode[F]]] = { - val n = sks.length - val names = (1 to n).map(i => if (i <= (n - withReadOnlySize)) s"node-$i" else s"readOnly-$i") - val isReadOnly = (1 to n).map(i => if (i <= (n - withReadOnlySize)) false else true) - val peers = names.map(peerNode(_, 40400)) - val logicalTime = new LogicalTime[F] + val n = sks.length + val names = (1 to n).map(i => if (i <= (n - withReadOnlySize)) s"node-$i" else s"readOnly-$i") + val isReadOnly = (1 to n).map(i => if (i <= (n - withReadOnlySize)) false else true) + val peers = names.map(peerNode(_, 40400)) val nodesF = names @@ -338,7 +336,6 @@ object TestNode { genesis, sk, storageMatrixPath, - logicalTime, synchronyConstraintThreshold, maxNumberOfParents, maxParentDepth, @@ -373,7 +370,6 @@ object TestNode { genesis: BlockMessage, sk: PrivateKey, storageDir: Path, - logicalTime: LogicalTime[F], synchronyConstraintThreshold: Double, maxNumberOfParents: Int, maxParentDepth: Option[Int], @@ -465,7 +461,6 @@ object TestNode { tls, genesis, validatorId, - logicalTime, synchronyConstraintThreshold, newStorageDir, maxNumberOfParents, diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala 
b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala index 858e51a3ba6..344189bb69c 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergeNumberChannelSpec.scala @@ -11,7 +11,6 @@ import coop.rchain.crypto.hash.Blake2b512Random import coop.rchain.metrics.Span import coop.rchain.models.Par import coop.rchain.models.rholang.RhoType.{RhoName, RhoNumber} -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.accounting.Cost import coop.rchain.rholang.interpreter.merging.RholangMergingLogic import coop.rchain.rholang.interpreter.merging.RholangMergingLogic.convertToReadNumber @@ -301,7 +300,6 @@ class MergeNumberChannelSpec extends AnyFlatSpec { } yield () } } - implicit val timeEff = new LogicalTime[IO] implicit val logEff = Log.log[IO] implicit val spanEff = Span.noop[IO] diff --git a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala index bb1e3552cfb..6bf5bb44866 100644 --- a/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala +++ b/casper/src/test/scala/coop/rchain/casper/merging/MergingCases.scala @@ -8,7 +8,6 @@ import coop.rchain.casper.rholang.{BlockRandomSeed, Resources, RuntimeManager} import coop.rchain.casper.syntax._ import coop.rchain.casper.util.{ConstructDeploy, GenesisBuilder} import coop.rchain.models.syntax.modelsSyntaxByteString -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.rspace.merger.{EventLogIndex, EventLogMergingLogic} import coop.rchain.sdk.dag.merging.ConflictResolutionLogic diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala index 18d0d2f05cb..414f97aa196 100644 --- 
a/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/DeployerIdTest.scala @@ -14,14 +14,12 @@ import coop.rchain.casper.syntax._ import coop.rchain.models.Expr.ExprInstance.GBool import coop.rchain.models.rholang.implicits._ import coop.rchain.models.{GDeployerId, Par} -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.shared.scalatestcontrib.effectTest import coop.rchain.shared.{Base16, Log} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class DeployerIdTest extends AnyFlatSpec with Matchers { - implicit val time = new LogicalTime[IO] implicit val log: Log[IO] = new Log.NOPLog[IO]() private val dummyMergeableName = BlockRandomSeed.nonNegativeMergeableTagName("dummy") diff --git a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala index b86a1da2c22..4a7797d9f28 100644 --- a/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala +++ b/casper/src/test/scala/coop/rchain/casper/rholang/RuntimeManagerTest.scala @@ -22,7 +22,6 @@ import coop.rchain.metrics import coop.rchain.metrics.{Metrics, NoopSpan, Span} import coop.rchain.models.PCost import coop.rchain.models.block.StateHash.StateHash -import coop.rchain.p2p.EffectsTestInstances.LogicalTime import coop.rchain.rholang.interpreter.SystemProcesses.BlockData import coop.rchain.rholang.interpreter.accounting.Cost import coop.rchain.rholang.interpreter.compiler.Compiler diff --git a/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala b/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala index 45036f351cd..58e9290407f 100644 --- a/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala +++ b/casper/src/test/scala/coop/rchain/casper/util/comm/CommUtilSpec.scala @@ -12,7 +12,7 @@ import coop.rchain.comm.rp.RPConf import 
coop.rchain.comm.{Endpoint, NodeIdentifier, PeerNode} import coop.rchain.metrics.Metrics.MetricsNOP import coop.rchain.models.BlockHash.BlockHash -import coop.rchain.p2p.EffectsTestInstances.{LogStub, LogicalTime, TransportLayerStub} +import coop.rchain.p2p.EffectsTestInstances.{LogStub, TransportLayerStub} import coop.rchain.shared._ import org.scalatest.BeforeAndAfterEach import org.scalatest.funspec.AnyFunSpec @@ -114,7 +114,6 @@ class CommUtilSpec extends AnyFunSpec with BeforeAndAfterEach with Matchers { implicit val transport = new TransportLayerStub[IO] implicit val askConf = new ConstApplicativeAsk[IO, RPConf](conf) implicit val log = new LogStub[IO] - implicit val time = new LogicalTime[IO] implicit val metrics = new MetricsNOP[IO] private def initRequestedBlocks( diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala index 92820e1c504..acc78027ff9 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ClearConnectionsSpec.scala @@ -9,7 +9,7 @@ import coop.rchain.comm._ import coop.rchain.comm.protocol.routing._ import coop.rchain.comm.rp.Connect._ import coop.rchain.metrics.Metrics -import coop.rchain.p2p.EffectsTestInstances.{LogicalTime, TransportLayerStub} +import coop.rchain.p2p.EffectsTestInstances.TransportLayerStub import coop.rchain.shared._ import org.scalatest._ import org.scalatest.funspec.AnyFunSpec @@ -31,7 +31,6 @@ class ClearConnectionsSpec implicit val transport = new TransportLayerStub[IO] implicit val log = new Log.NOPLog[IO] implicit val metric = new Metrics.MetricsNOP[IO] - implicit val time = new LogicalTime[IO] override def beforeEach(): Unit = { transport.reset() diff --git a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala index 0ab6e5d0cfd..c11d737834e 100644 --- 
a/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/ConnectSpec.scala @@ -27,7 +27,6 @@ class ConnectSpec extends AnyFunSpec with Matchers with BeforeAndAfterEach with type Effect[A] = CommErrT[IO, A] implicit val logEff = new Log.NOPLog[Effect] - implicit val timeEff = new LogicalTime[Effect] implicit val metricEff = new Metrics.MetricsNOP[Effect] implicit val nodeDiscoveryEff = new NodeDiscoveryStub[Effect]() implicit val transportLayerEff = new TransportLayerStub[Effect] diff --git a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala index 3c8d7846335..6ff0fac119a 100644 --- a/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala +++ b/comm/src/test/scala/coop/rchain/comm/rp/FindAndConnectSpec.scala @@ -31,7 +31,6 @@ class FindAndConnectSpec val src: PeerNode = peer("src") val deftimeout: FiniteDuration = FiniteDuration(1, MILLISECONDS) implicit val log = new Log.NOPLog[IO] - implicit val time = new LogicalTime[Effect] implicit val metric = new Metrics.MetricsNOP[IO] implicit val nodeDiscovery = new NodeDiscoveryStub[Effect]() implicit val rpConf = conf(defaultTimeout = deftimeout) diff --git a/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala b/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala index 23b76c8e35c..b6c5461fc28 100644 --- a/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala +++ b/comm/src/test/scala/coop/rchain/p2p/EffectsTestInstances.scala @@ -19,24 +19,6 @@ object EffectsTestInstances { val networkId = "test" - class LogicalTime[F[_]: Sync] { - var clock: Long = 0 - - def currentMillis: F[Long] = Sync[F].delay { - this.clock = clock + 1 - clock - } - - def nanoTime: F[Long] = Sync[F].delay { - this.clock = clock + 1 - clock - } - - def sleep(duration: FiniteDuration): F[Unit] = Sync[F].delay(()) - - def reset(): Unit = this.clock = 0 - } - class 
NodeDiscoveryStub[F[_]: Sync]() extends NodeDiscovery[F] { var nodes: List[PeerNode] = List.empty[PeerNode]