diff --git a/integration-tests/test/test_propose.py b/integration-tests/test/test_propose.py index 62ed8476f32..2c1dbd80516 100644 --- a/integration-tests/test/test_propose.py +++ b/integration-tests/test/test_propose.py @@ -34,7 +34,7 @@ "contract_2.rho": 197, "contract_3.rho": 329, "contract_4.rho": 782, - "contract_5.rho": 3892, + "contract_5.rho": 3868, } @contextmanager diff --git a/models/src/main/scala/coop/rchain/models/package.scala b/models/src/main/scala/coop/rchain/models/package.scala index 427e641c8b4..2e98ae0791a 100644 --- a/models/src/main/scala/coop/rchain/models/package.scala +++ b/models/src/main/scala/coop/rchain/models/package.scala @@ -3,6 +3,7 @@ package coop.rchain import coop.rchain.models.ByteStringSyntax import coop.rchain.models.ByteArraySyntax import coop.rchain.models.StringSyntax +import coop.rchain.models.rholangn.parmanager.primitive.syntax.PrimitiveWriterSyntax package object models { // Importing syntax object means using all extensions in the project diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Basic.scala b/models/src/main/scala/coop/rchain/models/rholangn/Basic.scala new file mode 100644 index 00000000000..189a9b5ad21 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Basic.scala @@ -0,0 +1,162 @@ +package coop.rchain.models.rholangn + +object NilN extends BasicN + +/** * + * Rholang process + * + * For example, `@0!(1) | @2!(3) | for(x <- @0) { Nil }` has two sends + * and one receive. + */ +final class ParProcN(val ps: Seq[ParN]) extends BasicN { + def sortedPs: Seq[ParN] = parmanager.Manager.sortPars(ps) +} +object ParProcN { def apply(ps: Seq[ParN]): ParProcN = new ParProcN(ps) } + +/** * + * A send is written `chan!(data)` or `chan!!(data)` for a persistent send. + * Upon send, all free variables in data are substituted with their values. 
+ */ +final class SendN(val chan: ParN, val data: Seq[ParN], val persistent: Boolean) extends BasicN +object SendN { + def apply(chan: ParN, data: Seq[ParN], persistent: Boolean): SendN = + new SendN(chan, data, persistent) + + def apply(chan: ParN, data: Seq[ParN]): SendN = + apply(chan, data, persistent = false) + + def apply(chan: ParN, data: ParN, persistent: Boolean): SendN = + apply(chan, Seq(data), persistent) + + def apply(chan: ParN, data: ParN): SendN = + apply(chan, Seq(data), persistent = false) +} + +/** * + * A receive is written `for(binds) { body }` + * i.e. `for(patterns <- source) { body }` + * or for a persistent recieve: `for(patterns <= source) { body }`. + * + * It's an error for free Variable to occur more than once in a pattern. + */ +final class ReceiveN( + val binds: Seq[ReceiveBindN], + val body: ParN, + val persistent: Boolean, + val peek: Boolean, + val bindCount: Int +) extends BasicN { + def sortedBinds: Seq[ReceiveBindN] = parmanager.Manager.sortBinds(binds) +} +object ReceiveN { + def apply( + binds: Seq[ReceiveBindN], + body: ParN, + persistent: Boolean, + peek: Boolean, + bindCount: Int + ): ReceiveN = + new ReceiveN(binds, body, persistent, peek, bindCount) + + def apply( + bind: ReceiveBindN, + body: ParN, + persistent: Boolean, + peek: Boolean, + bindCount: Int + ): ReceiveN = + apply(Seq(bind), body, persistent, peek, bindCount) + + def apply(binds: Seq[ReceiveBindN], body: ParN, bindCount: Int): ReceiveN = + apply(binds, body, persistent = false, peek = false, bindCount) + + def apply(bind: ReceiveBindN, body: ParN, bindCount: Int): ReceiveN = + apply(Seq(bind), body, bindCount) +} + +final class ReceiveBindN( + val patterns: Seq[ParN], + val source: ParN, + val remainder: Option[VarN], + val freeCount: Int +) extends AuxParN + +object ReceiveBindN { + def apply( + patterns: Seq[ParN], + source: ParN, + remainder: Option[VarN], + freeCount: Int + ): ReceiveBindN = new ReceiveBindN(patterns, source, remainder, freeCount) + + 
def apply(pattern: ParN, source: ParN, remainder: Option[VarN], freeCount: Int): ReceiveBindN = + apply(Seq(pattern), source, remainder, freeCount) + + def apply(patterns: Seq[ParN], source: ParN, freeCount: Int): ReceiveBindN = + new ReceiveBindN(patterns, source, None, freeCount) + + def apply(pattern: ParN, source: ParN, freeCount: Int): ReceiveBindN = + apply(Seq(pattern), source, freeCount) + + def apply(pattern: ParN, source: ParN): ReceiveBindN = + apply(Seq(pattern), source, 0) +} + +/** + * + */ +final class MatchN(val target: ParN, val cases: Seq[MatchCaseN]) extends BasicN + +object MatchN { + def apply(target: ParN, cases: Seq[MatchCaseN]): MatchN = new MatchN(target, cases) + def apply(target: ParN, mCase: MatchCaseN): MatchN = apply(target, Seq(mCase)) +} + +final class MatchCaseN(val pattern: ParN, val source: ParN, val freeCount: Int) extends AuxParN + +object MatchCaseN { + def apply(pattern: ParN, source: ParN, freeCount: Int = 0): MatchCaseN = + new MatchCaseN(pattern, source, freeCount) +} + +/** + * The new construct serves as a variable binder with scope Proc which producesan unforgeable process + * for each uniquely declared variable and substitutes these (quoted) processes for the variables. + * + * @param bindCount Total number of variables entered in p. This makes it easier to substitute or walk a term. + * @param p Rholang executable code inside New. + * For normalized form, p should not contain solely another new. + * Also for normalized form, the first use should be level+0, next use level+1 + * up to level+count for the last used variable. + * @param uri List of names Rho built-in processes listening on channels (e.g. `rho:io:stdout`). + * For normalization, uri-referenced variables come at the end, and in lexicographical order. + * @param injections List of injected uri-referenced variables (e.g. rho:rchain:deployId). + * Should be sort by key in lexicographical order. 
+ */ +final class NewN( + val bindCount: Int, + val p: ParN, + val uri: Seq[String], + val injections: Map[String, ParN] +) extends BasicN { + def sortedUri: Seq[String] = parmanager.Manager.sortUris(uri) + def sortedInjections: Seq[(String, ParN)] = parmanager.Manager.sortInjections(injections) +} + +object NewN { + def apply( + bindCount: Int, + p: ParN, + uri: Seq[String], + injections: Map[String, ParN] + ): NewN = new NewN(bindCount, p, uri, injections) + + def apply( + bindCount: Int, + p: ParN, + uri: Seq[String], + injections: Seq[(String, ParN)] + ): NewN = new NewN(bindCount, p, uri, Map.from(injections)) + + def apply(bindCount: Int, p: ParN): NewN = new NewN(bindCount, p, Seq(), Map()) +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Bindings.scala b/models/src/main/scala/coop/rchain/models/rholangn/Bindings.scala new file mode 100644 index 00000000000..bd91b7dc1f8 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Bindings.scala @@ -0,0 +1,30 @@ +package coop.rchain.models.rholangn + +import coop.rchain.models._ + +object Bindings { + def toProto(p: ParN): Par = BindingsToProto.toProto(p) + def toProto(ps: Seq[ParN]): Seq[Par] = ps.map(toProto) + def toProtoVarOpt(p: VarN): Var = BindingsToProto.toVar(p) + def toProtoExpr(e: ExprN): Expr = BindingsToProto.toExpr(e) + def toProtoVar(v: VarN): Var = BindingsToProto.toVar(v) + def toProtoUnforgeable(u: UnforgeableN): GUnforgeable = BindingsToProto.toUnforgeable(u) + def toProtoConnective(c: ConnectiveN): Connective = BindingsToProto.toConnective(c) + def toProtoSend(x: SendN): Send = BindingsToProto.toSend(x) + def toProtoReceive(x: ReceiveN): Receive = BindingsToProto.toReceive(x) + def toProtoMatch(x: MatchN): Match = BindingsToProto.toMatch(x) + def toProtoNew(x: NewN): New = BindingsToProto.toNew(x) + + def fromProto(p: Par): ParN = BindingsFromProto.fromProto(p) + def fromProto(ps: Seq[Par]): Seq[ParN] = ps.map(fromProto) + def fromProtoVarOpt(p: Var): VarN = 
BindingsFromProto.fromVar(p) + def fromProtoExpr(e: Expr): ExprN = BindingsFromProto.fromExpr(e) + def fromProtoVar(v: Var): VarN = BindingsFromProto.fromVar(v) + def fromProtoUnforgeable(u: GUnforgeable): UnforgeableN = BindingsFromProto.fromUnforgeable(u) + def fromProtoConnective(c: Connective): ConnectiveN = BindingsFromProto.fromConnective(c) + def fromProtoSend(x: Send): SendN = BindingsFromProto.fromSend(x) + def fromProtoReceive(x: Receive): ReceiveN = BindingsFromProto.fromReceive(x) + def fromProtoMatch(x: Match): MatchN = BindingsFromProto.fromMatch(x) + def fromProtoNew(x: New): NewN = BindingsFromProto.fromNew(x) + +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/BindingsFromProto.scala b/models/src/main/scala/coop/rchain/models/rholangn/BindingsFromProto.scala new file mode 100644 index 00000000000..6e481a19639 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/BindingsFromProto.scala @@ -0,0 +1,426 @@ +package coop.rchain.models.rholangn + +import coop.rchain.models.Connective.ConnectiveInstance._ +import coop.rchain.models.Expr.ExprInstance._ +import coop.rchain.models.GUnforgeable.UnfInstance._ +import coop.rchain.models.Var.VarInstance._ +import coop.rchain.models._ +import scalapb.GeneratedMessage + +import scala.annotation.unused + +private[rholangn] object BindingsFromProto { + + def fromProto(p: Par): ParN = { + val ps = + Seq(p.sends, p.receives, p.news, p.exprs, p.matches, p.unforgeables, p.bundles, p.connectives) + .filter(_.nonEmpty) + .flatten + .map(fromProtoMessage) + ps match { + case Nil => NilN + case p :: Nil => p + case _ => ParProcN(ps) + } + } + + private def fromProtoMessage(m: GeneratedMessage): ParN = m match { + + /** Basic types */ + case x: Send => fromSend(x) + case x: Receive => fromReceive(x) + case x: Match => fromMatch(x) + case x: New => fromNew(x) + + /** Expressions */ + case e: Expr => fromExpr(e) + + /** Unforgeable names */ + case u: GUnforgeable => fromUnforgeable(u) + + /** 
Connective */ + case c: Connective => fromConnective(c) + + /** Other types */ + case x: Bundle => fromBundle(x) + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def fromExpr(e: Expr): ExprN = e.exprInstance match { + + /** Ground types */ + case x: GBool => fromGBool(x) + case x: GInt => fromGInt(x) + case x: GBigInt => fromGBigInt(x) + case x: GString => fromGString(x) + case x: GByteArray => fromGByteArray(x) + case x: GUri => fromGUri(x) + + /** Collections */ + case x: EListBody => fromEList(x.value) + case x: ETupleBody => fromETuple(x.value) + case x: ESetBody => fromParSet(x.value) + case x: EMapBody => fromParMap(x.value) + + /** Vars */ + case x: EVarBody => fromVar(x.value.v) + + /** Operations */ + case x: ENegBody => fromENeg(x.value) + case x: ENotBody => fromENot(x.value) + case x: EPlusBody => fromEPlus(x.value) + case x: EMinusBody => fromEMinus(x.value) + case x: EMultBody => fromEMult(x.value) + case x: EDivBody => fromEDiv(x.value) + case x: EModBody => fromEMod(x.value) + case x: ELtBody => fromELt(x.value) + case x: ELteBody => fromELte(x.value) + case x: EGtBody => fromEGt(x.value) + case x: EGteBody => fromEGte(x.value) + case x: EEqBody => fromEEq(x.value) + case x: ENeqBody => fromENeq(x.value) + case x: EAndBody => fromEAnd(x.value) + case x: EShortAndBody => fromEShortAnd(x.value) + case x: EOrBody => fromEOr(x.value) + case x: EShortOrBody => fromEShortOr(x.value) + case x: EPlusPlusBody => fromEPlusPlus(x.value) + case x: EMinusMinusBody => fromEMinusMinus(x.value) + case x: EPercentPercentBody => fromEPercentPercent(x.value) + case x: EMethodBody => fromEMethod(x.value) + case x: EMatchesBody => fromEMatches(x.value) + + case _ => throw new Exception("Unknown type for Expr conversion") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def fromVar(x: Var): VarN = x.varInstance match { + case n: BoundVar => fromBoundVar(n) + case n: FreeVar => fromFreeVar(n) + case _: Wildcard => WildcardN + case _ => 
throw new Exception("Unknown type for Var conversion") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def fromUnforgeable(u: GUnforgeable): UnforgeableN = + u.unfInstance match { + case x: GPrivateBody => fromPrivate(x.value) + case x: GDeployIdBody => fromDeployId(x.value) + case x: GDeployerIdBody => fromDeployerId(x.value) + case x: GSysAuthTokenBody => fromGSysAuthToken(x.value) + case _ => throw new Exception("Unknown type for GUnforgeable conversion") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def fromConnective(c: Connective): ConnectiveN = c.connectiveInstance match { + case _: ConnBool => ConnBoolN + case _: ConnInt => ConnIntN + case _: ConnBigInt => ConnBigIntN + case _: ConnString => ConnStringN + case _: ConnUri => ConnUriN + case _: ConnByteArray => ConnByteArrayN + case x: ConnNotBody => fromConnNotBody(x) + case x: ConnAndBody => fromConnAndBody(x) + case x: ConnOrBody => fromConnOrBody(x) + case x: VarRefBody => fromVarRefBody(x) + case _ => throw new Exception("Unknown type for Connective conversion") + } + + private def fromProto(ps: Seq[Par]): Seq[ParN] = ps.map(fromProto) + private def fromProto(varOpt: Option[Var]): Option[VarN] = varOpt.map(fromVar) + private def fromProtoKVPairs(ps: Seq[(Par, Par)]): Seq[(ParN, ParN)] = + ps.map { case (k, v) => (fromProto(k), fromProto(v)) } + private def fromProtoInjections(ps: Seq[(String, Par)]): Seq[(String, ParN)] = + ps.map { case (str, p) => (str, fromProto(p)) } + + /** Basic types */ + def fromSend(x: Send): SendN = { + val chan = fromProto(x.chan) + val data = fromProto(x.data) + val persistent = x.persistent + SendN(chan, data, persistent) + } + + def fromReceive(x: Receive): ReceiveN = { + val binds = x.binds.map(fromReceiveBind) + val body = fromProto(x.body) + val persistent = x.persistent + val peek = x.peek + val bindCount = x.bindCount + ReceiveN(binds, body, persistent, peek, bindCount) + } + + private def fromReceiveBind(x: ReceiveBind): 
ReceiveBindN = { + val patterns = fromProto(x.patterns) + val source = fromProto(x.source) + val remainder = fromProto(x.remainder) + val freeCount = x.freeCount + ReceiveBindN(patterns, source, remainder, freeCount) + } + + def fromMatch(x: Match): MatchN = { + val target = fromProto(x.target) + val cases = x.cases.map(fromMatchCase) + MatchN(target, cases) + } + + private def fromMatchCase(x: MatchCase): MatchCaseN = { + val pattern = fromProto(x.pattern) + val source = fromProto(x.source) + val freeCount = x.freeCount + MatchCaseN(pattern, source, freeCount) + } + + def fromNew(x: New): NewN = { + val bindCount = x.bindCount + val p = fromProto(x.p) + val uri = x.uri + val injections: Seq[(String, ParN)] = fromProtoInjections(x.injections.toSeq) + NewN(bindCount, p, uri, injections) + } + + /** Ground types */ + private def fromGBool(x: GBool): GBoolN = { + val v = x.value + GBoolN(v) + } + + private def fromGInt(x: GInt): GIntN = { + val v = x.value + GIntN(v) + } + + private def fromGBigInt(x: GBigInt): GBigIntN = { + val v = x.value + GBigIntN(v) + } + + private def fromGString(x: GString): GStringN = { + val v = x.value + GStringN(v) + } + + private def fromGByteArray(x: GByteArray): GByteArrayN = { + val v = x.value.toByteArray + GByteArrayN(v) + } + + private def fromGUri(x: GUri): GUriN = { + val v = x.value + GUriN(v) + } + + /** Collections */ + private def fromEList(x: EList): EListN = { + val ps = fromProto(x.ps) + val remainder = fromProto(x.remainder) + EListN(ps, remainder) + } + + private def fromETuple(x: ETuple): ETupleN = { + val ps = fromProto(x.ps) + ETupleN(ps) + } + + private def fromParSet(x: ParSet): ESetN = { + val ps = fromProto(x.ps.sortedPars) + val remainder = fromProto(x.remainder) + ESetN(ps, remainder) + } + + private def fromParMap(x: ParMap): EMapN = { + val ps = fromProtoKVPairs(x.ps.sortedList) + val remainder = fromProto(x.remainder) + EMapN(ps, remainder) + } + + /** Vars */ + private def fromBoundVar(x: BoundVar): BoundVarN 
= { + val idx = x.value + BoundVarN(idx) + } + + private def fromFreeVar(x: FreeVar): FreeVarN = { + val idx = x.value + FreeVarN(idx) + } + + /** Unforgeable names */ + private def fromPrivate(x: GPrivate): UPrivateN = { + val v = x.id.toByteArray + UPrivateN(v) + } + + private def fromDeployId(x: GDeployId): UDeployIdN = { + val v = x.sig.toByteArray + UDeployIdN(v) + } + + private def fromDeployerId(x: GDeployerId): UDeployerIdN = { + val v = x.publicKey.toByteArray + UDeployerIdN(v) + } + private def fromGSysAuthToken(@unused x: GSysAuthToken): USysAuthTokenN = + USysAuthTokenN() + + /** Operations */ + private def fromENeg(x: ENeg): ENegN = { + val p = fromProto(x.p) + ENegN(p) + } + + private def fromENot(x: ENot): ENotN = { + val p = fromProto(x.p) + ENotN(p) + } + + private def fromEPlus(x: EPlus): EPlusN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EPlusN(p1, p2) + } + + private def fromEMinus(x: EMinus): EMinusN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EMinusN(p1, p2) + } + + private def fromEMult(x: EMult): EMultN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EMultN(p1, p2) + } + + private def fromEDiv(x: EDiv): EDivN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EDivN(p1, p2) + } + + private def fromEMod(x: EMod): EModN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EModN(p1, p2) + } + + private def fromELt(x: ELt): ELtN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + ELtN(p1, p2) + } + + private def fromELte(x: ELte): ELteN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + ELteN(p1, p2) + } + + private def fromEGt(x: EGt): EGtN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EGtN(p1, p2) + } + + private def fromEGte(x: EGte): EGteN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EGteN(p1, p2) + } + + private def fromEEq(x: EEq): EEqN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EEqN(p1, p2) + } + + private def 
fromENeq(x: ENeq): ENeqN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + ENeqN(p1, p2) + } + + private def fromEAnd(x: EAnd): EAndN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EAndN(p1, p2) + } + + private def fromEShortAnd(x: EShortAnd): EShortAndN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EShortAndN(p1, p2) + } + + private def fromEOr(x: EOr): EOrN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EOrN(p1, p2) + } + + private def fromEShortOr(x: EShortOr): EShortOrN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EShortOrN(p1, p2) + } + + private def fromEPlusPlus(x: EPlusPlus): EPlusPlusN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EPlusPlusN(p1, p2) + } + + private def fromEMinusMinus(x: EMinusMinus): EMinusMinusN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EMinusMinusN(p1, p2) + } + + private def fromEPercentPercent(x: EPercentPercent): EPercentPercentN = { + val p1 = fromProto(x.p1) + val p2 = fromProto(x.p2) + EPercentPercentN(p1, p2) + } + + private def fromEMethod(x: EMethod): EMethodN = { + val methodName = x.methodName + val target = fromProto(x.target) + val arguments = fromProto(x.arguments) + EMethodN(methodName, target, arguments) + } + + private def fromEMatches(x: EMatches): EMatchesN = { + val target = fromProto(x.target) + val pattern = fromProto(x.pattern) + EMatchesN(target, pattern) + } + + /** Connective */ + private def fromConnNotBody(x: ConnNotBody): ConnNotN = { + val p = fromProto(x.value) + ConnNotN(p) + } + + private def fromConnAndBody(x: ConnAndBody): ConnAndN = { + val ps = fromProto(x.value.ps) + ConnAndN(ps) + } + + private def fromConnOrBody(x: ConnOrBody): ConnOrN = { + val ps = fromProto(x.value.ps) + ConnOrN(ps) + } + + private def fromVarRefBody(x: VarRefBody): ConnVarRefN = { + val index = x.value.index + val depth = x.value.depth + ConnVarRefN(index, depth) + } + + /** Other types */ + private def fromBundle(x: Bundle): 
BundleN = { + val body = fromProto(x.body) + val writeFlag = x.writeFlag + val readFlag = x.readFlag + BundleN(body, writeFlag, readFlag) + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/BindingsToProto.scala b/models/src/main/scala/coop/rchain/models/rholangn/BindingsToProto.scala new file mode 100644 index 00000000000..64a9e9ccd12 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/BindingsToProto.scala @@ -0,0 +1,444 @@ +package coop.rchain.models.rholangn + +import cats.Eval +import cats.effect.Sync +import com.google.protobuf.ByteString +import coop.rchain.catscontrib.effect.implicits.sEval +import coop.rchain.models.Connective.ConnectiveInstance._ +import coop.rchain.models.Expr.ExprInstance._ +import coop.rchain.models.Var.VarInstance._ +import coop.rchain.models.Var.WildcardMsg +import coop.rchain.models._ +import coop.rchain.models.rholang.implicits._ + +import scala.annotation.unused +import scala.collection.immutable.BitSet + +private[rholangn] object BindingsToProto { + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def toProto(p: ParN): Par = p match { + + /** Basic types */ + case x: NilN.type => Par() + case x: ParProcN => toParProc(x) + case x: SendN => toSend(x) + case x: ReceiveN => toReceive(x) + case x: MatchN => toMatch(x) + case x: NewN => toNew(x) + + /** Expressions */ + case e: ExprN => toExpr(e) + + /** Unforgeable names */ + case u: UnforgeableN => toUnforgeable(u) + + /** Connective */ + case c: ConnectiveN => toConnective(c) + + /** Other types */ + case x: BundleN => toBundle(x) + + case _ => throw new Exception("Unknown type for toProto conversation") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def toExpr(e: ExprN): Expr = e match { + + /** Ground types */ + case x: GBoolN => toGBool(x) + case x: GIntN => toGInt(x) + case x: GBigIntN => toGBigInt(x) + case x: GStringN => toGString(x) + case x: GByteArrayN => toGByteArray(x) + case x: GUriN => toGUri(x) + + /** 
Collections */ + case x: EListN => toEList(x) + case x: ETupleN => toETuple(x) + case x: ESetN => toParSet(x) + case x: EMapN => toParMap(x) + + /** Vars */ + case v: VarN => EVar(toVar(v)) + + /** Operations */ + case x: ENegN => toENeg(x) + case x: ENotN => toENot(x) + case x: EPlusN => toEPlus(x) + case x: EMinusN => toEMinus(x) + case x: EMultN => toEMult(x) + case x: EDivN => toEDiv(x) + case x: EModN => toEMod(x) + case x: ELtN => toELt(x) + case x: ELteN => toELte(x) + case x: EGtN => toEGt(x) + case x: EGteN => toEGte(x) + case x: EEqN => toEEq(x) + case x: ENeqN => toENeq(x) + case x: EAndN => toEAnd(x) + case x: EShortAndN => toEShortAnd(x) + case x: EOrN => toEOr(x) + case x: EShortOrN => toEShortOr(x) + case x: EPlusPlusN => toEPlusPlus(x) + case x: EMinusMinusN => toEMinusMinus(x) + case x: EPercentPercentN => toEPercentPercent(x) + case x: EMethodN => toEMethod(x) + case x: EMatchesN => toEMatches(x) + + case _ => throw new Exception("Unknown type for Expression conversation") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def toVar(x: VarN): Var = x match { + case n: BoundVarN => toBoundVar(n) + case n: FreeVarN => toFreeVar(n) + case _: WildcardN.type => Wildcard(WildcardMsg()) + case _ => throw new Exception("Unknown type for Var conversation") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def toUnforgeable(u: UnforgeableN): GUnforgeable = u match { + case x: UPrivateN => toPrivate(x) + case x: UDeployIdN => toDeployId(x) + case x: UDeployerIdN => toDeployerId(x) + case x: USysAuthTokenN => toGSysAuthToken(x) + case _ => throw new Exception("Unknown type for Unforgeable conversation") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def toConnective(c: ConnectiveN): Connective = c match { + case _: ConnBoolN.type => Connective(ConnBool(true)) + case _: ConnIntN.type => Connective(ConnInt(true)) + case _: ConnBigIntN.type => Connective(ConnBigInt(true)) + case _: ConnStringN.type => 
Connective(ConnString(true)) + case _: ConnUriN.type => Connective(ConnUri(true)) + case _: ConnByteArrayN.type => Connective(ConnByteArray(true)) + case x: ConnNotN => Connective(toConnNotBody(x)) + case x: ConnAndN => Connective(toConnAndBody(x)) + case x: ConnOrN => Connective(toConnOrBody(x)) + case x: ConnVarRefN => Connective(toVarRefBody(x)) + case _ => throw new Exception("Unknown type for Connective conversation") + } + + private def toProto(ps: Seq[ParN]): Seq[Par] = ps.map(toProto) + private def toProto(varOpt: Option[VarN]): Option[Var] = varOpt.map(toVar) + private def toProtoKVPairs(ps: Seq[(ParN, ParN)]): Seq[(Par, Par)] = + ps.map { case (k, v) => (toProto(k), toProto(v)) } + private def toProtoInjections(injections: Seq[(String, ParN)]): Seq[(String, Par)] = + injections.map { case (str, p) => (str, toProto(p)) } + + /** Basic types */ + private def toParProc(x: ParProcN): Par = { + val p = x.ps.foldLeft(Par())((acc, pN) => acc ++ toProto(pN)) + p.withConnectiveUsed(x.connectiveUsed) + } + + def toSend(x: SendN): Send = { + val chan = toProto(x.chan) + val data = toProto(x.data) + val persistent = x.persistent + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + Send(chan, data, persistent, locallyFree, connectiveUsed) + } + + def toReceive(x: ReceiveN): Receive = { + val binds = x.binds.map(toReceiveBind) + val body = toProto(x.body) + val persistent = x.persistent + val peek = x.peek + val bindCount = x.bindCount + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + Receive(binds, body, persistent, peek, bindCount, locallyFree, connectiveUsed) + } + + private def toReceiveBind(x: ReceiveBindN): ReceiveBind = { + val patterns = toProto(x.patterns) + val source = toProto(x.source) + val remainder = toProto(x.remainder) + val freeCount = x.freeCount + ReceiveBind(patterns, source, remainder, freeCount) + } + + def toMatch(x: MatchN): Match = { + val target = toProto(x.target) + val cases = x.cases.map(toMatchCase) 
+ val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + Match(target, cases, locallyFree, connectiveUsed) + } + + private def toMatchCase(x: MatchCaseN): MatchCase = { + val pattern = toProto(x.pattern) + val source = toProto(x.source) + val freeCount = x.freeCount + MatchCase(pattern, source, freeCount) + } + + def toNew(x: NewN): New = { + val bindCount = x.bindCount + val p = toProto(x.p) + val uri = x.uri + val injections: Map[String, Par] = Map.from(toProtoInjections(x.injections.toSeq)) + val locallyFree = BitSet() + New(bindCount, p, uri, injections, locallyFree) + } + + /** Ground types */ + private def toGBool(x: GBoolN): GBool = { + val v = x.v + GBool(v) + } + + private def toGInt(x: GIntN): GInt = { + val v = x.v + GInt(v) + } + + private def toGBigInt(x: GBigIntN): GBigInt = { + val v = x.v + GBigInt(v) + } + + private def toGString(x: GStringN): GString = { + val v = x.v + GString(v) + } + + private def toGByteArray(x: GByteArrayN): GByteArray = { + val v = ByteString.copyFrom(x.v) + GByteArray(v) + } + + private def toGUri(x: GUriN): GUri = { + val v = x.v + GUri(v) + } + + /** Collections */ + private def toEList(x: EListN): EList = { + val ps = toProto(x.ps) + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + val remainder = toProto(x.remainder) + EList(ps, locallyFree, connectiveUsed, remainder) + } + + private def toETuple(x: ETupleN): ETuple = { + val ps = toProto(x.ps) + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + ETuple(ps, locallyFree, connectiveUsed) + } + + private def toParSet(x: ESetN): ParSet = { + val ps = toProto(x.sortedPs) + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + val remainder = toProto(x.remainder) + ParSet(ps, connectiveUsed, Sync[Eval].delay(locallyFree), remainder) + } + + private def toParMap(x: EMapN): ParMap = { + val ps = toProtoKVPairs(x.sortedPs) + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + val remainder = 
toProto(x.remainder) + ParMap(ps, connectiveUsed, Sync[Eval].delay(locallyFree), remainder) + } + + /** Vars */ + private def toBoundVar(x: BoundVarN): BoundVar = { + val idx = x.idx + BoundVar(idx) + } + + private def toFreeVar(x: FreeVarN): FreeVar = { + val idx = x.idx + FreeVar(idx) + } + + /** Unforgeable names */ + private def toPrivate(x: UPrivateN): GPrivate = { + val v = ByteString.copyFrom(x.v) + GPrivate(v) + } + + private def toDeployId(x: UDeployIdN): GDeployId = { + val v = ByteString.copyFrom(x.v) + GDeployId(v) + } + + private def toDeployerId(x: UDeployerIdN): GDeployerId = { + val v = ByteString.copyFrom(x.v) + GDeployerId(v) + } + + private def toGSysAuthToken(@unused x: USysAuthTokenN): GSysAuthToken = + GSysAuthToken() + + /** Operations */ + private def toENeg(x: ENegN): ENeg = { + val p = toProto(x.p) + ENeg(p) + } + + private def toENot(x: ENotN): ENot = { + val p = toProto(x.p) + ENot(p) + } + + private def toEPlus(x: EPlusN): EPlus = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EPlus(p1, p2) + } + + private def toEMinus(x: EMinusN): EMinus = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EMinus(p1, p2) + } + + private def toEMult(x: EMultN): EMult = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EMult(p1, p2) + } + + private def toEDiv(x: EDivN): EDiv = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EDiv(p1, p2) + } + + private def toEMod(x: EModN): EMod = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EMod(p1, p2) + } + + private def toELt(x: ELtN): ELt = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + ELt(p1, p2) + } + + private def toELte(x: ELteN): ELte = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + ELte(p1, p2) + } + + private def toEGt(x: EGtN): EGt = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EGt(p1, p2) + } + + private def toEGte(x: EGteN): EGte = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EGte(p1, p2) + } + + private def toEEq(x: EEqN): EEq = { + val p1 = 
toProto(x.p1) + val p2 = toProto(x.p2) + EEq(p1, p2) + } + + private def toENeq(x: ENeqN): ENeq = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + ENeq(p1, p2) + } + + private def toEAnd(x: EAndN): EAnd = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EAnd(p1, p2) + } + + private def toEShortAnd(x: EShortAndN): EShortAnd = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EShortAnd(p1, p2) + } + + private def toEOr(x: EOrN): EOr = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EOr(p1, p2) + } + + private def toEShortOr(x: EShortOrN): EShortOr = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EShortOr(p1, p2) + } + + private def toEPlusPlus(x: EPlusPlusN): EPlusPlus = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EPlusPlus(p1, p2) + } + + private def toEMinusMinus(x: EMinusMinusN): EMinusMinus = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EMinusMinus(p1, p2) + } + + private def toEPercentPercent(x: EPercentPercentN): EPercentPercent = { + val p1 = toProto(x.p1) + val p2 = toProto(x.p2) + EPercentPercent(p1, p2) + } + + private def toEMethod(x: EMethodN): EMethod = { + val methodName = x.methodName + val target = toProto(x.target) + val arguments = toProto(x.arguments) + val locallyFree = BitSet() + val connectiveUsed = x.connectiveUsed + EMethod(methodName, target, arguments, locallyFree, connectiveUsed) + } + + private def toEMatches(x: EMatchesN): EMatches = { + val target = toProto(x.target) + val pattern = toProto(x.pattern) + EMatches(target, pattern) + } + + /** Connective */ + private def toConnNotBody(x: ConnNotN): ConnNotBody = { + val p = toProto(x.p) + ConnNotBody(p) + } + + private def toConnAndBody(x: ConnAndN): ConnAndBody = { + val ps = ConnectiveBody(toProto(x.ps)) + ConnAndBody(ps) + } + + private def toConnOrBody(x: ConnOrN): ConnOrBody = { + val ps = ConnectiveBody(toProto(x.ps)) + ConnOrBody(ps) + } + + private def toVarRefBody(x: ConnVarRefN): VarRefBody = { + val index = x.index + val depth = 
x.depth + VarRefBody(VarRef(index, depth)) + } + + /** Other types */ + private def toBundle(x: BundleN): Bundle = { + val body = toProto(x.body) + val writeFlag = x.writeFlag + val readFlag = x.readFlag + Bundle(body, writeFlag, readFlag) + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Collection.scala b/models/src/main/scala/coop/rchain/models/rholangn/Collection.scala new file mode 100644 index 00000000000..fde7e135d5a --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Collection.scala @@ -0,0 +1,107 @@ +package coop.rchain.models.rholangn + +import scala.collection.immutable.{TreeMap, TreeSet} + +/** + * Ordered collection of 0 or more processes. + * @param ps The sequence of any Rholang processes + * @param remainder gives support to use ... in the list construction and deconstruction e.g. [1, 2, 3 ... rest]. + * It's defined as optional variable. + */ +final class EListN(val ps: Seq[ParN], val remainder: Option[VarN]) extends CollectionN { + def :+(elem: ParN): EListN = EListN(ps :+ elem, remainder) + def +:(elem: ParN): EListN = EListN(elem +: ps, remainder) + def ++(elems: Seq[ParN]): EListN = EListN(ps ++ elems, None) + def ++(that: EListN): EListN = EListN(ps ++ that.ps, None) +} + +object EListN { + def apply(ps: Seq[ParN] = Seq(), r: Option[VarN] = None): EListN = new EListN(ps, r) + def apply(p: ParN): EListN = apply(Seq(p), None) + def empty: EListN = EListN() +} + +/** + * Ordered collection of 1 or more processes. + * @param ps The non-empty sequence of any Rholang processes + */ +final class ETupleN private (val ps: Seq[ParN]) extends CollectionN +object ETupleN { + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def apply(ps: Seq[ParN]): ETupleN = + if (ps.isEmpty) throw new Exception("Cannot create ETuple with an empty par sequence") + else new ETupleN(ps) + def apply(p: ParN): ETupleN = apply(Seq(p)) +} + +/** + * A Rholang set is an unordered collection of 0 or more processes. 
+ * @param ps The sequence of any Rholang processes + * @param remainder gives support to use ... in the set construction and deconstruction e.g. Set(1, 2, 3 ... rest). + * It's defined as optional variable. + */ +final class ESetN(private val ps: TreeSet[ParN], val remainder: Option[VarN]) extends CollectionN { + def sortedPs: Seq[ParN] = ps.toSeq + + def +(elem: ParN): ESetN = ESetN(ps + elem, remainder) + def -(elem: ParN): ESetN = ESetN(ps - elem, remainder) + + def ++(elems: Seq[ParN]): ESetN = ESetN(ps ++ elems, None) + def --(elems: Seq[ParN]): ESetN = ESetN(ps -- elems, None) + + def ++(that: ESetN): ESetN = ESetN(ps ++ that.ps, None) + def --(that: ESetN): ESetN = ESetN(ps -- that.ps, None) + + def contains(elem: ParN): Boolean = ps.contains(elem) +} +object ESetN { + def apply(): ESetN = new ESetN(TreeSet.empty(ParN.ordering), None) + def apply(ps: Seq[ParN], r: Option[VarN] = None): ESetN = + new ESetN(TreeSet.from(ps)(ParN.ordering), r) + def apply(p: ParN): ESetN = ESetN(Seq(p), None) + def empty: ESetN = ESetN() + private def apply(ps: TreeSet[ParN], remainder: Option[VarN]): ESetN = new ESetN(ps, remainder) +} + +/** + * A Rholang map is an unordered collection of 0 or more key-value pairs; both keys and values are processes. + * @param ps The sequence of any Rholang processes (that form key-value pairs) + * @param remainder gives support to use ... in the set construction and deconstruction e.g. {"a":1, "b":2 ... rest}. + * It's defined as optional variable. 
+ */ +final class EMapN(private val ps: TreeMap[ParN, ParN], val remainder: Option[VarN]) + extends CollectionN { + def sortedPs: Seq[(ParN, ParN)] = ps.toSeq + + def +(kv: (ParN, ParN)): EMapN = EMapN(ps + kv, remainder) + def -(key: ParN): EMapN = EMapN(ps - key, remainder) + + def ++(kvs: Seq[(ParN, ParN)]): EMapN = EMapN(ps ++ kvs, None) + def --(keys: Iterable[ParN]): EMapN = EMapN(ps -- keys, None) + + def ++(that: EMapN): EMapN = EMapN(ps ++ that.ps, None) + def --(that: EMapN): EMapN = EMapN(ps -- that.keys, None) + + def contains(p: ParN): Boolean = ps.contains(p) + def get(key: ParN): Option[ParN] = ps.get(key) + def getOrElse(key: ParN, default: ParN): ParN = ps.getOrElse(key, default) + + def keys: Seq[ParN] = ps.keys.toSeq + def values: Seq[ParN] = ps.values.toSeq +} + +object EMapN { + def apply(ps: Seq[(ParN, ParN)], r: Option[VarN]): EMapN = + new EMapN(TreeMap.from(ps)(ParN.ordering), r) + def apply(ps: Seq[(ParN, ParN)]): EMapN = apply(ps, None) + + def apply(ps: Map[ParN, ParN], r: Option[VarN]): EMapN = + new EMapN(TreeMap.from(ps)(ParN.ordering), r) + def apply(ps: Map[ParN, ParN]): EMapN = apply(ps, None) + + def apply(): EMapN = apply(Seq()) + def empty: EMapN = EMapN() + + private def apply(ps: TreeMap[ParN, ParN], remainder: Option[VarN]): EMapN = + new EMapN(ps, remainder) +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Connective.scala b/models/src/main/scala/coop/rchain/models/rholangn/Connective.scala new file mode 100644 index 00000000000..49520ca7ca6 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Connective.scala @@ -0,0 +1,54 @@ +package coop.rchain.models.rholangn + +/** Connective for type Bool in pattern */ +object ConnBoolN extends ConnectiveSTypeN + +/** Connective for type Int in pattern */ +object ConnIntN extends ConnectiveSTypeN + +/** Connective for type BigInt in pattern */ +object ConnBigIntN extends ConnectiveSTypeN + +/** Connective for type String in pattern */ +object 
ConnStringN extends ConnectiveSTypeN + +/** Connective for type Uri in pattern */ +object ConnUriN extends ConnectiveSTypeN + +/** Connective for type ByteArray in pattern */ +object ConnByteArrayN extends ConnectiveSTypeN + +/** The "~" (logical Not) for pattern matching. + * the pattern ~p says "anything but p" */ +final class ConnNotN(val p: ParN) extends ConnectiveFuncN +object ConnNotN { def apply(p: ParN): ConnNotN = new ConnNotN(p) } + +/** The "/\" (logical And) Conjunction for pattern matching. */ +// TODO: Consider a replacement `ps: Seq[ParN]` to `p1: ParN, p2: ParN` +final class ConnAndN(val ps: Seq[ParN]) extends ConnectiveFuncN +object ConnAndN { + def apply(ps: Seq[ParN]): ConnAndN = new ConnAndN(ps) + def apply(p1: ParN, p2: ParN): ConnAndN = new ConnAndN(Seq(p1, p2)) +} + +/** The "\/" (logical Or) Disjunction for pattern matching. */ +// TODO: Consider a replacement `ps: Seq[ParN]` to `p1: ParN, p2: ParN` +final class ConnOrN(val ps: Seq[ParN]) extends ConnectiveFuncN +object ConnOrN { + def apply(ps: Seq[ParN]): ConnOrN = new ConnOrN(ps) + def apply(p1: ParN, p2: ParN): ConnOrN = new ConnOrN(Seq(p1, p2)) +} + +/** The "=..." Binding for Bound variable in pattern matching. + * The purpose of VarRef is to provide a mechanism to bind variables to values or processes + * within pattern matching structures in Rholang, which is useful for controlling the flow of information + * and processes within a Rholang program. 
+ * E.g.: + * match someProc { =x => x!(*someChannel) } + * or + * for(@{=*x} <- someChannel) { x!(*someOtherChannel) } + */ +final class ConnVarRefN(val index: Int, val depth: Int) extends ConnectiveVarN +object ConnVarRefN { + def apply(index: Int, depth: Int): ConnVarRefN = new ConnVarRefN(index, depth) +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Ground.scala b/models/src/main/scala/coop/rchain/models/rholangn/Ground.scala new file mode 100644 index 00000000000..c715f08b988 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Ground.scala @@ -0,0 +1,21 @@ +package coop.rchain.models.rholangn + +final class GBoolN(val v: Boolean) extends GroundN +object GBoolN { def apply(v: Boolean): GBoolN = new GBoolN(v) } + +final class GIntN(val v: Long) extends GroundN +object GIntN { def apply(v: Long): GIntN = new GIntN(v) } + +final class GBigIntN(val v: BigInt) extends GroundN +object GBigIntN { def apply(v: BigInt): GBigIntN = new GBigIntN(v) } + +final class GStringN(val v: String) extends GroundN +object GStringN { def apply(v: String): GStringN = new GStringN(v) } + +final class GByteArrayN(val v: Array[Byte]) extends GroundN +object GByteArrayN { + def apply(bytes: Array[Byte]): GByteArrayN = new GByteArrayN(bytes) +} + +final class GUriN(val v: String) extends GroundN +object GUriN { def apply(v: String): GUriN = new GUriN(v) } diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Operation.scala b/models/src/main/scala/coop/rchain/models/rholangn/Operation.scala new file mode 100644 index 00000000000..e41a95414db --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Operation.scala @@ -0,0 +1,85 @@ +package coop.rchain.models.rholangn + +final class ENegN(private val input: ParN) extends Operation1ParN { + override val p: ParN = input +} +object ENegN { def apply(p: ParN): ENegN = new ENegN(p) } + +final class ENotN(private val input: ParN) extends Operation1ParN { + override val p: ParN = input +} 
object ENotN { def apply(p: ParN): ENotN = new ENotN(p) }

final class EPlusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPlusN { def apply(p1: ParN, p2: ParN): EPlusN = new EPlusN(p1, p2) }

final class EMinusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMinusN { def apply(p1: ParN, p2: ParN): EMinusN = new EMinusN(p1, p2) }

final class EMultN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMultN { def apply(p1: ParN, p2: ParN): EMultN = new EMultN(p1, p2) }

final class EDivN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EDivN { def apply(p1: ParN, p2: ParN): EDivN = new EDivN(p1, p2) }

final class EModN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EModN { def apply(p1: ParN, p2: ParN): EModN = new EModN(p1, p2) }

final class ELtN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ELtN { def apply(p1: ParN, p2: ParN): ELtN = new ELtN(p1, p2) }

final class ELteN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ELteN { def apply(p1: ParN, p2: ParN): ELteN = new ELteN(p1, p2) }

final class EGtN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EGtN { def apply(p1: ParN, p2: ParN): EGtN = new EGtN(p1, p2) }

final class EGteN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EGteN { def apply(p1: ParN, p2: ParN): EGteN = new EGteN(p1, p2) }

final class EEqN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EEqN { def apply(p1: ParN, p2: ParN): EEqN = new EEqN(p1, p2) }

final class ENeqN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ENeqN { def apply(p1: ParN, p2: ParN): ENeqN = new ENeqN(p1, p2) }

final class EAndN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EAndN { def apply(p1: ParN, p2: ParN): EAndN = new EAndN(p1, p2) }

final class EShortAndN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EShortAndN { def apply(p1: ParN, p2: ParN): EShortAndN = new EShortAndN(p1, p2) }

final class EOrN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EOrN { def apply(p1: ParN, p2: ParN): EOrN = new EOrN(p1, p2) }

final class EShortOrN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EShortOrN { def apply(p1: ParN, p2: ParN): EShortOrN = new EShortOrN(p1, p2) }

final class EPlusPlusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPlusPlusN { def apply(p1: ParN, p2: ParN): EPlusPlusN = new EPlusPlusN(p1, p2) }

final class EMinusMinusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMinusMinusN { def apply(p1: ParN, p2: ParN): EMinusMinusN = new EMinusMinusN(p1, p2) }

final class EPercentPercentN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPercentPercentN {
  def apply(p1: ParN, p2: ParN): EPercentPercentN = new EPercentPercentN(p1, p2)
}

final class EMethodN(val methodName: String, val target: ParN, val arguments: Seq[ParN])
    extends OperationOtherN
object EMethodN {
  def apply(methodName: String, target: ParN, arguments: Seq[ParN] = Seq()): EMethodN =
    new EMethodN(methodName, target, arguments)
  def apply(methodName: String, target: ParN, argument: ParN): EMethodN =
    new EMethodN(methodName, target, Seq(argument))
}

/**
 * The p matches q expression is similar to:
 * match p { q -> true; _ -> false }
 */
final class EMatchesN(val target: ParN, val pattern: ParN) extends OperationOtherN
object EMatchesN {
  def apply(target: ParN, pattern: ParN): EMatchesN = new EMatchesN(target, pattern)
}
diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Other.scala b/models/src/main/scala/coop/rchain/models/rholangn/Other.scala
new file mode 100644
index 00000000000..091e1049b0d
--- /dev/null
+++ b/models/src/main/scala/coop/rchain/models/rholangn/Other.scala
package coop.rchain.models.rholangn

/** *
 * Nothing can be received from a (quoted) bundle with `readFlag = false`.
+ * Likeise nothing can be sent to a (quoted) bundle with `writeFlag = false`. + * + * If both flags are set to false, bundle allows only for equivalance check. + * + * @param writeFlag flag indicating whether bundle is writeable + * @param readFlag flag indicating whether bundle is readable + */ +final class BundleN(val body: ParN, val writeFlag: Boolean, val readFlag: Boolean) extends OtherN { + def merge(other: BundleN): BundleN = { + val wFlag = writeFlag && other.writeFlag + val rFlag = readFlag && other.readFlag + BundleN(other.body, wFlag, rFlag) + } +} +object BundleN { + def apply(body: ParN, writeFlag: Boolean, readFlag: Boolean): BundleN = + new BundleN(body, writeFlag, readFlag) +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/RhoType.scala b/models/src/main/scala/coop/rchain/models/rholangn/RhoType.scala new file mode 100644 index 00000000000..3c156dfa816 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/RhoType.scala @@ -0,0 +1,110 @@ +package coop.rchain.models.rholangn + +import cats.Eval +import coop.rchain.models.rholangn.parmanager.Manager._ + +/** Base trait for Rholang elements in the Reducer */ +sealed trait RhoTypeN { + + /** Cryptographic hash code of this object */ + lazy val rhoHash: Array[Byte] = rhoHashFn(this) + + /** The size of serialized bytes lazily evaluated with memoization */ + val serializedSize: Eval[Int] = serializedSizeFn(this).memoize + + /** Serialized bytes lazily evaluated with memoization */ + val serialized: Eval[Array[Byte]] = serializedFn(this).memoize + + /** True if the object or at least one of the nested objects non-concrete. 
+ * Such a object cannot be viewed as if it were a term.*/ + // TODO: Rename connectiveUsed for more clarity + lazy val connectiveUsed: Boolean = connectiveUsedFn(this) + + /** True if the object or at least one of the nested objects can be evaluated in Reducer */ + lazy val evalRequired: Boolean = evalRequiredFn(this) + + /** True if the object or at least one of the nested objects can be substituted in Reducer */ + lazy val substituteRequired: Boolean = substituteRequiredFn(this) + + override def equals(x: Any): Boolean = parmanager.Manager.equals(this, x) +} + +/* TODO: In the future, it is necessary to append the classification. + Add main types and ground types. + Ground types must be part of expressions, and expressions are part of the main types. + */ +/** Auxiliary elements included in other pairs */ +trait AuxParN extends RhoTypeN + +/** Rholang element that can be processed in parallel, together with other elements */ +sealed trait ParN extends RhoTypeN + +object ParN { + + /** + * Create a flatten parallel Par (ParProc) from par sequence. + * See [[flattedPProc]] for more information. 
+ */ + def makeParProc(ps: Seq[ParN]): ParN = flattedPProc(ps) + + /** Combine two pars for their parallel execution */ + def combine(p1: ParN, p2: ParN): ParN = combinePars(p1, p2) + + def compare(p1: ParN, p2: ParN): Int = comparePars(p1, p2) + val ordering: Ordering[ParN] = (p1: ParN, p2: ParN) => compare(p1, p2) +} + +/** Basic rholang operations that can be executed in parallel*/ +trait BasicN extends ParN + +/** Expressions included in Rholang elements */ +sealed trait ExprN extends ParN + +/** Base types for Rholang expressions */ +trait GroundN extends ExprN + +/** Rholang collections */ +trait CollectionN extends ExprN + +/** Variables in Rholang (can be bound, free and wildcard) */ +trait VarN extends ExprN + +/** Operations in Rholang */ +sealed trait OperationN extends ExprN + +/** Operation with one par */ +trait Operation1ParN extends OperationN { + val p: ParN +} + +/** Operation with two par */ +trait Operation2ParN extends OperationN { + val p1: ParN + val p2: ParN +} + +/** Other operations (e.g. method) */ +trait OperationOtherN extends OperationN + +/** Rholang unforgeable names (stored in internal environment map) */ +trait UnforgeableN extends ParN { + val v: Array[Byte] +} + +/** + * Connectives (bindings) are used in patterns to combine several conditions together or + * to set a pattern with some specific Rholang type or variables. 
+ * */ +trait ConnectiveN extends ParN + +/** Connectives for simple types */ +trait ConnectiveSTypeN extends ConnectiveN + +/** Connectives for truth-functional operators */ +trait ConnectiveFuncN extends ConnectiveN + +/** Connectives for variables */ +trait ConnectiveVarN extends ConnectiveN + +/** Other types that can't be categorized */ +trait OtherN extends ParN diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Unforgeable.scala b/models/src/main/scala/coop/rchain/models/rholangn/Unforgeable.scala new file mode 100644 index 00000000000..23314963488 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Unforgeable.scala @@ -0,0 +1,14 @@ +package coop.rchain.models.rholangn + +final class UPrivateN(val v: Array[Byte]) extends UnforgeableN +object UPrivateN { def apply(bytes: Array[Byte]): UPrivateN = new UPrivateN(bytes) } + +final class UDeployIdN(val v: Array[Byte]) extends UnforgeableN +object UDeployIdN { def apply(bytes: Array[Byte]): UDeployIdN = new UDeployIdN(bytes) } + +final class UDeployerIdN(val v: Array[Byte]) extends UnforgeableN +object UDeployerIdN { def apply(bytes: Array[Byte]): UDeployerIdN = new UDeployerIdN(bytes) } + +// TODO: Temporary solution for easier conversion from old types - change type in the future +final class USysAuthTokenN(val v: Array[Byte]) extends UnforgeableN +object USysAuthTokenN { def apply(): USysAuthTokenN = new USysAuthTokenN(Array[Byte]()) } diff --git a/models/src/main/scala/coop/rchain/models/rholangn/Var.scala b/models/src/main/scala/coop/rchain/models/rholangn/Var.scala new file mode 100644 index 00000000000..6532434e4f0 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/Var.scala @@ -0,0 +1,9 @@ +package coop.rchain.models.rholangn + +final class BoundVarN(val idx: Int) extends VarN +object BoundVarN { def apply(value: Int): BoundVarN = new BoundVarN(value) } + +final class FreeVarN(val idx: Int) extends VarN +object FreeVarN { def apply(value: Int): FreeVarN = 
new FreeVarN(value) } + +object WildcardN extends VarN diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/ConnectiveUsed.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/ConnectiveUsed.scala new file mode 100644 index 00000000000..05145006077 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/ConnectiveUsed.scala @@ -0,0 +1,59 @@ +package coop.rchain.models.rholangn.parmanager + +import coop.rchain.models.rholangn._ + +private[parmanager] object ConnectiveUsed { + private def cUsed(p: RhoTypeN): Boolean = p.connectiveUsed + private def cUsed(kv: (RhoTypeN, RhoTypeN)): Boolean = cUsed(kv._1) || cUsed(kv._2) + private def cUsed(ps: Seq[RhoTypeN]): Boolean = ps.exists(cUsed) + private def cUsedKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Boolean = kVPairs.exists(cUsed) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def connectiveUsedFn(p: RhoTypeN): Boolean = p match { + + /** Basic types */ + case _: NilN.type => false + case pProc: ParProcN => cUsed(pProc.ps) + case send: SendN => cUsed(send.chan) || cUsed(send.data) + case receive: ReceiveN => cUsed(receive.binds) || cUsed(receive.body) + case m: MatchN => cUsed(m.target) || cUsed(m.cases) + case _: NewN => false // There are no situations when New gets into the matcher + + /** Ground types */ + case _: GroundN => false + + /** Collections */ + case eList: EListN => cUsed(eList.ps) || eList.remainder.exists(cUsed) + case eTuple: ETupleN => cUsed(eTuple.ps) + case eSet: ESetN => cUsed(eSet.sortedPs) || eSet.remainder.exists(cUsed) + case eMap: EMapN => cUsedKVPairs(eMap.sortedPs) || eMap.remainder.exists(cUsed) + + /** Vars */ + case _: BoundVarN => false + case _: FreeVarN => true + case _: WildcardN.type => true + + /** Operations */ + case op: Operation1ParN => cUsed(op.p) + case op: Operation2ParN => cUsed(op.p1) || cUsed(op.p2) + case eMethod: EMethodN => cUsed(eMethod.target) || cUsed(eMethod.arguments) + case eMatches: 
EMatchesN => cUsed(eMatches.target)

    /** Unforgeable names */
    case _: UnforgeableN => false

    /** Connective */
    case _: ConnectiveSTypeN => true
    case _: ConnectiveFuncN  => true
    case _: ConnectiveVarN   => false

    /** Auxiliary types */
    case bind: ReceiveBindN => cUsed(bind.source)
    case mCase: MatchCaseN  => cUsed(mCase.source)

    /** Other types */
    case _: BundleN => false // There are no situations when Bundle gets into the matcher

    case x => throw new Exception(s"Undefined type $x")
  }
}
diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Constants.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Constants.scala
new file mode 100644
index 00000000000..5cbcfb8c23a
--- /dev/null
+++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Constants.scala
package coop.rchain.models.rholangn.parmanager

private[parmanager] object Constants {
  final val intSize     = 4
  final val longSize    = 8
  final val booleanSize = 1
  final val hashSize    = 32 // for Blake2b256

  final val tagSize = 1

  /** Tags for serialization */
  /** Basic types */
  final val NIL     = 0x01.toByte
  final val PARPROC = 0x02.toByte
  final val SEND    = 0x03.toByte
  final val RECEIVE = 0x04.toByte
  final val MATCH   = 0x05.toByte
  final val NEW     = 0x06.toByte

  /** Ground types */
  final val GBOOL       = 0x10.toByte
  final val GINT        = 0x11.toByte
  final val GBIG_INT    = 0x12.toByte
  final val GSTRING     = 0x13.toByte
  final val GBYTE_ARRAY = 0x14.toByte
  final val GURI        = 0x15.toByte

  /** Collections */
  final val ELIST  = 0x20.toByte
  final val ETUPLE = 0x21.toByte
  final val ESET   = 0x22.toByte
  final val EMAP   = 0x23.toByte

  /** Vars */
  final val BOUND_VAR = 0x2A.toByte
  final val FREE_VAR  = 0x2B.toByte
  final val WILDCARD  = 0x2C.toByte

  /** Operations */
  final val ENEG = 0x30.toByte
  final val ENOT = 0x31.toByte

  final val EPLUS       = 0x32.toByte
  final val EMINUS      = 0x33.toByte
  final val EMULT       = 0x34.toByte
  final val EDIV        = 0x35.toByte
  final val EMOD        = 0x36.toByte
  final val ELT         = 0x37.toByte
  final val ELTE        = 0x38.toByte
  final val EGT         = 0x39.toByte
  final val EGTE        = 0x3A.toByte
  final val EEQ         = 0x3B.toByte
  final val ENEQ        = 0x3C.toByte
  final val EAND        = 0x3D.toByte
  final val ESHORTAND   = 0x3E.toByte
  final val EOR         = 0x3F.toByte
  final val ESHORTOR    = 0x40.toByte
  final val EPLUSPLUS   = 0x41.toByte
  final val EMINUSMINUS = 0x42.toByte
  final val EPERCENT    = 0x43.toByte

  final val EMETHOD  = 0x4A.toByte
  final val EMATCHES = 0x4B.toByte

  /** Unforgeable names */
  final val UPRIVATE     = 0x50.toByte
  final val UDEPLOY_ID   = 0x51.toByte
  final val UDEPLOYER_ID = 0x52.toByte

  /** Connective */
  final val CONNECTIVE_BOOL      = 0x70.toByte
  final val CONNECTIVE_INT       = 0x71.toByte
  final val CONNECTIVE_STRING    = 0x72.toByte
  final val CONNECTIVE_URI       = 0x73.toByte
  final val CONNECTIVE_BYTEARRAY = 0x74.toByte
  final val CONNECTIVE_BIG_INT   = 0x75.toByte
  final val CONNECTIVE_NOT       = 0x76.toByte
  final val CONNECTIVE_AND       = 0x77.toByte
  final val CONNECTIVE_OR        = 0x78.toByte
  final val CONNECTIVE_VARREF    = 0x79.toByte

  /** Auxiliary types */
  final val RECEIVE_BIND = 0x80.toByte
  final val MATCH_CASE   = 0x81.toByte

  /** Other types */
  final val BUNDLE         = 0x90.toByte
  final val SYS_AUTH_TOKEN = 0x91.toByte
}
diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/EvalRequired.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/EvalRequired.scala
new file mode 100644
index 00000000000..81b153a9f6a
--- /dev/null
+++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/EvalRequired.scala
package coop.rchain.models.rholangn.parmanager

import coop.rchain.models.rholangn._

private[parmanager] object EvalRequired {
  private def eReq(p: RhoTypeN): Boolean              = p.evalRequired
  private def eReq(kv: (RhoTypeN, RhoTypeN)): Boolean = eReq(kv._1) || eReq(kv._2)
  private def 
eReq(ps: Seq[RhoTypeN]): Boolean = ps.exists(eReq)
  private def eReqKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Boolean = kVPairs.exists(eReq)

  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def evalRequiredFn(p: RhoTypeN): Boolean = p match {

    /** Basic types */
    case p: BasicN =>
      p match {
        case _: NilN.type    => false
        case pProc: ParProcN => eReq(pProc.ps)
        case _               => true
      }

    /** Ground types */
    case _: GroundN => false

    /** Collections */
    case eList: EListN   => eReq(eList.ps)
    case eTuple: ETupleN => eReq(eTuple.ps)
    case eSet: ESetN     => eReq(eSet.sortedPs)
    case eMap: EMapN     => eReqKVPairs(eMap.sortedPs)

    /** Vars */
    case _: VarN => true

    /** Operations */
    case _: OperationN => true

    /** Unforgeable names */
    case _: UnforgeableN => false

    /** Connective */
    case _: ConnectiveN => false

    /** Auxiliary types */
    case _: ReceiveBindN => true
    case _: MatchCaseN   => true

    /** Other types */
    case bundle: BundleN => eReq(bundle.body)

    case x => throw new Exception(s"Undefined type $x")
  }
}
diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Manager.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Manager.scala
new file mode 100644
index 00000000000..a12fc50db10
--- /dev/null
+++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Manager.scala
package coop.rchain.models.rholangn.parmanager

import cats.Eval
import com.google.protobuf.{CodedInputStream, CodedOutputStream}
import coop.rchain.models.rholangn._
import coop.rchain.models.rholangn.parmanager.protobuf.{
  ProtoCodec,
  ProtoPrimitiveReader,
  ProtoPrimitiveWriter
}

object Manager {

  // Hash-based equality: two terms are equal iff their rhoHash bytes match.
  def equals(self: RhoTypeN, other: Any): Boolean = other match {
    case x: RhoTypeN => x.rhoHash sameElements self.rhoHash
    case _           => false
  }

  def sortPars(ps: Seq[ParN]): Seq[ParN]                  = Sorting.sortPars(ps)
  def sortBinds(bs: Seq[ReceiveBindN]): Seq[ReceiveBindN] = Sorting.sortBinds(bs)
  def sortBindsWithT[T](bs: Seq[(ReceiveBindN, T)]): Seq[(ReceiveBindN, T)] =
    Sorting.sortBindsWithT(bs)
  def sortUris(uris: Seq[String]): Seq[String] = Sorting.sortUris(uris)
  def sortInjections(injections: Map[String, ParN]): Seq[(String, ParN)] =
    Sorting.sortInjections(injections)
  def comparePars(p1: ParN, p2: ParN): Int = Sorting.comparePars(p1, p2)

  // Recursively inlines nested ParProcs and drops Nils.
  private def flatPs(ps: Seq[ParN]): Seq[ParN] =
    ps.flatMap {
      case _: NilN.type => Seq()
      case x: ParProcN  => flatPs(x.ps)
      case p            => Seq(p)
    }

  private def makePProc(ps: Seq[ParN]): ParN = ps match {
    case Nil      => NilN
    case p :: Nil => p
    case _        => ParProcN(ps)
  }

  /**
   * Create a flattened parallel Par (ParProc) from a par sequence.
   * Flattening transforms ParProc(P, Q, ...):
   * - empty data:  ParProc()          -> Nil
   * - single data: ParProc(P)         -> P
   * - nil data:    ParProc(P, Q, Nil) -> ParProc(P, Q)
   * - nested data: ParProc(ParProc(P,Q), ParProc(L,K)) -> ParProc(P, Q, L, K)
   * @param ps initial par sequence to be executed in parallel
   * @return
   */
  def flattedPProc(ps: Seq[ParN]): ParN = makePProc(flatPs(ps))

  /**
   * Create a flattened parallel Par (ParProc) from two Pars.
   * See [[flattedPProc]] for more information.
   */
  def combinePars(p1: ParN, p2: ParN): ParN = flattedPProc(Seq(p1, p2))

  /** MetaData */
  def rhoHashFn(p: RhoTypeN): Array[Byte]      = RhoHash.rhoHashFn(p)
  def serializedSizeFn(p: RhoTypeN): Eval[Int] = SerializedSize.calcSerSize(p)
  def serializedFn(p: RhoTypeN): Eval[Array[Byte]] = {
    val write = (out: CodedOutputStream) => Serialization.serialize(p, ProtoPrimitiveWriter(out))
    p.serializedSize.flatMap(size => ProtoCodec.encode(size, write))
  }
  def connectiveUsedFn(p: RhoTypeN): Boolean     = ConnectiveUsed.connectiveUsedFn(p)
  def evalRequiredFn(p: RhoTypeN): Boolean       = EvalRequired.evalRequiredFn(p)
  def substituteRequiredFn(p: RhoTypeN): Boolean = SubstituteRequired.substituteRequiredFn(p)

  // Deserialize with protobuf
  def protoDeserialize(bytes: Array[Byte]): ParN = {
    val decode = (in: CodedInputStream) => Serialization.deserialize(ProtoPrimitiveReader(in))
    ProtoCodec.decode(bytes, decode).value
  }
}
diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/RhoHash.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/RhoHash.scala
new file mode 100644
index 00000000000..68d92f96ebb
--- /dev/null
+++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/RhoHash.scala
package coop.rchain.models.rholangn.parmanager

import coop.rchain.models.rholangn._
import coop.rchain.models.rholangn.parmanager.Constants._
import org.bouncycastle.crypto.digests.Blake2bDigest

import java.util.concurrent.atomic.AtomicInteger
import scala.annotation.unused

private[parmanager] object RhoHash {

  // Accumulates [tag | body bytes] and produces a fixed-width hash of the result.
  private class Hashable(val tag: Byte, val bodySize: Int) {
    import Hashable._

    private val arrSize: Int     = bodySize + tagSize
    private val arr: Array[Byte] = new Array[Byte](arrSize)
    private val pos              = new AtomicInteger(tagSize)

    arr(0) = tag // Fill the first element of arr with the tag

    /** Appending methods */
    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    private def 
append(b: Byte): Unit = { + val currentPos = pos.getAndIncrement() + if (currentPos + 1 > arrSize) throw new Exception("Array size exceeded") + else arr(currentPos) = b + } + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def append(bytes: Array[Byte]): Unit = { + val bytesLength = bytes.length + val currentPos = pos.getAndAdd(bytesLength) + if (currentPos + bytesLength > arrSize) throw new Exception("Array size exceeded") + else Array.copy(bytes, 0, arr, currentPos, bytesLength) + } + + def append(v: Boolean): Unit = append(booleanToByte(v)) + def append(v: Int): Unit = append(intToBytes(v)) + def append(v: Long): Unit = append(longToBytes(v)) + + def append(v: BigInt): Unit = append(v.toByteArray) + def append(v: String): Unit = append(stringToBytes(v)) + + def append(p: RhoTypeN): Unit = append(p.rhoHash) + private def append(kv: (RhoTypeN, RhoTypeN)): Unit = { + append(kv._1) + append(kv._2) + } + private def appendInjection(injection: (String, RhoTypeN)): Unit = { + append(injection._1) + append(injection._2) + } + def appendStrings(strings: Seq[String]): Unit = strings.foreach(append) + def appendKVPairs(kvPairs: Seq[(RhoTypeN, RhoTypeN)]): Unit = kvPairs.foreach(append) + def appendInjections(injections: Seq[(String, RhoTypeN)]): Unit = + injections.foreach(appendInjection) + def append(ps: Seq[RhoTypeN]): Unit = ps.foreach(append) + def append(pOpt: Option[RhoTypeN]): Unit = pOpt.foreach(append) + + // Get the hash of the current array + def calcHash: Array[Byte] = { + val curSize = pos.get() + + if (curSize <= hashSize) { + if (curSize == hashSize) { + arr + } else { + val newBytes = new Array[Byte](hashSize) + val dataStartPos = hashSize - curSize + + for (i <- 0 until hashSize) { + if (i < dataStartPos) newBytes(i) = 0x00.toByte // fill empty place with 0x00.toByte + else newBytes(i) = arr(i - dataStartPos) + } + newBytes + } + } else { + val hashData = arr.slice(0, curSize) + + 
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
+        def hash(input: Array[Byte]): Array[Byte] = {
+          val digestFn = new Blake2bDigest(hashSize * 8)
+          digestFn.update(input, 0, input.length)
+          val res = new Array[Byte](hashSize)
+          digestFn.doFinal(res, 0)
+          res
+        }
+        hash(hashData)
+      }
+    }
+  }
+  private object Hashable {
+    def apply(tag: Byte, size: Int = 0): Hashable = new Hashable(tag, size)
+
+    private def booleanToByte(v: Boolean): Byte = if (v) 1 else 0
+
+    // Big-endian encoding of an Int into `intSize` bytes (shift is in bits).
+    private def intToBytes(value: Int): Array[Byte] = {
+      val byteArray = new Array[Byte](intSize)
+      for (i <- 0 until intSize) {
+        byteArray(intSize - 1 - i) = ((value >>> (i * 8)) & 0xFF).toByte
+      }
+      byteArray
+    }
+
+    // Big-endian encoding of a Long into `longSize` bytes.
+    private def longToBytes(value: Long): Array[Byte] = {
+      val byteArray = new Array[Byte](longSize)
+      for (i <- 0 until longSize) {
+        // FIX(review): shift by bits (i * 8), not `i * longSize`; the original
+        // expression only produced correct results because longSize happens to
+        // equal 8. This now matches the intToBytes implementation above.
+        byteArray(longSize - 1 - i) = ((value >>> (i * 8)) & 0xFF).toByte
+      }
+      byteArray
+    }
+
+    private def stringToBytes(v: String): Array[Byte] = v.getBytes("UTF-8")
+
+    private def hSizeSeq[T](seq: Seq[T], f: T => Int): Int = seq.map(f).sum
+
+    def hSize(@unused v: Boolean): Int = booleanSize
+    def hSize(@unused v: Int): Int = intSize
+    def hSize(@unused v: Long): Int = longSize
+    def hSize(v: BigInt): Int = hSize(v.toByteArray)
+    def hSize(v: String): Int = stringToBytes(v).length
+    def hSize(bytes: Array[Byte]): Int = bytes.length
+
+    def hSize(@unused p: RhoTypeN): Int = hashSize
+    private def hSize(kv: (RhoTypeN, RhoTypeN)): Int = hSize(kv._1) + hSize(kv._2)
+    private def hSizeInjection(injection: (String, RhoTypeN)): Int =
+      hSize(injection._1) + hSize(injection._2)
+    def hSize(ps: Seq[RhoTypeN]): Int = hSizeSeq[RhoTypeN](ps, hSize)
+    def hSizeString(strings: Seq[String]): Int = hSizeSeq[String](strings, hSize)
+    def hSizeKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Int =
+      hSizeSeq[(RhoTypeN, RhoTypeN)](kVPairs, hSize)
+    def hSizeInjections(injections: Seq[(String, RhoTypeN)]): Int =
+      hSizeSeq[(String, RhoTypeN)](injections,
hSizeInjection) + def hSize(pOpt: Option[RhoTypeN]): Int = if (pOpt.isDefined) hashSize else 0 + } + + import Hashable._ + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def rhoHashFn(p: RhoTypeN): Array[Byte] = p match { + + /** Basic types */ + case _: NilN.type => Hashable(NIL).calcHash + + case pProc: ParProcN => + val hs = Hashable(PARPROC, hSize(pProc.ps)) + hs.append(pProc.sortedPs) + hs.calcHash + + case send: SendN => + val bodySize = hSize(send.chan) + hSize(send.data) + hSize(send.persistent) + val hs = Hashable(SEND, bodySize) + hs.append(send.chan) + hs.append(send.data) + hs.append(send.persistent) + hs.calcHash + + case receive: ReceiveN => + val bodySize = hSize(receive.binds) + hSize(receive.body) + + hSize(receive.persistent) + hSize(receive.peek) + hSize(receive.bindCount) + val hs = Hashable(RECEIVE, bodySize) + hs.append(receive.sortedBinds) + hs.append(receive.body) + hs.append(receive.persistent) + hs.append(receive.peek) + hs.append(receive.bindCount) + hs.calcHash + + case m: MatchN => + val bodySize = hSize(m.target) + hSize(m.cases) + val hs = Hashable(MATCH, bodySize) + hs.append(m.target) + hs.append(m.cases) + hs.calcHash + + case n: NewN => + val bodySize = hSize(n.bindCount) + hSize(n.p) + + hSizeString(n.uri) + hSizeInjections(n.injections.toSeq) + val hs = Hashable(NEW, bodySize) + hs.append(n.bindCount) + hs.append(n.p) + hs.appendStrings(n.sortedUri) + hs.appendInjections(n.sortedInjections) + hs.calcHash + + /** Ground types */ + case gBool: GBoolN => + val hs = Hashable(GBOOL, hSize(gBool.v)) + hs.append(gBool.v) + hs.calcHash + + case gInt: GIntN => + val hs = Hashable(GINT, hSize(gInt.v)) + hs.append(gInt.v) + hs.calcHash + + case gBigInt: GBigIntN => + val hs = Hashable(GBIG_INT, hSize(gBigInt.v)) + hs.append(gBigInt.v) + hs.calcHash + + case gString: GStringN => + val hs = Hashable(GSTRING, hSize(gString.v)) + hs.append(gString.v) + hs.calcHash + + case gByteArrayN: GByteArrayN => + val hs = Hashable(GBYTE_ARRAY, 
hSize(gByteArrayN.v))
+      hs.append(gByteArrayN.v)
+      hs.calcHash
+
+    case gUri: GUriN =>
+      val hs = Hashable(GURI, hSize(gUri.v))
+      hs.append(gUri.v)
+      hs.calcHash
+
+    /** Collections */
+    case eList: EListN =>
+      val bodySize = hSize(eList.ps) + hSize(eList.remainder)
+      val hs = Hashable(ELIST, bodySize)
+      hs.append(eList.ps)
+      hs.append(eList.remainder)
+      hs.calcHash
+
+    case eTuple: ETupleN =>
+      val bodySize = hSize(eTuple.ps)
+      val hs = Hashable(ETUPLE, bodySize)
+      hs.append(eTuple.ps)
+      hs.calcHash
+
+    case eSet: ESetN =>
+      val bodySize = hSize(eSet.sortedPs) + hSize(eSet.remainder)
+      // FIX(review): tag was ELIST (copy-paste bug), which made an ESet hash
+      // collide with an EList holding the same elements. Serialization.scala
+      // in this same change writes the ESET tag for ESetN, confirming intent.
+      val hs = Hashable(ESET, bodySize)
+      hs.append(eSet.sortedPs)
+      hs.append(eSet.remainder)
+      hs.calcHash
+
+    case eMap: EMapN =>
+      val bodySize = hSizeKVPairs(eMap.sortedPs) + hSize(eMap.remainder)
+      val hs = Hashable(EMAP, bodySize)
+      hs.appendKVPairs(eMap.sortedPs)
+      hs.append(eMap.remainder)
+      hs.calcHash
+
+    /** Vars */
+    case bv: BoundVarN =>
+      val hs = Hashable(BOUND_VAR, hSize(bv.idx))
+      hs.append(bv.idx)
+      hs.calcHash
+
+    case fv: FreeVarN =>
+      val hs = Hashable(FREE_VAR, hSize(fv.idx))
+      hs.append(fv.idx)
+      hs.calcHash
+
+    case _: WildcardN.type => Hashable(WILDCARD).calcHash
+
+    /** Operations */
+    case op: Operation1ParN =>
+      val tag = op match {
+        case _: ENegN => ENEG
+        case _: ENotN => ENOT
+      }
+      val bodySize = hSize(op.p)
+      val hs = Hashable(tag, bodySize)
+      hs.append(op.p)
+      hs.calcHash
+
+    case op: Operation2ParN =>
+      val tag = op match {
+        case _: EPlusN           => EPLUS
+        case _: EMinusN          => EMINUS
+        case _: EMultN           => EMULT
+        case _: EDivN            => EDIV
+        case _: EModN            => EMOD
+        case _: ELtN             => ELT
+        case _: ELteN            => ELTE
+        case _: EGtN             => EGT
+        case _: EGteN            => EGTE
+        case _: EEqN             => EEQ
+        case _: ENeqN            => ENEQ
+        case _: EAndN            => EAND
+        case _: EShortAndN       => ESHORTAND
+        case _: EOrN             => EOR
+        case _: EShortOrN        => ESHORTOR
+        case _: EPlusPlusN       => EPLUSPLUS
+        case _: EMinusMinusN     => EMINUSMINUS
+        case _: EPercentPercentN => EPERCENT
+      }
+      val bodySize = hSize(op.p1) + hSize(op.p2)
+ val hs = Hashable(tag, bodySize) + hs.append(op.p1) + hs.append(op.p2) + hs.calcHash + + case eMethod: EMethodN => + val bodySize = hSize(eMethod.methodName) + hSize(eMethod.target) + hSize(eMethod.arguments) + val hs = Hashable(EMETHOD, bodySize) + hs.append(eMethod.methodName) + hs.append(eMethod.target) + hs.append(eMethod.arguments) + hs.calcHash + + case eMatches: EMatchesN => + val bodySize = hSize(eMatches.target) + hSize(eMatches.pattern) + val hs = Hashable(EMATCHES, bodySize) + hs.append(eMatches.target) + hs.append(eMatches.pattern) + hs.calcHash + + /** Unforgeable names */ + case unf: UnforgeableN => + val bodySize = hSize(unf.v) + val t = unf match { + case _: UPrivateN => UPRIVATE + case _: UDeployIdN => UDEPLOY_ID + case _: UDeployerIdN => UDEPLOYER_ID + case _: USysAuthTokenN => SYS_AUTH_TOKEN + } + val hs = Hashable(t, bodySize) + hs.append(unf.v) + hs.calcHash + + /** Connective */ + case _: ConnBoolN.type => Hashable(CONNECTIVE_BOOL).calcHash + case _: ConnIntN.type => Hashable(CONNECTIVE_INT).calcHash + case _: ConnBigIntN.type => Hashable(CONNECTIVE_BIG_INT).calcHash + case _: ConnStringN.type => Hashable(CONNECTIVE_STRING).calcHash + case _: ConnUriN.type => Hashable(CONNECTIVE_URI).calcHash + case _: ConnByteArrayN.type => Hashable(CONNECTIVE_BYTEARRAY).calcHash + + case connNot: ConnNotN => + val bodySize = hSize(connNot.p) + val hs = Hashable(CONNECTIVE_NOT, bodySize) + hs.append(connNot.p) + hs.calcHash + + case connAnd: ConnAndN => + val bodySize = hSize(connAnd.ps) + val hs = Hashable(CONNECTIVE_AND, bodySize) + hs.append(connAnd.ps) + hs.calcHash + + case connOr: ConnOrN => + val bodySize = hSize(connOr.ps) + val hs = Hashable(CONNECTIVE_OR, bodySize) + hs.append(connOr.ps) + hs.calcHash + + case connVarRef: ConnVarRefN => + val bodySize = hSize(connVarRef.index) + hSize(connVarRef.depth) + val hs = Hashable(CONNECTIVE_VARREF, bodySize) + hs.append(connVarRef.index) + hs.append(connVarRef.depth) + hs.calcHash + + /** Auxiliary types 
*/ + case bind: ReceiveBindN => + val bodySize = hSize(bind.patterns) + hSize(bind.source) + + hSize(bind.remainder) + hSize(bind.freeCount) + val hs = Hashable(RECEIVE_BIND, bodySize) + hs.append(bind.patterns) + hs.append(bind.source) + hs.append(bind.remainder) + hs.append(bind.freeCount) + hs.calcHash + + case mCase: MatchCaseN => + val bodySize = hSize(mCase.pattern) + hSize(mCase.source) + hSize(mCase.freeCount) + val hs = Hashable(MATCH_CASE, bodySize) + hs.append(mCase.pattern) + hs.append(mCase.source) + hs.append(mCase.freeCount) + hs.calcHash + + /** Other types */ + case bundle: BundleN => + val bodySize = hSize(bundle.body) + hSize(bundle.writeFlag) + hSize(bundle.readFlag) + val hs = Hashable(BUNDLE, bodySize) + hs.append(bundle.body) + hs.append(bundle.writeFlag) + hs.append(bundle.readFlag) + hs.calcHash + + case x => throw new Exception(s"Undefined type $x") + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Serialization.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Serialization.scala new file mode 100644 index 00000000000..95d2b89e1fd --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Serialization.scala @@ -0,0 +1,328 @@ +package coop.rchain.models.rholangn.parmanager + +import cats.Eval +import cats.syntax.all._ +import coop.rchain.models.rholangn._ +import coop.rchain.models.rholangn.parmanager.Constants._ +import coop.rchain.models.rholangn.parmanager.primitive.{PrimitiveReader, PrimitiveWriter} + +object Serialization { + + // TODO: Properly handle errors with return type (remove throw) + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def serialize(p: RhoTypeN, primitiveWriter: PrimitiveWriter[Eval]): Eval[Unit] = Eval.defer { + import primitiveWriter._ + val syntax = primitive.syntax.PrimitiveWriterSyntax.primitiveWriterSyntax(primitiveWriter) + import syntax._ + + // Recursive traversal of the whole object without memoization of intermediaries + 
val writePar: RhoTypeN => Eval[Unit] = serialize(_, primitiveWriter) + // Recursive traversal using memoized values + // val writePar: RhoTypeN => Eval[Unit] = _.serialized.flatMap(primitiveWriter.writeRaw) + + p match { + + /* Terminal expressions (0-arity constructors) */ + /* =========================================== */ + + case _: NilN.type => write(NIL) + case gBool: GBoolN => write(GBOOL) *> write(gBool.v) + case gInt: GIntN => write(GINT) *> write(gInt.v) + case gBigInt: GBigIntN => write(GBIG_INT) *> writeBigInt(gBigInt.v) + case gString: GStringN => write(GSTRING) *> write(gString.v) + case gByteArray: GByteArrayN => write(GBYTE_ARRAY) *> write(gByteArray.v) + case gUri: GUriN => write(GURI) *> write(gUri.v) + case _: WildcardN.type => write(WILDCARD) + + /* Unforgeable names */ + case unf: UnforgeableN => + val unfKind = unf match { + case _: UPrivateN => UPRIVATE + case _: UDeployIdN => UDEPLOY_ID + case _: UDeployerIdN => UDEPLOYER_ID + case _: USysAuthTokenN => SYS_AUTH_TOKEN + } + write(unfKind) *> write(unf.v) + + /* Vars */ + case bVar: BoundVarN => write(BOUND_VAR) *> write(bVar.idx) + case fVar: FreeVarN => write(FREE_VAR) *> write(fVar.idx) + case rVar: ConnVarRefN => + write(CONNECTIVE_VARREF) *> write(rVar.index) *> write(rVar.depth) + + /* Simple types */ + case _: ConnBoolN.type => write(CONNECTIVE_BOOL) + case _: ConnIntN.type => write(CONNECTIVE_INT) + case _: ConnBigIntN.type => write(CONNECTIVE_BIG_INT) + case _: ConnStringN.type => write(CONNECTIVE_STRING) + case _: ConnUriN.type => write(CONNECTIVE_URI) + case _: ConnByteArrayN.type => write(CONNECTIVE_BYTEARRAY) + + /* Unary expressions (1-arity constructors) */ + /* ======================================== */ + + case op: Operation1ParN => + val tag = op match { + case _: ENegN => ENEG + case _: ENotN => ENOT + } + write(tag) *> writePar(op.p) + + case b: BundleN => + write(BUNDLE) *> writePar(b.body) *> write(b.writeFlag) *> write(b.readFlag) + + /* Connective */ + case connNot: 
ConnNotN => write(CONNECTIVE_NOT) *> writePar(connNot.p) + + /* Binary expressions (2-arity constructors) */ + /* ========================================= */ + + case op: Operation2ParN => + val tag = op match { + case _: EPlusN => EPLUS + case _: EMinusN => EMINUS + case _: EMultN => EMULT + case _: EDivN => EDIV + case _: EModN => EMOD + case _: ELtN => ELT + case _: ELteN => ELTE + case _: EGtN => EGT + case _: EGteN => EGTE + case _: EEqN => EEQ + case _: ENeqN => ENEQ + case _: EAndN => EAND + case _: EShortAndN => ESHORTAND + case _: EOrN => EOR + case _: EShortOrN => ESHORTOR + case _: EPlusPlusN => EPLUSPLUS + case _: EMinusMinusN => EMINUSMINUS + case _: EPercentPercentN => EPERCENT + } + write(tag) *> writePar(op.p1) *> writePar(op.p2) + + case eMatches: EMatchesN => + write(EMATCHES) *> writePar(eMatches.target) *> writePar(eMatches.pattern) + + /* N-ary parameter expressions (N-arity constructors) */ + /* ================================================== */ + + case pProc: ParProcN => write(PARPROC) *> writeSeq(pProc.sortedPs, writePar) + + case send: SendN => + write(SEND) *> + writePar(send.chan) *> + writeSeq(send.data, writePar) *> + write(send.persistent) + + case receive: ReceiveN => + write(RECEIVE) *> + writeSeq(receive.sortedBinds, writePar) *> + writePar(receive.body) *> + write(receive.persistent) *> + write(receive.peek) *> + write(receive.bindCount) + + case m: MatchN => write(MATCH) *> writePar(m.target) *> writeSeq(m.cases, writePar) + + case n: NewN => + write(NEW) *> + write(n.bindCount) *> + writePar(n.p) *> + writeSeq[String](n.sortedUri, write) *> + writeSeq[(String, ParN)](n.sortedInjections, writeTupleStringT(_, writePar)) + + /* Collections */ + case eList: EListN => + write(ELIST) *> writeSeq(eList.ps, writePar) *> writeOpt(eList.remainder, writePar) + case eTuple: ETupleN => write(ETUPLE) *> writeSeq(eTuple.ps, writePar) + case eSet: ESetN => + write(ESET) *> writeSeq(eSet.sortedPs, writePar) *> writeOpt(eSet.remainder, 
writePar) + case eMap: EMapN => + write(EMAP) *> + writeSeq[(ParN, ParN)](eMap.sortedPs, writeTuple(_, writePar)) *> + writeOpt(eMap.remainder, writePar) + + /* Connective */ + case connAnd: ConnAndN => write(CONNECTIVE_AND) *> writeSeq(connAnd.ps, writePar) + case connOr: ConnOrN => write(CONNECTIVE_OR) *> writeSeq(connOr.ps, writePar) + + case eMethod: EMethodN => + write(EMETHOD) *> + write(eMethod.methodName) *> + writePar(eMethod.target) *> + writeSeq(eMethod.arguments, writePar) + + /* Auxiliary types */ + case bind: ReceiveBindN => + write(RECEIVE_BIND) *> + writeSeq(bind.patterns, writePar) *> + writePar(bind.source) *> + writeOpt(bind.remainder, writePar) *> + write(bind.freeCount) + + case mCase: MatchCaseN => + write(MATCH_CASE) *> + writePar(mCase.pattern) *> + writePar(mCase.source) *> + write(mCase.freeCount) + + case unknownType => throw new Exception(s"Unknown type `$unknownType`") + } + } + + // TODO: Properly handle errors with return type (remove throw) + def deserialize(primitiveReader: PrimitiveReader[Eval]): Eval[ParN] = { + import primitiveReader._ + + def readBigInt: Eval[BigInt] = readBytes.map(BigInt(_)) + + // Reads a sequence + def readSeq[T](v: Eval[T]): Eval[Seq[T]] = readInt.flatMap(Seq.range(0, _).as(v).sequence) + + // Reads par object with all nested objects + def readPar: Eval[ParN] = readByte >>= matchPar + + // Reads sequence of pars + def readPars: Eval[Seq[ParN]] = readSeq(readPar) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def readVar: Eval[VarN] = + readPar.map { + case v: VarN => v + case p => throw new Exception(s"Value must be Var, found `$p`") + } + + def readVarOpt: Eval[Option[VarN]] = + readBool.flatMap(x => if (x) readVar.map(Some(_)) else Eval.now(none)) + + def readTuplePar: Eval[(ParN, ParN)] = (readPar, readPar).mapN((_, _)) + def readTupleStringPar: Eval[(String, ParN)] = (readString, readPar).mapN((_, _)) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def readReceiveBind(tag: 
Byte): Eval[ReceiveBindN] = tag match { + case RECEIVE_BIND => + for { + patterns <- readPars + source <- readPar + remainder <- readVarOpt + freeCount <- readInt + } yield ReceiveBindN(patterns, source, remainder, freeCount) + case _ => throw new Exception(s"Invalid tag `$tag` for ReceiveBindN deserialization") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def readMatchMCase(tag: Byte): Eval[MatchCaseN] = tag match { + case MATCH_CASE => + for { + pattern <- readPar + source <- readPar + freeCount <- readInt + } yield MatchCaseN(pattern, source, freeCount) + case _ => throw new Exception(s"Invalid tag `$tag` for matchMCase deserialization") + } + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def matchPar(tag: Byte): Eval[ParN] = tag match { + + /* Terminal expressions (0-arity constructors) */ + /* =========================================== */ + + case NIL => Eval.now(NilN) + case GBOOL => readBool.map(GBoolN(_)) + case GINT => readLong.map(GIntN(_)) + case GBIG_INT => readBigInt.map(GBigIntN(_)) + case GSTRING => readString.map(GStringN(_)) + case GBYTE_ARRAY => readBytes.map(GByteArrayN(_)) + case GURI => readString.map(GUriN(_)) + case WILDCARD => Eval.now(WildcardN) + + /* Unforgeable names */ + case UPRIVATE => readBytes.map(UPrivateN(_)) + case UDEPLOY_ID => readBytes.map(UDeployIdN(_)) + case UDEPLOYER_ID => readBytes.map(UDeployerIdN(_)) + // TODO: Temporary solution for easier conversion from old types - change type in the future + case SYS_AUTH_TOKEN => readBytes.as(USysAuthTokenN()) + + /* Vars */ + case BOUND_VAR => readInt.map(BoundVarN(_)) + case FREE_VAR => readInt.map(FreeVarN(_)) + case CONNECTIVE_VARREF => (readInt, readInt).mapN(ConnVarRefN(_, _)) + + /* Simple types */ + case CONNECTIVE_BOOL => Eval.now(ConnBoolN) + case CONNECTIVE_INT => Eval.now(ConnIntN) + case CONNECTIVE_BIG_INT => Eval.now(ConnBigIntN) + case CONNECTIVE_STRING => Eval.now(ConnStringN) + case CONNECTIVE_URI => Eval.now(ConnUriN) + case 
CONNECTIVE_BYTEARRAY => Eval.now(ConnByteArrayN) + + /* Unary expressions (1-arity constructors) */ + /* ======================================== */ + + case ENEG => readPar.map(ENegN(_)) + case ENOT => readPar.map(ENotN(_)) + + case BUNDLE => (readPar, readBool, readBool).mapN(BundleN(_, _, _)) + + /* Connective */ + case CONNECTIVE_NOT => readPar.map(ConnNotN(_)) + + /* Binary expressions (2-arity constructors) */ + /* ========================================= */ + + case EPLUS => (readPar, readPar).mapN(EPlusN(_, _)) + case EMINUS => (readPar, readPar).mapN(EMinusN(_, _)) + case EMULT => (readPar, readPar).mapN(EMultN(_, _)) + case EDIV => (readPar, readPar).mapN(EDivN(_, _)) + case EMOD => (readPar, readPar).mapN(EModN(_, _)) + case ELT => (readPar, readPar).mapN(ELtN(_, _)) + case ELTE => (readPar, readPar).mapN(ELteN(_, _)) + case EGT => (readPar, readPar).mapN(EGtN(_, _)) + case EGTE => (readPar, readPar).mapN(EGteN(_, _)) + case EEQ => (readPar, readPar).mapN(EEqN(_, _)) + case ENEQ => (readPar, readPar).mapN(ENeqN(_, _)) + case EAND => (readPar, readPar).mapN(EAndN(_, _)) + case ESHORTAND => (readPar, readPar).mapN(EShortAndN(_, _)) + case EOR => (readPar, readPar).mapN(EOrN(_, _)) + case ESHORTOR => (readPar, readPar).mapN(EShortOrN(_, _)) + case EPLUSPLUS => (readPar, readPar).mapN(EPlusPlusN(_, _)) + case EMINUSMINUS => (readPar, readPar).mapN(EMinusMinusN(_, _)) + case EPERCENT => (readPar, readPar).mapN(EPercentPercentN(_, _)) + + case EMATCHES => (readPar, readPar).mapN(EMatchesN(_, _)) + + /* N-ary parameter expressions (N-arity constructors) */ + /* ================================================== */ + + case PARPROC => readPars.map(ParProcN(_)) + + case SEND => (readPar, readPars, readBool).mapN(SendN(_, _, _)) + + case RECEIVE => + (readSeq(readByte >>= readReceiveBind), readPar, readBool, readBool, readInt) + .mapN(ReceiveN(_, _, _, _, _)) + + case MATCH => + (readPar, readSeq(readByte >>= readMatchMCase)).mapN(MatchN(_, _)) + + case NEW => + 
(readInt, readPar, readSeq(readString), readSeq(readTupleStringPar)).mapN(NewN(_, _, _, _)) + + /* Collections */ + case ELIST => (readPars, readVarOpt).mapN(EListN(_, _)) + case ETUPLE => readPars.map(ETupleN(_)) + case ESET => (readPars, readVarOpt).mapN(ESetN(_, _)) + case EMAP => (readSeq(readTuplePar), readVarOpt).mapN(EMapN(_, _)) + + /* Connective */ + case CONNECTIVE_AND => readPars.map(ConnAndN(_)) + case CONNECTIVE_OR => readPars.map(ConnOrN(_)) + + case EMETHOD => (readString, readPar, readPars).mapN(EMethodN(_, _, _)) + + case _ => throw new Exception(s"Invalid tag `$tag` for ParN deserialization") + } + + readPar + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SerializedSize.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SerializedSize.scala new file mode 100644 index 00000000000..89092d4a882 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SerializedSize.scala @@ -0,0 +1,166 @@ +package coop.rchain.models.rholangn.parmanager + +import cats.Eval +import cats.syntax.all._ +import com.google.protobuf.CodedOutputStream +import coop.rchain.models.rholangn.{RhoTypeN, _} + +import scala.annotation.unused + +private object ProtobufSerializedSize { + import Constants._ + + // Terminal expressions + def sSize(bytes: Array[Byte]): Eval[Int] = + Eval.later(CodedOutputStream.computeByteArraySizeNoTag(bytes)) + def sSize(@unused v: Boolean): Eval[Int] = Eval.now(booleanSize) + def sSize(v: Int): Eval[Int] = Eval.later(CodedOutputStream.computeInt32SizeNoTag(v)) + def sSize(v: Long): Eval[Int] = Eval.later(CodedOutputStream.computeInt64SizeNoTag(v)) + def sSize(v: String): Eval[Int] = Eval.later(CodedOutputStream.computeStringSizeNoTag(v)) + def sSize(v: BigInt): Eval[Int] = sSize(v.toByteArray) + + // Recursive traversal using memoized value + def sSize(x: RhoTypeN): Eval[Int] = x.serializedSize + + // Recursive traversal of a sequence using memoized values + def sSize(ps: 
Seq[RhoTypeN]): Eval[Int] = sSizeSeq[RhoTypeN](ps, sSize) + + def sSize(kv: (RhoTypeN, RhoTypeN)): Eval[Int] = + kv.bimap(sSize, sSize).mapN(_ + _) + + def sSize(pOpt: Option[RhoTypeN]): Eval[Int] = + (Eval.now(booleanSize), pOpt.traverse(sSize)).mapN(_ + _.getOrElse(0)) + + def sSizeSeqTuplePar(seq: Seq[(RhoTypeN, RhoTypeN)]): Eval[Int] = + sSizeSeq[(RhoTypeN, RhoTypeN)](seq, sSize) + + def sSizeTupleStringPar(kv: (String, RhoTypeN)): Eval[Int] = + kv.bimap(sSize, sSize).mapN(_ + _) + + def sSizeSeqTupleStringPar(seq: Seq[(String, RhoTypeN)]): Eval[Int] = + sSizeSeq[(String, RhoTypeN)](seq, sSizeTupleStringPar) + + def totalSize(sizes: Int*): Int = tagSize + sizes.sum + + // Calculates serialized size of a sequence (the sum of element sizes) + def sSizeSeq[T](seq: Seq[T], f: T => Eval[Int]): Eval[Int] = + (sSize(seq.size), seq.traverse(f).map(_.sum)).mapN(_ + _) +} + +private[parmanager] object SerializedSize { + import ProtobufSerializedSize._ + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def calcSerSize(p: RhoTypeN): Eval[Int] = Eval.defer { + p match { + + /* Terminal expressions (0-arity constructors) */ + /* =========================================== */ + + case _: NilN.type => Eval.now(totalSize()) + case gBool: GBoolN => sSize(gBool.v).map(totalSize(_)) + case gInt: GIntN => sSize(gInt.v).map(totalSize(_)) + case gBigInt: GBigIntN => sSize(gBigInt.v).map(totalSize(_)) + case gString: GStringN => sSize(gString.v).map(totalSize(_)) + case gByteArray: GByteArrayN => sSize(gByteArray.v).map(totalSize(_)) + case gUri: GUriN => sSize(gUri.v).map(totalSize(_)) + case _: WildcardN.type => Eval.now(totalSize()) + + /* Unforgeable names */ + case unf: UnforgeableN => sSize(unf.v).map(totalSize(_)) + + /* Vars */ + case v: BoundVarN => sSize(v.idx).map(totalSize(_)) + case v: FreeVarN => sSize(v.idx).map(totalSize(_)) + case v: ConnVarRefN => (sSize(v.index), sSize(v.depth)).mapN(totalSize(_, _)) + + /* Simple types */ + case _: ConnectiveSTypeN => 
Eval.now(totalSize()) + + /* Unary expressions (1-arity constructors) */ + /* ======================================== */ + + case op: Operation1ParN => sSize(op.p).map(totalSize(_)) + + case bundle: BundleN => + (sSize(bundle.body), sSize(bundle.writeFlag), sSize(bundle.readFlag)) + .mapN(totalSize(_, _, _)) + + /* Connective */ + case connNot: ConnNotN => sSize(connNot.p).map(totalSize(_)) + + /* Binary expressions (2-arity constructors) */ + /* ========================================= */ + + case op: Operation2ParN => (sSize(op.p1), sSize(op.p2)).mapN(totalSize(_, _)) + + case eMatches: EMatchesN => + (sSize(eMatches.target), sSize(eMatches.pattern)).mapN(totalSize(_, _)) + + /* N-ary parameter expressions (N-arity constructors) */ + /* ================================================== */ + + case pProc: ParProcN => sSize(pProc.ps).map(totalSize(_)) + + case send: SendN => { + (sSize(send.chan), sSize(send.data), sSize(send.persistent)).mapN(totalSize(_, _, _)) + } + + case receive: ReceiveN => + val bindsSize = sSize(receive.binds) + val bodySize = sSize(receive.body) + val persistentSize = sSize(receive.persistent) + val peekSize = sSize(receive.peek) + val bindCountSize = sSize(receive.bindCount) + (bindsSize, bodySize, persistentSize, peekSize, bindCountSize).mapN( + totalSize(_, _, _, _, _) + ) + + case m: MatchN => + val targetSize = sSize(m.target) + val casesSize = sSize(m.cases) + (targetSize, casesSize).mapN(totalSize(_, _)) + + case n: NewN => + val bindCountSize = sSize(n.bindCount) + val pSize = sSize(n.p) + val uriSize = sSizeSeq[String](n.uri, sSize) + val injectionsSize = sSizeSeqTupleStringPar(n.injections.toSeq) + (bindCountSize, pSize, uriSize, injectionsSize).mapN(totalSize(_, _, _, _)) + + /* Collections */ + case list: EListN => (sSize(list.ps), sSize(list.remainder)).mapN(totalSize(_, _)) + case eTuple: ETupleN => sSize(eTuple.ps).map(totalSize(_)) + case eSet: ESetN => (sSize(eSet.sortedPs), sSize(eSet.remainder)).mapN(totalSize(_, _)) 
+ case eMap: EMapN => + (sSizeSeqTuplePar(eMap.sortedPs), sSize(eMap.remainder)).mapN(totalSize(_, _)) + + /* Connective */ + case connAnd: ConnAndN => sSize(connAnd.ps).map(totalSize(_)) + case connOr: ConnOrN => sSize(connOr.ps).map(totalSize(_)) + + case eMethod: EMethodN => + val methodNameSize = sSize(eMethod.methodName) + val targetSize = sSize(eMethod.target) + val argumentsSize = sSize(eMethod.arguments) + (methodNameSize, targetSize, argumentsSize).mapN(totalSize(_, _, _)) + + /* Auxiliary types */ + + case bind: ReceiveBindN => + val patternsSize = sSize(bind.patterns) + val sourceSize = sSize(bind.source) + val reminderSize = sSize(bind.remainder) + val freeCountSize = sSize(bind.freeCount) + (patternsSize, sourceSize, reminderSize, freeCountSize).mapN(totalSize(_, _, _, _)) + + case mCase: MatchCaseN => + val patternSize = sSize(mCase.pattern) + val sourceSize = sSize(mCase.source) + val freeCountSize = sSize(mCase.freeCount) + (patternSize, sourceSize, freeCountSize).mapN(totalSize(_, _, _)) + + case x => throw new Exception(s"Undefined type $x") + } + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Sorting.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Sorting.scala new file mode 100644 index 00000000000..b31a58e59f5 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/Sorting.scala @@ -0,0 +1,21 @@ +package coop.rchain.models.rholangn.parmanager + +import coop.rchain.models.rholangn._ + +import java.util +import scala.math.Ordered.orderingToOrdered + +private[parmanager] object Sorting { + implicit val o: Ordering[Array[Byte]] = (a: Array[Byte], b: Array[Byte]) => + util.Arrays.compare(a, b) + + def sortPars(ps: Seq[ParN]): Seq[ParN] = ps.sorted(Ordering.by((p: ParN) => p.rhoHash)) + def sortBinds(bs: Seq[ReceiveBindN]): Seq[ReceiveBindN] = + bs.sorted(Ordering.by((b: ReceiveBindN) => b.rhoHash)) + def sortBindsWithT[T](bs: Seq[(ReceiveBindN, T)]): Seq[(ReceiveBindN, T)] 
= + bs.sortBy { case (receiveBind, _) => receiveBind.rhoHash } + def sortUris(uris: Seq[String]): Seq[String] = uris.sorted + def sortInjections(injections: Map[String, ParN]): Seq[(String, ParN)] = + injections.toSeq.sortBy { case (str, _) => str } + def comparePars(p1: ParN, p2: ParN): Int = p1.rhoHash compare p2.rhoHash +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SubstituteRequired.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SubstituteRequired.scala new file mode 100644 index 00000000000..953580b27ff --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/SubstituteRequired.scala @@ -0,0 +1,62 @@ +package coop.rchain.models.rholangn.parmanager + +import coop.rchain.models.rholangn._ + +private[parmanager] object SubstituteRequired { + private def sReq(p: RhoTypeN): Boolean = p.substituteRequired + private def sReq(kv: (RhoTypeN, RhoTypeN)): Boolean = + kv._1.substituteRequired || kv._2.substituteRequired + private def sReq(ps: Seq[RhoTypeN]): Boolean = ps.exists(sReq) + private def sReqKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Boolean = kVPairs.exists(sReq) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def substituteRequiredFn(p: RhoTypeN): Boolean = p match { + + /** Basic types */ + case _: NilN.type => false + case pProc: ParProcN => sReq(pProc.ps) + case send: SendN => sReq(send.chan) || sReq(send.data) + case receive: ReceiveN => sReq(receive.binds) || sReq(receive.body) + case m: MatchN => sReq(m.target) || sReq(m.cases) + case n: NewN => sReq(n.p) + + /** Ground types */ + case _: GroundN => false + + /** Collections */ + case eList: EListN => sReq(eList.ps) + case eTuple: ETupleN => sReq(eTuple.ps) + case eSet: ESetN => sReq(eSet.sortedPs) + case eMap: EMapN => sReqKVPairs(eMap.sortedPs) + + /** Vars */ + case _: BoundVarN => true + case _: FreeVarN => false + case _: WildcardN.type => false + + /** Operations */ + case op: Operation1ParN => sReq(op.p) + 
case op: Operation2ParN => sReq(op.p1) || sReq(op.p2) + case eMethod: EMethodN => sReq(eMethod.target) || sReq(eMethod.arguments) + case eMatches: EMatchesN => sReq(eMatches.target) || sReq(eMatches.pattern) + + /** Unforgeable names */ + case _: UnforgeableN => false + + /** Connective */ + case _: ConnectiveSTypeN => false + case connNot: ConnNotN => sReq(connNot.p) + case connAnd: ConnAndN => sReq(connAnd.ps) + case connOr: ConnOrN => sReq(connOr.ps) + case _: ConnVarRefN => true + + /** Auxiliary types */ + case bind: ReceiveBindN => sReq(bind.patterns) || sReq(bind.source) + case mCase: MatchCaseN => sReq(mCase.pattern) || sReq(mCase.source) + + /** Other types */ + case bundle: BundleN => sReq(bundle.body) + + case x => throw new Exception(s"Undefined type $x") + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveReader.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveReader.scala new file mode 100644 index 00000000000..05ab0825af6 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveReader.scala @@ -0,0 +1,10 @@ +package coop.rchain.models.rholangn.parmanager.primitive + +trait PrimitiveReader[F[_]] { + def readByte: F[Byte] + def readBytes: F[Array[Byte]] + def readBool: F[Boolean] + def readInt: F[Int] + def readLong: F[Long] + def readString: F[String] +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveWriter.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveWriter.scala new file mode 100644 index 00000000000..7772ec84433 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/PrimitiveWriter.scala @@ -0,0 +1,15 @@ +package coop.rchain.models.rholangn.parmanager.primitive + +trait PrimitiveWriter[F[_]] { + def write(x: Byte): F[Unit] + + /** Writes raw bytes without size prefix */ + def writeRaw(x: 
Array[Byte]): F[Unit] + + /** Writes bytes with size prefix */ + def write(x: Array[Byte]): F[Unit] + def write(x: Boolean): F[Unit] + def write(x: Int): F[Unit] + def write(x: Long): F[Unit] + def write(x: String): F[Unit] +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/syntax/PrimitiveWriterSyntax.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/syntax/PrimitiveWriterSyntax.scala new file mode 100644 index 00000000000..20afea7707b --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/primitive/syntax/PrimitiveWriterSyntax.scala @@ -0,0 +1,33 @@ +package coop.rchain.models.rholangn.parmanager.primitive.syntax + +import cats.Applicative +import cats.syntax.all._ +import coop.rchain.models.rholangn.parmanager.primitive.PrimitiveWriter + +final class PrimitiveWriterOps[F[_]](val writer: PrimitiveWriter[F]) extends AnyVal { + def writeBigInt(x: BigInt): F[Unit] = writer.write(x.toByteArray) + + def writeOpt[T](pOpt: Option[T], writeT: T => F[Unit])( + implicit applicativeF: Applicative[F] + ): F[Unit] = + pOpt.map(writer.write(true) *> writeT(_)).getOrElse(writer.write(false)) + + def writeSeq[T](seq: Seq[T], writeT: T => F[Unit])( + implicit applicativeF: Applicative[F] + ): F[Unit] = + writer.write(seq.size) *> seq.traverse_(writeT) + + def writeTuple[T](kv: (T, T), writeT: T => F[Unit])( + implicit applicativeF: Applicative[F] + ): F[Unit] = writeT(kv._1) *> writeT(kv._2) + + def writeTupleStringT[T](kv: (String, T), writeT: T => F[Unit])( + implicit applicativeF: Applicative[F] + ): F[Unit] = writer.write(kv._1) *> writeT(kv._2) +} + +object PrimitiveWriterSyntax { + implicit final def primitiveWriterSyntax[F[_]: Applicative]( + writer: PrimitiveWriter[F] + ): PrimitiveWriterOps[F] = new PrimitiveWriterOps[F](writer) +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoCodec.scala 
b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoCodec.scala new file mode 100644 index 00000000000..bce8d58588e --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoCodec.scala @@ -0,0 +1,34 @@ +package coop.rchain.models.rholangn.parmanager.protobuf + +import cats.Applicative +import cats.syntax.all._ +import com.google.protobuf.{CodedInputStream, CodedOutputStream} + +import java.io.{ByteArrayInputStream, ByteArrayOutputStream} +import scala.util.Using + +object ProtoCodec { + + // TODO: Properly handle errors + @SuppressWarnings(Array("org.wartremover.warts.TryPartial")) + def decode[F[_], T](bv: Array[Byte], read: CodedInputStream => F[T]): F[T] = + Using(new ByteArrayInputStream(bv)) { input => + val cis = CodedInputStream.newInstance(input) + read(cis) + }.get + + // TODO: Properly handle errors + @SuppressWarnings(Array("org.wartremover.warts.TryPartial")) + def encode[F[_]: Applicative]( + payloadSize: Int, + write: CodedOutputStream => F[Unit] + ): F[Array[Byte]] = + Using(new ByteArrayOutputStream(payloadSize)) { baos => + val cos = CodedOutputStream.newInstance(baos) + write(cos).map { _ => + cos.flush() + baos.flush() + baos.toByteArray + } + }.get +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveReader.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveReader.scala new file mode 100644 index 00000000000..eb7e9ca8977 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveReader.scala @@ -0,0 +1,24 @@ +package coop.rchain.models.rholangn.parmanager.protobuf + +import cats.Eval +import com.google.protobuf.CodedInputStream +import coop.rchain.models.rholangn.parmanager.primitive.PrimitiveReader + +object ProtoPrimitiveReader { + + /** Wrapper for protobuf de-serialization of primitive types. 
*/ + def apply(input: CodedInputStream) = new PrimitiveReader[Eval] { + // NOTE: Eval.always is used to ensure correct deserialization and read from input stream + def readByte: Eval[Byte] = Eval.always(input.readRawByte()) + + def readBytes: Eval[Array[Byte]] = Eval.always(input.readByteArray()) + + def readBool: Eval[Boolean] = Eval.always(input.readBool()) + + def readInt: Eval[Int] = Eval.always(input.readUInt32()) + + def readLong: Eval[Long] = Eval.always(input.readUInt64()) + + def readString: Eval[String] = Eval.always(input.readString()) + } +} diff --git a/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveWriter.scala b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveWriter.scala new file mode 100644 index 00000000000..8e662ffae38 --- /dev/null +++ b/models/src/main/scala/coop/rchain/models/rholangn/parmanager/protobuf/ProtoPrimitiveWriter.scala @@ -0,0 +1,26 @@ +package coop.rchain.models.rholangn.parmanager.protobuf + +import cats.Eval +import com.google.protobuf.CodedOutputStream +import coop.rchain.models.rholangn.parmanager.primitive.PrimitiveWriter + +/** Wrapper for protobuf serialization of primitive types. 
*/ +object ProtoPrimitiveWriter { + def apply(output: CodedOutputStream) = new PrimitiveWriter[Eval] { + def write(x: Byte): Eval[Unit] = Eval.later(output.writeRawByte(x)) + + /** Writes raw bytes without size prefix */ + def writeRaw(x: Array[Byte]): Eval[Unit] = Eval.later(output.writeRawBytes(x)) + + /** Writes bytes with size prefix */ + def write(x: Array[Byte]): Eval[Unit] = Eval.later(output.writeByteArrayNoTag(x)) + + def write(x: Boolean): Eval[Unit] = Eval.later(output.writeBoolNoTag(x)) + + def write(x: Int): Eval[Unit] = Eval.later(output.writeUInt32NoTag(x)) + + def write(x: Long): Eval[Unit] = Eval.later(output.writeUInt64NoTag(x)) + + def write(x: String): Eval[Unit] = Eval.later(output.writeStringNoTag(x)) + } +} diff --git a/models/src/test/scala/coop/rchain/models/rholangn/BindingsSpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/BindingsSpec.scala new file mode 100644 index 00000000000..535cc063665 --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/BindingsSpec.scala @@ -0,0 +1,465 @@ +package coop.rchain.models.rholangn + +import com.google.protobuf.ByteString +import coop.rchain.models.Connective.ConnectiveInstance._ +import coop.rchain.models.Expr.ExprInstance._ +import coop.rchain.models.Var.VarInstance._ +import coop.rchain.models.Var.WildcardMsg +import coop.rchain.models._ +import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.rholangn.Bindings._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +import scala.collection.immutable.BitSet + +class BindingsSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + val sizeTest: Int = 50 + val bytesTest: Array[Byte] = Array.fill(sizeTest)(42) + val strTest: String = List.fill(sizeTest)("42").mkString + + /** Basic types */ + it should "test Nil" in { + val p1: ParN = NilN + val p2: Par = Par() + toProto(p1) should be(p2) + 
fromProto(p2) should be(p1) + } + + it should "test ParProc" in { + val p1: ParN = ParProcN(Seq(GIntN(42), GBoolN(true))) + val p2: Par = GInt(42) ++ GBool(true) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "show error in old implementation" in { + val p1: Par = Par( + List(), + List(), + List(), + Vector(Expr(GInt(42)), Expr(GBool(true))), + List(), + List(), + List(), + List(), + AlwaysEqual(BitSet()) + ) + val p2: Par = Par( + List(), + List(), + List(), + Vector(Expr(GBool(true)), Expr(GInt(42))), + List(), + List(), + List(), + List(), + AlwaysEqual(BitSet()) + ) + p1.equals(p2) should be(false) + } + + it should "test Send" in { + val p1: ParN = SendN(NilN, Seq(NilN, SendN(NilN, NilN)), persistent = true) + val p2: Par = Send(Par(), Seq(Par(), Send(Par(), Seq(Par()))), persistent = true) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test Receive" in { + val bind11 = ReceiveBindN(Seq(FreeVarN(41), FreeVarN(42)), NilN, Some(BoundVarN(42)), 2) + val bind12 = ReceiveBindN(Seq(FreeVarN(42), FreeVarN(41)), NilN, Some(BoundVarN(42)), 2) + val p1: ParN = ReceiveN(Seq(bind11, bind12), NilN, persistent = true, peek = false, 4) + val bind21 = + ReceiveBind(Seq(EVar(FreeVar(41)), EVar(FreeVar(42))), Par(), Some(BoundVar(42)), 2) + val bind22 = + ReceiveBind(Seq(EVar(FreeVar(42)), EVar(FreeVar(41))), Par(), Some(BoundVar(42)), 2) + val p2: Par = Receive(Seq(bind21, bind22), Par(), persistent = true, peek = false, 4) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test Match" in { + val case11 = MatchCaseN(FreeVarN(41), BoundVarN(42), 1) + val case12 = MatchCaseN(WildcardN, BoundVarN(42), 0) + val p1: ParN = MatchN(NilN, Seq(case11, case12)) + val case21 = MatchCase(EVar(FreeVar(41)), EVar(BoundVar(42)), 1) + val case22 = MatchCase(EVar(Wildcard(WildcardMsg())), EVar(BoundVar(42))) + val p2: Par = Match(Par(), Seq(case21, case22)) + toProto(p1) should be(p2) + fromProto(p2) should 
be(p1) + } + + it should "test New" in { + val uri = Seq("4", "2", "3", "1") + val inj1 = Map("4" -> NilN, "3" -> NilN) + val inj2 = Map("4" -> Par(), "3" -> Par()) + val p1: ParN = NewN(1, BoundVarN(0), uri, inj1) + val p2: Par = New(1, EVar(BoundVar(0)), uri, inj2) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Ground types */ + it should "test GBool" in { + val p1: ParN = GBoolN(true) + val p2: Par = GBool(true) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test GInt" in { + val p1: ParN = GIntN(42) + val p2: Par = GInt(42) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test GBigInt" in { + val p1: ParN = GBigIntN(BigInt(bytesTest)) + val p2: Par = GBigInt(BigInt(bytesTest)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test GString" in { + val p1: ParN = GStringN(strTest) + val p2: Par = GString(strTest) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test GByteArray" in { + val p1: ParN = GByteArrayN(bytesTest) + val p2: Par = GByteArray(ByteString.copyFrom(bytesTest)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test GUri" in { + val p1: ParN = GUriN(strTest) + val p2: Par = GUri(strTest) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Collections */ + it should "test EList" in { + val p1: ParN = EListN(Seq(NilN, EListN()), Some(BoundVarN(42))) + val p2: Par = EList(Seq(Par(), EList()), BitSet(), connectiveUsed = false, Some(BoundVar(42))) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ETuple" in { + val p1: ParN = ETupleN(Seq(NilN, ETupleN(NilN))) + val p2: Par = ETuple(Seq(Par(), ETuple(Seq(Par())))) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ESet" in { + val p1: ParN = ESetN(Seq(NilN, ESetN())) + val p2: Par = ParSet(Seq(Par(), ParSet(Seq()))) + toProto(p1) should be(p2) + fromProto(p2) 
should be(p1) + } + + it should "test EMap" in { + val p1: ParN = EMapN(Seq(NilN -> EMapN(), EMapN() -> NilN)) + val emptyMap: Par = ParMap(Seq()) + val p2: Par = ParMap(Seq(Par() -> emptyMap, emptyMap -> Par())) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Vars */ + it should "test BoundVar" in { + val p1: ParN = BoundVarN(42) + val p2: Par = EVar(BoundVar(42)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test FreeVar" in { + val p1: ParN = FreeVarN(42) + val p2: Par = EVar(FreeVar(42)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test Wildcard" in { + val p1: ParN = WildcardN + val p2: Par = EVar(Wildcard(WildcardMsg())) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Operations */ + it should "test ENeg" in { + val p1: ParN = ENegN(GIntN(42)) + val p2: Par = ENeg(GInt(42)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ENot" in { + val p1: ParN = ENotN(GBoolN(true)) + val p2: Par = ENot(GBool(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EPlus" in { + val p1: ParN = EPlusN(GIntN(42), GIntN(43)) + val p2: Par = EPlus(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMinus" in { + val p1: ParN = EMinusN(GIntN(42), GIntN(43)) + val p2: Par = EMinus(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMult" in { + val p1: ParN = EMultN(GIntN(42), GIntN(43)) + val p2: Par = EMult(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EDiv" in { + val p1: ParN = EDivN(GIntN(42), GIntN(43)) + val p2: Par = EDiv(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMod" in { + val p1: ParN = EModN(GIntN(42), GIntN(43)) + val p2: Par = EMod(GInt(42), GInt(43)) + toProto(p1) should be(p2) + 
fromProto(p2) should be(p1) + } + + it should "test ELt" in { + val p1: ParN = ELtN(GIntN(42), GIntN(43)) + val p2: Par = ELt(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ELte" in { + val p1: ParN = ELteN(GIntN(42), GIntN(43)) + val p2: Par = ELte(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EGt" in { + val p1: ParN = EGtN(GIntN(42), GIntN(43)) + val p2: Par = EGt(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EGteN" in { + val p1: ParN = EGteN(GIntN(42), GIntN(43)) + val p2: Par = EGte(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EEq" in { + val p1: ParN = EEqN(GIntN(42), GIntN(43)) + val p2: Par = EEq(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ENeq" in { + val p1: ParN = ENeqN(GIntN(42), GIntN(43)) + val p2: Par = ENeq(GInt(42), GInt(43)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EAnd" in { + val p1: ParN = EAndN(GBoolN(true), GBoolN(false)) + val p2: Par = EAnd(GBool(true), GBool(false)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EShortAnd" in { + val p1: ParN = EShortAndN(GBoolN(true), GBoolN(false)) + val p2: Par = EShortAnd(GBool(true), GBool(false)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EOr" in { + val p1: ParN = EOrN(GBoolN(true), GBoolN(false)) + val p2: Par = EOr(GBool(true), GBool(false)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EShortOr" in { + val p1: ParN = EShortOrN(GBoolN(true), GBoolN(false)) + val p2: Par = EShortOr(GBool(true), GBool(false)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EPlusPlus" in { + val p1: ParN = EPlusPlusN(GStringN("42"), GStringN("43")) + val p2: 
Par = EPlusPlus(GString("42"), GString("43")) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMinusMinus" in { + val p1: ParN = EMinusMinusN(EListN(NilN), EListN(NilN)) + val p2: Par = EMinusMinus(EList(Seq(Par())), EList(Seq(Par()))) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMatches" in { + val p1: ParN = EMatchesN(GIntN(42), GIntN(42)) + val p2: Par = EMatches(GInt(42), GInt(42)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EPercentPercent" in { + val p1: ParN = EPercentPercentN(GStringN("x"), GIntN(42)) + val p2: Par = EPercentPercent(GString("x"), GInt(42)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test EMethod" in { + val p1: ParN = EMethodN("nth", EListN(NilN), GIntN(1)) + val p2: Par = EMethod("nth", EList(Seq(Par())), Seq(GInt(1): Par)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Unforgeable names */ + it should "test UPrivate" in { + val p1: ParN = UPrivateN(bytesTest) + val p2: Par = GPrivate(ByteString.copyFrom(bytesTest)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test UDeployId" in { + val p1: ParN = UDeployIdN(bytesTest) + val p2: Par = GDeployId(ByteString.copyFrom(bytesTest)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test UDeployerId" in { + val p1: ParN = UDeployerIdN(bytesTest) + val p2: Par = GDeployerId(ByteString.copyFrom(bytesTest)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Connective */ + it should "test ConnBool" in { + val p1: ParN = ConnBoolN + val p2: Par = Connective(ConnBool(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnInt" in { + val p1: ParN = ConnIntN + val p2: Par = Connective(ConnInt(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnBigInt" in { + val p1: ParN = ConnBigIntN + 
val p2: Par = Connective(ConnBigInt(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnString" in { + val p1: ParN = ConnStringN + val p2: Par = Connective(ConnString(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnUri" in { + val p1: ParN = ConnUriN + val p2: Par = Connective(ConnUri(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnByteArray" in { + val p1: ParN = ConnByteArrayN + val p2: Par = Connective(ConnByteArray(true)) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnNotN" in { + val p1: ParN = ConnNotN(SendN(NilN, NilN)) + val p2: Par = Connective(ConnNotBody(Send(Par(), Seq(Par())))) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnAndN" in { + val p1: ParN = ConnAndN(WildcardN, SendN(NilN, NilN)) + val p2: Par = Connective( + ConnAndBody(ConnectiveBody(Seq(EVar(Wildcard(WildcardMsg())), Send(Par(), Seq(Par()))))) + ) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnOrN" in { + val p1: ParN = ConnOrN(WildcardN, SendN(NilN, NilN)) + val p2: Par = Connective( + ConnOrBody(ConnectiveBody(Seq(EVar(Wildcard(WildcardMsg())), Send(Par(), Seq(Par()))))) + ) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test ConnVarRefN" in { + val p1: ParN = ConnVarRefN(0, 1) + val p2: Par = Connective(VarRefBody(VarRef(0, 1))) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + /** Other types */ + it should "test Bundle" in { + val p1: ParN = BundleN(NilN, writeFlag = true, readFlag = true) + val p2: Par = Bundle(Par(), writeFlag = true, readFlag = true) + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } + + it should "test SysAuthToken" in { + val p1: ParN = USysAuthTokenN() + val p2: Par = GSysAuthToken() + toProto(p1) should be(p2) + fromProto(p2) should be(p1) + } +} diff --git 
a/models/src/test/scala/coop/rchain/models/rholangn/CollectionSpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/CollectionSpec.scala new file mode 100644 index 00000000000..74db7817ade --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/CollectionSpec.scala @@ -0,0 +1,254 @@ +package coop.rchain.models.rholangn + +import coop.rchain.models.rholangn.CollectionSpecTestData._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +object CollectionSpecTestData { + // After sorting, these two elements will be the same + val pproc1: ParProcN = ParProcN(Seq(GIntN(42), NilN)) + val pproc2: ParProcN = ParProcN(Seq(NilN, GIntN(42))) +} + +class EListSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + it should "not preserve ordering" in { + val p1 = EListN(Seq(NilN, EListN(), pproc1)) + val p2 = EListN(Seq(NilN, pproc1, EListN())) + p1 should not be p2 + } + + it should "sort data in elements" in { + val p1 = EListN(pproc1) + val p2 = EListN(pproc2) + p1 should be(p2) + } + + it should "perform append operation" in { + val p1 = EListN() :+ NilN :+ pproc1 :+ EListN() + val p2 = EListN(Seq(NilN, pproc1, EListN())) + p1 should be(p2) + } + + it should "perform prepend operation" in { + val p1 = NilN +: pproc1 +: EListN(EListN()) + val p2 = EListN(Seq(NilN, pproc1, ESetN())) + p1 should be(p2) + } + + it should "perform union operation" in { + val p11 = EListN(Seq(pproc1, EListN())) + val p12 = EListN(Seq(NilN, GIntN(42))) + val p2 = EListN(Seq(pproc1, EListN(), NilN, GIntN(42))) + p11 ++ p12 should be(p2) + } + + it should "perform union with sequence operation" in { + val p11 = EListN(Seq(pproc1, EListN())) + val seq = Seq(NilN, GIntN(42)) + val p2 = EListN(Seq(pproc1, EListN(), NilN, GIntN(42))) + p11 ++ seq should be(p2) + } +} + +class ETupleSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + it should 
"throw exception during creation tuple with an empty par sequence " in { + try { + ETupleN(Seq()) + } catch { + case ex: Exception => ex shouldBe a[Throwable] + } + } + it should "not preserve ordering" in { + val p1 = ETupleN(Seq(NilN, ETupleN(NilN), pproc1)) + val p2 = ETupleN(Seq(NilN, pproc1, ETupleN(NilN))) + p1 should not be p2 + } + + it should "sort data inside elements" in { + val p1 = ESetN(pproc1) + val p2 = ESetN(pproc2) + p1 should be(p2) + } +} + +class ESetSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + it should "preserve ordering" in { + val p1 = ESetN(Seq(NilN, ESetN(), pproc1)) + val p2 = ESetN(Seq(NilN, pproc2, ESetN())) + p1.sortedPs should be(p2.sortedPs) + p1 should be(p2) + } + + it should "deduplicate its elements where last seen element wins" in { + val p1 = ESetN(Seq(NilN, ESetN(), pproc1, NilN, ESetN(), pproc2)) + val p2 = ESetN(Seq(NilN, ESetN(), pproc1)) + p1 should be(p2) + } + + it should "distinguish different elements" in { + val p1 = ESetN(Seq(GIntN(42), ESetN(), pproc1)) + val p2 = ESetN(Seq(GIntN(43), ESetN(), pproc1)) + p1 should not be p2 + } + + it should "perform append operation" in { + val p1 = ESetN.empty + NilN + pproc1 + ESetN() + pproc2 + val p2 = ESetN(Seq(NilN, pproc1, ESetN())) + p1 should be(p2) + } + + it should "perform delete operation" in { + val p1 = ESetN(Seq(NilN, pproc1, ESetN())) - pproc2 - ESetN() - GIntN(42) + val p2 = ESetN(Seq(NilN)) + p1 should be(p2) + } + + it should "perform contain operation" in { + val p = ESetN(Seq(NilN, pproc1, ESetN())) + p.contains(NilN) should be(true) + p.contains(pproc2) should be(true) + p.contains(GIntN(42)) should be(false) + } + + it should "perform union operation" in { + val p11 = ESetN(Seq(pproc1, ESetN())) + val p12 = ESetN(Seq(NilN, pproc2, GIntN(42))) + val p2 = ESetN(Seq(NilN, pproc1, ESetN(), GIntN(42))) + p11 ++ p12 should be(p2) + } + + it should "perform union operation with sequence" in { + val p11 = ESetN(Seq(pproc1, ESetN())) + 
val seq = Seq(NilN, pproc2, GIntN(42)) + val p2 = ESetN(Seq(NilN, pproc1, ESetN(), GIntN(42))) + p11 ++ seq should be(p2) + } + + it should "perform difference operation" in { + val p1 = ESetN(Seq(NilN, pproc1, ESetN(), GIntN(42))) + val p2 = ESetN(Seq(pproc1, ESetN(), GIntN(43))) + val pDiff = ESetN(Seq(NilN, GIntN(42))) + p1 -- p2 should be(pDiff) + } + + it should "perform difference operation with sequence" in { + val p1 = ESetN(Seq(NilN, pproc1, ESetN(), GIntN(42))) + val seq = Seq(pproc1, ESetN(), GIntN(43)) + val pDiff = ESetN(Seq(NilN, GIntN(42))) + p1 -- seq should be(pDiff) + } +} + +class EMapSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + it should "preserve ordering" in { + val p1 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + val p2 = EMapN(Seq(pproc2 -> EMapN(), NilN -> GIntN(42))) + p1.sortedPs should be(p2.sortedPs) + p1 should be(p2) + } + + it should "deduplicate its elements where last seen element wins" in { + val p1 = + EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), NilN -> GIntN(43), pproc2 -> NilN)) + val p2 = EMapN(Seq(NilN -> GIntN(43), pproc1 -> NilN)) + p1 should be(p2) + } + + it should "distinguish different elements" in { + val p1 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + val p2 = EMapN(Seq(NilN -> GIntN(43), pproc1 -> EMapN())) + p1 should not be p2 + } + + it should "perform append operation" in { + val p1 = EMapN.empty + + (NilN -> GIntN(42)) + (pproc1 -> GIntN(43)) + (EMapN() -> NilN) + (pproc2 -> EMapN()) + val p2 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) + p1 should be(p2) + } + + it should "perform delete operation" in { + val p1 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) - + pproc2 - EMapN() - GIntN(42) + val p2 = EMapN(Seq(NilN -> GIntN(42))) + p1 should be(p2) + } + + it should "perform union operation" in { + val p11 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + val p12 = EMapN(Seq(GIntN(42) -> GIntN(43), pproc2 -> NilN)) + 
val p2 = EMapN(Seq(NilN -> GIntN(42), GIntN(42) -> GIntN(43), pproc1 -> NilN)) + p11 ++ p12 should be(p2) + } + + it should "perform union operation with sequence" in { + val p11 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + val seq = Seq(GIntN(42) -> GIntN(43), pproc2 -> NilN) + val p2 = EMapN(Seq(NilN -> GIntN(42), GIntN(42) -> GIntN(43), pproc1 -> NilN)) + p11 ++ seq should be(p2) + } + + it should "perform difference operation" in { + val p1 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) + val p2 = EMapN(Seq(NilN -> GIntN(42), pproc2 -> GIntN(42), EMapN() -> GIntN(42))) + val pDiff = EMapN.empty + p1 -- p2 should be(pDiff) + } + + it should "perform difference operation with sequence" in { + val p1 = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) + val seq = Seq(NilN, pproc2, EMapN()) + val pDiff = EMapN.empty + p1 -- seq should be(pDiff) + } + + it should "perform contain operation" in { + val p = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + p.contains(NilN) should be(true) + p.contains(pproc2) should be(true) + p.contains(GIntN(42)) should be(false) + } + + it should "perform get() operation" in { + val p = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + p.get(NilN) should be(Some(GIntN(42))) + p.get(pproc2) should be(Some(EMapN())) + p.get(GIntN(42)) should be(None) + } + + it should "perform getOrElse() operation" in { + val p = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN())) + p.getOrElse(NilN, GIntN(43)) should be(GIntN(42)) + p.getOrElse(pproc2, GIntN(43)) should be(EMapN()) + p.getOrElse(GIntN(42), GIntN(43)) should be(GIntN(43)) + } + + it should "return keys in right order" in { + val p = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) + val keys1 = p.keys + val keys2 = p.sortedPs.map(_._1) + keys1 should be(keys2) + } + + it should "return values in right order" in { + val p = EMapN(Seq(NilN -> GIntN(42), pproc1 -> EMapN(), EMapN() -> NilN)) + val values1 = p.values + val 
values2 = p.sortedPs.map(_._2) + values1 should be(values2) + } +} + +class CollectionSortSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + "ESet and EMap should " should "export pars in the same order as ParProc" in { + val pProc = ParProcN(Seq(pproc1, ESetN(), GIntN(42), NilN)) + val set = ESetN(Seq(pproc2, GIntN(42), ESetN(), NilN)) + val map = EMapN(Seq(NilN -> NilN, pproc2 -> NilN, GIntN(42) -> NilN, ESetN() -> NilN)) + + val ps1 = pProc.sortedPs + val ps2 = set.sortedPs + val ps3 = map.keys + + (ps1 == ps2) == (ps1 == ps3) should be(true) + } +} diff --git a/models/src/test/scala/coop/rchain/models/rholangn/ParProcFlattingSpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/ParProcFlattingSpec.scala new file mode 100644 index 00000000000..4a4534e16cc --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/ParProcFlattingSpec.scala @@ -0,0 +1,66 @@ +package coop.rchain.models.rholangn + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +class ParProcFlattingSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + + it should "test flatting empty data" in { + val p = ParN.makeParProc(Seq()) + val expected = NilN + p should be(expected) + } + + it should "test flatting single Nil data" in { + val p = ParN.makeParProc(Seq(NilN)) + val expected = NilN + p should be(expected) + } + + it should "test flatting single not Nil data" in { + val p = ParN.makeParProc(Seq(GIntN(42))) + val expected = GIntN(42) + p should be(expected) + } + + it should "test flatting multiple data" in { + val p = ParN.makeParProc(Seq(GIntN(42), GIntN(43))) + val expected = ParProcN(Seq(GIntN(42), GIntN(43))) + p should be(expected) + } + + it should "test flatting multiple same data" in { + val p = ParN.makeParProc(Seq(GIntN(42), GIntN(42))) + val expected = ParProcN(Seq(GIntN(42), GIntN(42))) + p should be(expected) + } + + 
it should "test flatting multiple data with Nil" in { + val p = ParN.makeParProc(Seq(GIntN(42), GIntN(43), NilN)) + val expected = ParProcN(Seq(GIntN(42), GIntN(43))) + p should be(expected) + } + + it should "test flatting 2 data with Nil" in { + val p = ParN.makeParProc(Seq(GIntN(42), NilN)) + val expected = GIntN(42) + p should be(expected) + } + + it should "test flatting nested data" in { + val pProc1 = ParProcN(Seq(GIntN(42), GIntN(43))) + val pProc2 = ParProcN(Seq(GIntN(44), GIntN(45))) + val p = ParN.makeParProc(Seq(pProc1, pProc2)) + val expected = ParProcN(Seq(GIntN(42), GIntN(43), GIntN(44), GIntN(45))) + p should be(expected) + } + + it should "test flatting nested single data" in { + val pProc1 = ParProcN(Seq(GIntN(42))) + val pProc2 = ParProcN(Seq(NilN)) + val p = ParN.makeParProc(Seq(pProc1, pProc2)) + val expected = GIntN(42) + p should be(expected) + } +} diff --git a/models/src/test/scala/coop/rchain/models/rholangn/ParSpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/ParSpec.scala new file mode 100644 index 00000000000..0f20b6b9892 --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/ParSpec.scala @@ -0,0 +1,384 @@ +package coop.rchain.models.rholangn + +import coop.rchain.models.rholangn.parmanager.Manager +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +class ParSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + + /** Test hashing and serialization for par + * @param p1 Par for testing + * @param p2Opt optional Par (used for testing if necessary to check the correct sorting) + * @return true - if the result of serialization and hashing for both pairs is the same + */ + def simpleCheck(p1: ParN, p2Opt: Option[ParN] = None): Boolean = { + // Serialization and hashing testing + val bytes1 = p1.serialized.value + val recover1 = Manager.protoDeserialize(bytes1) + val res1: Boolean = p1.rhoHash 
sameElements recover1.rhoHash + + // Testing possibility of calculating the rest of the metadata (without checking correctness) + val _ = p1.connectiveUsed || p1.evalRequired || p1.substituteRequired + + // the correct sorting testing + val res2: Boolean = if (p2Opt.isDefined) { + val p2 = p2Opt.get + val bytes2 = p2.serialized.value + (p1.rhoHash sameElements p2.rhoHash) && + (bytes1 sameElements bytes2) && + (p1.connectiveUsed == p2.connectiveUsed) && + (p1.evalRequired == p2.evalRequired) && + (p1.substituteRequired == p2.substituteRequired) + } else true + + res1 && res2 + } + + val sizeTest: Int = 50 + val bytesTest: Array[Byte] = Array.fill(sizeTest)(42) + val strTest: String = List.fill(sizeTest)("42").mkString + + /** Basic types */ + it should "test Nil" in { + val p = NilN + simpleCheck(p) should be(true) + } + + it should "test ParProc" in { + val p1 = ParProcN(Seq(NilN, ParProcN(Seq(NilN)))) + val p2 = ParProcN(Seq(ParProcN(Seq(NilN)), NilN)) + simpleCheck(p1, Some(p2)) should be(true) + } + + it should "test Send with same data order" in { + val p = SendN(NilN, Seq(NilN, SendN(NilN, NilN)), persistent = true) + simpleCheck(p) should be(true) + } + + it should "test Send with different data order" in { + val p1 = SendN(NilN, Seq(NilN, SendN(NilN, NilN)), persistent = true) + val p2 = SendN(NilN, Seq(SendN(NilN, NilN), NilN), persistent = true) + simpleCheck(p1, Some(p2)) should be(false) + } + + it should "test Receive with same data order" in { + val bind1 = ReceiveBindN(Seq(FreeVarN(41), FreeVarN(42)), NilN, Some(BoundVarN(42)), 2) + val bind2 = ReceiveBindN(Seq(FreeVarN(42), FreeVarN(41)), NilN, Some(BoundVarN(42)), 2) + val p = ReceiveN(Seq(bind1, bind2), NilN, persistent = true, peek = false, 4) + simpleCheck(p) should be(true) + } + + it should "test Receive with different data order" in { + val bind1 = ReceiveBindN(Seq(FreeVarN(41), FreeVarN(42)), NilN, Some(BoundVarN(42)), 2) + val bind2 = ReceiveBindN(Seq(FreeVarN(42), FreeVarN(41)), NilN, 
Some(BoundVarN(42)), 2) + val p1 = ReceiveN(Seq(bind1, bind2), NilN, persistent = true, peek = false, 4) + val p2 = ReceiveN(Seq(bind2, bind1), NilN, persistent = true, peek = false, 4) + simpleCheck(p1, Some(p2)) should be(true) + } + + it should "test match with same data order" in { + val case1 = MatchCaseN(FreeVarN(41), BoundVarN(42), 1) + val case2 = MatchCaseN(WildcardN, BoundVarN(42)) + val p = MatchN(NilN, Seq(case1, case2)) + simpleCheck(p) should be(true) + } + + it should "test New with different data order" in { + val inj1: Map[String, ParN] = + Map("rho:rchain:deployId" -> NilN, "rho:rchain:deployerId" -> NilN) + val p1 = NewN(1, BoundVarN(0), Seq("rho:io:stdout", "rho:io:stderr"), inj1) + + val inj2: Map[String, ParN] = + Map("rho:rchain:deployerId" -> NilN, "rho:rchain:deployId" -> NilN) + val p2 = NewN(1, BoundVarN(0), Seq("rho:io:stderr", "rho:io:stdout"), inj2) + simpleCheck(p1, Some(p2)) should be(true) + } + + /** Ground types */ + it should "test GBool" in { + val p = GBoolN(true) + simpleCheck(p) should be(true) + } + + it should "test GInt" in { + val p = GIntN(42) + simpleCheck(p) should be(true) + } + + it should "test GBigInt" in { + val p = GBigIntN(BigInt(bytesTest)) + simpleCheck(p) should be(true) + } + + it should "test GString" in { + val p = GStringN(strTest) + simpleCheck(p) should be(true) + } + + it should "test GByteArray" in { + val p = GByteArrayN(bytesTest) + simpleCheck(p) should be(true) + } + + it should "test GUri" in { + val p = GUriN(strTest) + simpleCheck(p) should be(true) + } + + /** Collections */ + it should "test EList with same data order" in { + val p = EListN(Seq(NilN, EListN()), Some(BoundVarN(42))) + simpleCheck(p) should be(true) + } + + it should "test EList with different data order" in { + val p1 = EListN(Seq(NilN, EListN()), Some(BoundVarN(42))) + val p2 = EListN(Seq(EListN(), NilN), Some(BoundVarN(42))) + simpleCheck(p1, Some(p2)) should be(false) + } + + it should "test ETuple with same data order" in 
{ + val p = ETupleN(Seq(NilN, ETupleN(NilN))) + simpleCheck(p) should be(true) + } + + it should "test ETuple with different data order" in { + val p1 = ETupleN(Seq(NilN, ETupleN(NilN))) + val p2 = ETupleN(Seq(ETupleN(NilN), NilN)) + simpleCheck(p1, Some(p2)) should be(false) + } + + it should "test ESet with same data order" in { + val p = ESetN(Seq(NilN, ESetN()), Some(BoundVarN(42))) + simpleCheck(p) should be(true) + } + + it should "test ESet with different data order" in { + val p1 = ESetN(Seq(NilN, ESetN(NilN))) + val p2 = ESetN(Seq(ESetN(NilN), NilN)) + simpleCheck(p1, Some(p2)) should be(true) + } + + it should "test EMap with same data order" in { + val p = EMapN(Seq(NilN -> EMapN(), EMapN() -> NilN), Some(BoundVarN(42))) + simpleCheck(p) should be(true) + } + + it should "test EMap with different data order" in { + val p1 = EMapN(Seq(NilN -> EMapN(), EMapN() -> NilN)) + val p2 = EMapN(Seq(EMapN() -> NilN, NilN -> EMapN())) + simpleCheck(p1, Some(p2)) should be(true) + } + + /** Vars */ + it should "test BoundVar" in { + val p = BoundVarN(42) + simpleCheck(p) should be(true) + } + + it should "test FreeVar" in { + val p = FreeVarN(42) + simpleCheck(p) should be(true) + } + + it should "test Wildcard" in { + val p = WildcardN + simpleCheck(p) should be(true) + } + + /** Operations */ + it should "test ENeg" in { + val p = ENegN(GIntN(42)) + simpleCheck(p) should be(true) + } + + it should "test ENot" in { + val p = ENotN(GBoolN(true)) + simpleCheck(p) should be(true) + } + + it should "test EPlus with same data order" in { + val p = EPlusN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EPlus with different data order" in { + val p1 = EPlusN(GIntN(42), GIntN(43)) + val p2 = EPlusN(GIntN(43), GIntN(42)) + simpleCheck(p1, Some(p2)) should be(false) + } + + it should "test EMinus" in { + val p = EMinusN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EMult" in { + val p = EMultN(GIntN(42), 
GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EDiv" in { + val p = EDivN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EMod" in { + val p = EModN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test ELt" in { + val p = ELtN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test ELte" in { + val p = ELteN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EGt" in { + val p = EGtN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EGteN" in { + val p = EGteN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EEq with same data order" in { + val p = EEqN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test ENeq" in { + val p = ENeqN(GIntN(42), GIntN(43)) + simpleCheck(p) should be(true) + } + + it should "test EAnd" in { + val p = EAndN(GBoolN(true), GBoolN(false)) + simpleCheck(p) should be(true) + } + + it should "test EShortAnd" in { + val p = EShortAndN(GBoolN(true), GBoolN(false)) + simpleCheck(p) should be(true) + } + + it should "test EOr" in { + val p = EOrN(GBoolN(true), GBoolN(false)) + simpleCheck(p) should be(true) + } + + it should "test EShortOr" in { + val p = EShortOrN(GBoolN(true), GBoolN(false)) + simpleCheck(p) should be(true) + } + + it should "test EPlusPlus" in { + val p = EPlusPlusN(GStringN("42"), GStringN("43")) + simpleCheck(p) should be(true) + } + + it should "test EMinusMinus" in { + val p = EMinusMinusN(EListN(NilN), EListN(NilN)) + simpleCheck(p) should be(true) + } + + it should "test EMatches" in { + val p = EMatchesN(GIntN(42), GIntN(42)) + simpleCheck(p) should be(true) + } + + it should "test EPercentPercent" in { + val p = EPercentPercentN(GStringN("x"), GIntN(42)) + simpleCheck(p) should be(true) + } + + it should "test EMethod" in { + val p = EMethodN("nth", EListN(NilN), GIntN(1)) + simpleCheck(p) should be(true) + } 
+ + /** Unforgeable names */ + it should "test UPrivate" in { + val p = UPrivateN(bytesTest) + simpleCheck(p) should be(true) + } + + it should "test UDeployId" in { + val p = UDeployIdN(bytesTest) + simpleCheck(p) should be(true) + } + + it should "test UDeployerId" in { + val p = UDeployerIdN(bytesTest) + simpleCheck(p) should be(true) + } + + /** Connective */ + it should "test ConnBool" in { + val p = ConnBoolN + simpleCheck(p) should be(true) + } + + it should "test ConnInt" in { + val p = ConnIntN + simpleCheck(p) should be(true) + } + + it should "test ConnBigInt" in { + val p = ConnBigIntN + simpleCheck(p) should be(true) + } + + it should "test ConnString" in { + val p = ConnStringN + simpleCheck(p) should be(true) + } + + it should "test ConnUri" in { + val p = ConnUriN + simpleCheck(p) should be(true) + } + + it should "test ConnByteArray" in { + val p = ConnByteArrayN + simpleCheck(p) should be(true) + } + + it should "test ConnNotN" in { + val p = ConnNotN(SendN(NilN, NilN)) + simpleCheck(p) should be(true) + } + + it should "test ConnAndN" in { + val p = ConnAndN(WildcardN, SendN(NilN, NilN)) + simpleCheck(p) should be(true) + } + + it should "test ConnOrN" in { + val p = ConnOrN(WildcardN, SendN(NilN, NilN)) + simpleCheck(p) should be(true) + } + + it should "test ConnVarRefN" in { + val p = ConnVarRefN(0, 1) + simpleCheck(p) should be(true) + } + + /** Other types */ + it should "test Bundle" in { + val p = BundleN(NilN, writeFlag = true, readFlag = true) + simpleCheck(p) should be(true) + } + + it should "test SysAuthToken" in { + val p = USysAuthTokenN() + simpleCheck(p) should be(true) + } +} diff --git a/models/src/test/scala/coop/rchain/models/rholangn/SortingSpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/SortingSpec.scala new file mode 100644 index 00000000000..ce2a5ef9e59 --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/SortingSpec.scala @@ -0,0 +1,73 @@ +package coop.rchain.models.rholangn + +import 
org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +class SortingSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + @SuppressWarnings(Array("org.wartremover.warts.Return", "org.wartremover.warts.Var")) + def compareHashes(a: Array[Byte], b: Array[Byte]): Int = + if (a eq null) { + if (b eq null) 0 + else -1 + } else if (b eq null) 1 + else { + val L = math.min(a.length, b.length) + var i = 0 + while (i < L) { + if (a(i) < b(i)) return -1 + else if (b(i) < a(i)) return 1 + i += 1 + } + if (L < b.length) -1 + else if (L < a.length) 1 + else 0 + } + + it should "test sorting for ParProc" in { + val unsorted: Seq[GIntN] = Seq(GIntN(2), GIntN(5), GIntN(1), GIntN(3), GIntN(4), GIntN(2)) + val sorted = ParProcN(unsorted).sortedPs + val expected: Seq[GIntN] = unsorted.sortWith((a, b) => compareHashes(a.rhoHash, b.rhoHash) < 0) + sorted should be(expected) + } + + it should "test sorting for ESet" in { + val unsorted: Seq[GIntN] = Seq(GIntN(2), GIntN(5), GIntN(1), GIntN(3), GIntN(4)) + val sorted = ESetN(unsorted).sortedPs + val expected: Seq[GIntN] = unsorted.sortWith((a, b) => compareHashes(a.rhoHash, b.rhoHash) < 0) + sorted should be(expected.distinct) + } + + it should "test sorting for EMap>" in { + val unsorted: Seq[GIntN] = Seq(GIntN(2), GIntN(5), GIntN(1), GIntN(3), GIntN(4)) + val values = Seq.range(1, unsorted.length + 1).map(x => GIntN(x.toLong)) + val pars = unsorted zip values + val sorted = EMapN(pars).sortedPs + val expectedPars = pars.sortWith((a, b) => compareHashes(a._1.rhoHash, b._1.rhoHash) < 0) + sorted should be(expectedPars) + } + + it should "test sorting for receive binds" in { + val bind1 = ReceiveBindN(Seq(FreeVarN(41)), NilN, Some(BoundVarN(42)), 1) + val bind2 = ReceiveBindN(Seq(FreeVarN(42)), NilN, Some(BoundVarN(42)), 1) + val bind3 = ReceiveBindN(Seq(FreeVarN(43)), NilN, Some(BoundVarN(42)), 1) + val bind4 = 
ReceiveBindN(Seq(FreeVarN(44)), NilN, Some(BoundVarN(42)), 1) + val bind5 = ReceiveBindN(Seq(FreeVarN(45)), NilN, Some(BoundVarN(42)), 1) + val unsortedBinds = Seq(bind1, bind2, bind3, bind4, bind5) + val sorted = parmanager.Manager.sortBinds(unsortedBinds) + val expected = unsortedBinds.sortWith((a, b) => compareHashes(a.rhoHash, b.rhoHash) < 0) + sorted should be(expected) + + val bind1WithT = (bind1, 1) + val bind2WithT = (bind2, 2) + val bind3WithT = (bind3, 3) + val bind4WithT = (bind4, 4) + val bind5WithT = (bind5, 5) + val unsortedWithT = Seq(bind1WithT, bind2WithT, bind3WithT, bind4WithT, bind5WithT) + val sortedWithT = parmanager.Manager.sortBindsWithT(unsortedWithT) + val expectedWithT = + unsortedWithT.sortWith((a, b) => compareHashes(a._1.rhoHash, b._1.rhoHash) < 0) + sortedWithT should be(expectedWithT) + } + +} diff --git a/models/src/test/scala/coop/rchain/models/rholangn/StackSafetySpec.scala b/models/src/test/scala/coop/rchain/models/rholangn/StackSafetySpec.scala new file mode 100644 index 00000000000..456cb781425 --- /dev/null +++ b/models/src/test/scala/coop/rchain/models/rholangn/StackSafetySpec.scala @@ -0,0 +1,80 @@ +package coop.rchain.models.rholangn + +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval +import coop.rchain.models.rholangn.parmanager.{Manager, Serialization} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import scala.annotation.tailrec +import scala.collection.immutable.Seq + +class StackSafetySpec extends AnyFlatSpec with Matchers { + + def findMaxRecursionDepth(): Int = { + def count(i: Int): Int = + try { + count(i + 1) //apparently, the try-catch is enough for tailrec to not work. Lucky! 
+ } catch { + case _: StackOverflowError => i + } + println("About to find max recursion depth for this test run") + val maxDepth = count(0) + println(s"Calculated max recursion depth is $maxDepth") + // Because of OOM errors on CI depth recursion is limited + val maxDepthLimited = Math.min(200, maxDepth) + println(s"Used recursion depth is limited to $maxDepthLimited") + maxDepthLimited + } + + "Rholang par" should "not blow up on a huge structure with List" in { + import coop.rchain.models.Expr.ExprInstance.GInt + import coop.rchain.models._ + import coop.rchain.models.rholang.implicits._ + import coop.rchain.models.serialization.implicits._ + import coop.rchain.shared.Serialize + + @tailrec + def hugePar(n: Int, par: Par = Par(exprs = Seq(GInt(0)))): Par = + if (n == 0) par + else hugePar(n - 1, Par(exprs = Seq(EList(Seq(par))))) + + val maxRecursionDepth: Int = findMaxRecursionDepth() + val par = hugePar(maxRecursionDepth) + val anotherPar = hugePar(maxRecursionDepth) + + noException shouldBe thrownBy { + ProtoM.serializedSize(par).value + + val encoded = Serialize[Par].encode(par) + Serialize[Par].decode(encoded) + + HashM[Par].hash[Eval](par).value + par.hashCode() + + EqualM[Par].equal[Eval](par, anotherPar).value + par == anotherPar + } + } + + "RholangN par" should "not blow up on a huge structure with List" in { + + @tailrec + def hugePar(n: Int, par: ParN = GIntN(0)): ParN = + if (n == 0) par + else hugePar(n - 1, EListN(par)) + + val maxRecursionDepth: Int = findMaxRecursionDepth() + val par = hugePar(maxRecursionDepth) + val anotherPar = hugePar(maxRecursionDepth) + noException shouldBe thrownBy { + val sData = par.serialized.value + val decoded = Manager.protoDeserialize(sData) + assert(par == decoded) + assert(par.rhoHash sameElements anotherPar.rhoHash) + assert(par.serializedSize.value == anotherPar.serializedSize.value) + assert(par == anotherPar) + par == anotherPar + } + } +} diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/PrettyPrinter.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/PrettyPrinter.scala index 5a2410f22ea..0fc3f66199e 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/PrettyPrinter.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/PrettyPrinter.scala @@ -18,6 +18,8 @@ import coop.rchain.models.GUnforgeable.UnfInstance.{ } import coop.rchain.shared.{Base16, Printer} import cats.Eval +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ object PrettyPrinter { def apply(): PrettyPrinter = PrettyPrinter(0, 0) @@ -50,6 +52,7 @@ final case class PrettyPrinter( def buildString(e: Expr): String = buildStringM(e).value.cap() def buildString(v: Var): String = buildStringM(v).value.cap() def buildString(m: GeneratedMessage): String = buildStringM(m).value.cap() + def buildString(p: ParN): String = buildStringM(toProto(p)).value.cap() def buildChannelString(p: Par): String = buildChannelStringM(p).value.cap() @SuppressWarnings(Array("org.wartremover.warts.Throw")) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/Compiler.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/Compiler.scala index 18ae07b665e..9b012b0a068 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/Compiler.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/Compiler.scala @@ -4,8 +4,9 @@ import cats.effect.Sync import cats.syntax.all._ import coop.rchain.models.Connective.ConnectiveInstance import coop.rchain.models.Par -import coop.rchain.models.rholang.implicits.VectorPar import coop.rchain.models.rholang.sorter.Sortable +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.Proc import coop.rchain.rholang.ast.rholang_mercury.{parser, Yylex} import coop.rchain.rholang.interpreter.errors._ @@ 
-79,7 +80,7 @@ object Compiler { ProcNormalizeMatcher .normalizeMatch[F]( term, - ProcVisitInputs(VectorPar(), BoundMapChain.empty, FreeMap.empty) + ProcVisitInputs(NilN, BoundMapChain.empty, FreeMap.empty) ) .flatMap { normalizedTerm => if (normalizedTerm.freeMap.count > 0) { @@ -91,12 +92,12 @@ object Compiler { TopLevelFreeVariablesNotAllowedError(topLevelFreeList.mkString(", ")) ) } else if (normalizedTerm.freeMap.connectives.nonEmpty) { - def connectiveInstanceToString(conn: ConnectiveInstance): String = - if (conn.isConnAndBody) "/\\ (conjunction)" - else if (conn.isConnOrBody) "\\/ (disjunction)" - else if (conn.isConnNotBody) "~ (negation)" - else conn.toString - + def connectiveInstanceToString(conn: ConnectiveN): String = conn match { + case _: ConnAndN => "/\\ (conjunction)" + case _: ConnOrN => "\\/ (disjunction)" + case _: ConnNotN => "~ (negation)" + case x => x.toString + } val connectives = normalizedTerm.freeMap.connectives .map { case (connType, sourcePosition) => @@ -112,7 +113,7 @@ object Compiler { TopLevelWildcardsNotAllowedError(topLevelWildcardList.mkString(", ")) ) } - } else normalizedTerm.par.pure[F] + } else toProto(normalizedTerm.par).pure[F] } /** diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/FreeMap.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/FreeMap.scala index d6d01e89180..3c3e95b3a95 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/FreeMap.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/FreeMap.scala @@ -1,6 +1,6 @@ package coop.rchain.rholang.interpreter.compiler -import coop.rchain.models.Connective.ConnectiveInstance +import coop.rchain.models.rholangn._ /** * A structure to keep track of free variables using de Bruijn levels (0 based). 
@@ -15,7 +15,7 @@ final case class FreeMap[T]( nextLevel: Int, levelBindings: Map[String, FreeContext[T]], wildcards: List[SourcePosition], - connectives: List[(ConnectiveInstance, SourcePosition)] + connectives: List[(ConnectiveN, SourcePosition)] ) { def get(name: String): Option[FreeContext[T]] = levelBindings.get(name) @@ -62,7 +62,7 @@ final case class FreeMap[T]( FreeMap(nextLevel, levelBindings, wildcards :+ sourcePosition, connectives) def addConnective( - connective: ConnectiveInstance, + connective: ConnectiveN, sourcePosition: SourcePosition ): FreeMap[T] = FreeMap( diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/ReceiveBindsSortMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/ReceiveBindsSortMatcher.scala index dae46c49d56..9a25141cf2d 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/ReceiveBindsSortMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/ReceiveBindsSortMatcher.scala @@ -1,11 +1,10 @@ package coop.rchain.rholang.interpreter.compiler import cats.effect.Sync -import coop.rchain.models.rholang.sorter.ReceiveSortMatcher.sortBind -import coop.rchain.models.{Par, ReceiveBind, Var} import cats.syntax.all._ +import coop.rchain.models.rholang.sorter.ReceiveSortMatcher.sortBind import coop.rchain.models.rholang.sorter._ -import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.{Par, ReceiveBind, Var} object ReceiveBindsSortMatcher { // Used during normalize to presort the binds. 
diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalize.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalize.scala index 3643d797e79..8dcdb9c4863 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalize.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalize.scala @@ -3,10 +3,10 @@ package coop.rchain.rholang.interpreter.compiler import cats.effect.Sync import cats.syntax.all._ import coop.rchain.models._ -import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ -import coop.rchain.rholang.interpreter.errors._ import coop.rchain.rholang.interpreter.compiler.normalizer.processes._ +import coop.rchain.rholang.interpreter.errors._ sealed trait VarSort case object ProcSort extends VarSort @@ -20,14 +20,16 @@ object ProcNormalizeMatcher { def normalizeMatch[F[_]: Sync](p: Proc, input: ProcVisitInputs)( implicit env: Map[String, Par] ): F[ProcVisitOutputs] = Sync[F].defer { - def unaryExp[T](subProc: Proc, input: ProcVisitInputs, constructor: Par => T)( - implicit toExprInstance: T => Expr + def unaryExp( + subProc: Proc, + input: ProcVisitInputs, + constructor: ParN => ExprN ): F[ProcVisitOutputs] = - normalizeMatch[F](subProc, input.copy(par = VectorPar())) + normalizeMatch[F](subProc, input.copy(par = NilN)) .map( subResult => ProcVisitOutputs( - input.par.prepend(constructor(subResult.par), input.boundMapChain.depth), + ParN.combine(input.par, constructor(subResult.par)), subResult.freeMap ) ) @@ -36,16 +38,16 @@ object ProcNormalizeMatcher { subProcLeft: Proc, subProcRight: Proc, input: ProcVisitInputs, - constructor: (Par, Par) => T - )(implicit toExprInstance: T => Expr): F[ProcVisitOutputs] = + constructor: (ParN, ParN) => ExprN + ): F[ProcVisitOutputs] = for { - leftResult <- normalizeMatch[F](subProcLeft, input.copy(par = VectorPar())) + leftResult <- 
normalizeMatch[F](subProcLeft, input.copy(par = NilN)) rightResult <- normalizeMatch[F]( subProcRight, - input.copy(par = VectorPar(), freeMap = leftResult.freeMap) + input.copy(par = NilN, freeMap = leftResult.freeMap) ) } yield ProcVisitOutputs( - input.par.prepend(constructor(leftResult.par, rightResult.par), input.boundMapChain.depth), + ParN.combine(input.par, constructor(leftResult.par, rightResult.par)), rightResult.freeMap ) @@ -82,32 +84,31 @@ object ProcNormalizeMatcher { case p: PMethod => PMethodNormalizer.normalize(p, input) - case p: PNot => unaryExp(p.proc_, input, ENot.apply) - case p: PNeg => unaryExp(p.proc_, input, ENeg.apply) - - case p: PMult => binaryExp(p.proc_1, p.proc_2, input, EMult.apply) - case p: PDiv => binaryExp(p.proc_1, p.proc_2, input, EDiv.apply) - case p: PMod => binaryExp(p.proc_1, p.proc_2, input, EMod.apply) - case p: PPercentPercent => binaryExp(p.proc_1, p.proc_2, input, EPercentPercent.apply) - case p: PAdd => binaryExp(p.proc_1, p.proc_2, input, EPlus.apply) - case p: PMinus => binaryExp(p.proc_1, p.proc_2, input, EMinus.apply) - case p: PPlusPlus => binaryExp(p.proc_1, p.proc_2, input, EPlusPlus.apply) - case p: PMinusMinus => binaryExp(p.proc_1, p.proc_2, input, EMinusMinus.apply) - - case p: PLt => binaryExp(p.proc_1, p.proc_2, input, ELt.apply) - case p: PLte => binaryExp(p.proc_1, p.proc_2, input, ELte.apply) - case p: PGt => binaryExp(p.proc_1, p.proc_2, input, EGt.apply) - case p: PGte => binaryExp(p.proc_1, p.proc_2, input, EGte.apply) - - case p: PEq => binaryExp(p.proc_1, p.proc_2, input, EEq.apply) - case p: PNeq => binaryExp(p.proc_1, p.proc_2, input, ENeq.apply) - - case p: PAnd => binaryExp(p.proc_1, p.proc_2, input, EAnd.apply) - case p: POr => binaryExp(p.proc_1, p.proc_2, input, EOr.apply) - case p: PShortAnd => binaryExp(p.proc_1, p.proc_2, input, EShortAnd.apply) - case p: PShortOr => binaryExp(p.proc_1, p.proc_2, input, EShortOr.apply) - case p: PMatches => - PMatchesNormalizer.normalize(p, input) + 
case p: PNot => unaryExp(p.proc_, input, ENotN.apply) + case p: PNeg => unaryExp(p.proc_, input, ENegN.apply) + + case p: PMult => binaryExp(p.proc_1, p.proc_2, input, EMultN.apply) + case p: PDiv => binaryExp(p.proc_1, p.proc_2, input, EDivN.apply) + case p: PMod => binaryExp(p.proc_1, p.proc_2, input, EModN.apply) + case p: PPercentPercent => binaryExp(p.proc_1, p.proc_2, input, EPercentPercentN.apply) + case p: PAdd => binaryExp(p.proc_1, p.proc_2, input, EPlusN.apply) + case p: PMinus => binaryExp(p.proc_1, p.proc_2, input, EMinusN.apply) + case p: PPlusPlus => binaryExp(p.proc_1, p.proc_2, input, EPlusPlusN.apply) + case p: PMinusMinus => binaryExp(p.proc_1, p.proc_2, input, EMinusMinusN.apply) + + case p: PLt => binaryExp(p.proc_1, p.proc_2, input, ELtN.apply) + case p: PLte => binaryExp(p.proc_1, p.proc_2, input, ELteN.apply) + case p: PGt => binaryExp(p.proc_1, p.proc_2, input, EGtN.apply) + case p: PGte => binaryExp(p.proc_1, p.proc_2, input, EGteN.apply) + + case p: PEq => binaryExp(p.proc_1, p.proc_2, input, EEqN.apply) + case p: PNeq => binaryExp(p.proc_1, p.proc_2, input, ENeqN.apply) + + case p: PAnd => binaryExp(p.proc_1, p.proc_2, input, EAndN.apply) + case p: POr => binaryExp(p.proc_1, p.proc_2, input, EOrN.apply) + case p: PShortAnd => binaryExp(p.proc_1, p.proc_2, input, EShortAndN.apply) + case p: PShortOr => binaryExp(p.proc_1, p.proc_2, input, EShortOrN.apply) + case p: PMatches => PMatchesNormalizer.normalize(p, input) case p: PExprs => normalizeMatch[F](p.proc_, input) @@ -141,12 +142,12 @@ object ProcNormalizeMatcher { case p: PIf => PIfNormalizer - .normalize(p.proc_1, p.proc_2, new PNil(), input.copy(par = VectorPar())) - .map(n => n.copy(par = n.par ++ input.par)) + .normalize(p.proc_1, p.proc_2, new PNil(), input.copy(par = NilN)) + .map(n => n.copy(par = ParN.combine(n.par, input.par))) case p: PIfElse => PIfNormalizer - .normalize(p.proc_1, p.proc_2, p.proc_3, input.copy(par = VectorPar())) - .map(n => n.copy(par = n.par ++ 
input.par)) + .normalize(p.proc_1, p.proc_2, p.proc_3, input.copy(par = NilN)) + .map(n => n.copy(par = ParN.combine(n.par, input.par))) case _ => Sync[F].raiseError( @@ -165,18 +166,18 @@ object ProcNormalizeMatcher { * @param knownFree */ final case class ProcVisitInputs( - par: Par, + par: ParN, boundMapChain: BoundMapChain[VarSort], freeMap: FreeMap[VarSort] ) // Returns the update Par and an updated map of free variables. -final case class ProcVisitOutputs(par: Par, freeMap: FreeMap[VarSort]) +final case class ProcVisitOutputs(par: ParN, freeMap: FreeMap[VarSort]) final case class NameVisitInputs(boundMapChain: BoundMapChain[VarSort], freeMap: FreeMap[VarSort]) -final case class NameVisitOutputs(par: Par, freeMap: FreeMap[VarSort]) +final case class NameVisitOutputs(par: ParN, freeMap: FreeMap[VarSort]) final case class CollectVisitInputs( boundMapChain: BoundMapChain[VarSort], freeMap: FreeMap[VarSort] ) -final case class CollectVisitOutputs(expr: Expr, freeMap: FreeMap[VarSort]) +final case class CollectVisitOutputs(expr: ExprN, freeMap: FreeMap[VarSort]) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolNormalizeMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolNormalizeMatcher.scala index f3e115d251b..840b5c36309 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolNormalizeMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolNormalizeMatcher.scala @@ -1,12 +1,13 @@ package coop.rchain.rholang.interpreter.compiler.normalizer import coop.rchain.models.Expr.ExprInstance.GBool +import coop.rchain.models.rholangn.GBoolN import coop.rchain.rholang.ast.rholang_mercury.Absyn.{BoolFalse, BoolLiteral, BoolTrue} object BoolNormalizeMatcher { - def normalizeMatch(b: BoolLiteral): GBool = + def normalizeMatch(b: BoolLiteral): GBoolN = b match { - case _: BoolTrue => GBool(true) - case _: BoolFalse 
=> GBool(false) + case _: BoolTrue => GBoolN(true) + case _: BoolFalse => GBoolN(false) } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectionNormalizeMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectionNormalizeMatcher.scala index 5dd54944bfd..bb9099c72d2 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectionNormalizeMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectionNormalizeMatcher.scala @@ -2,52 +2,36 @@ package coop.rchain.rholang.interpreter.compiler.normalizer import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.rholang.implicits._ -import coop.rchain.models.{ - AlwaysEqual, - EList, - ETuple, - Expr, - HasLocallyFree, - Par, - ParMap, - ParSet, - Var -} +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ +import coop.rchain.models.{Par, Var} import coop.rchain.rholang.ast.rholang_mercury.Absyn.{KeyValuePair => AbsynKeyValuePair, _} import coop.rchain.rholang.interpreter.compiler._ -import cats.Eval import scala.jdk.CollectionConverters._ -import scala.collection.immutable.{BitSet, Vector} object CollectionNormalizeMatcher { def normalizeMatch[F[_]: Sync](c: Collection, input: CollectVisitInputs)( implicit env: Map[String, Par] ): F[CollectVisitOutputs] = { - def foldMatch[T]( + def foldMatch( knownFree: FreeMap[VarSort], listproc: List[Proc], - constructor: (Seq[Par], AlwaysEqual[BitSet], Boolean) => T - )(implicit toExpr: T => Expr): F[CollectVisitOutputs] = { - val init = (Vector[Par](), knownFree, BitSet(), false) + constructor: Seq[ParN] => ExprN + ): F[CollectVisitOutputs] = { + val init = (Vector[ParN](), knownFree) listproc .foldM(init) { (acc, proc) => ProcNormalizeMatcher - .normalizeMatch[F](proc, ProcVisitInputs(VectorPar(), input.boundMapChain, acc._2)) + .normalizeMatch[F](proc, 
ProcVisitInputs(NilN, input.boundMapChain, acc._2)) .map { result => - ( - result.par +: acc._1, - result.freeMap, - acc._3 | result.par.locallyFree, - acc._4 || result.par.connectiveUsed - ) + (result.par +: acc._1, result.freeMap) } } .map { - case (ps, resultKnownFree, locallyFree, connectiveUsed) => + case (ps, resultKnownFree) => CollectVisitOutputs( - constructor(ps.reverse, locallyFree, connectiveUsed), + constructor(ps.reverse), resultKnownFree ) } @@ -58,7 +42,7 @@ object CollectionNormalizeMatcher { remainder: Option[Var], listProc: List[AbsynKeyValuePair] ): F[CollectVisitOutputs] = { - val init = (Vector[(Par, Par)](), knownFree, BitSet(), false) + val init = (Seq[(ParN, ParN)](), knownFree) listProc .foldM(init) { (acc, e) => e match { @@ -66,34 +50,26 @@ object CollectionNormalizeMatcher { for { keyResult <- ProcNormalizeMatcher.normalizeMatch[F]( e.proc_1, - ProcVisitInputs(VectorPar(), input.boundMapChain, acc._2) + ProcVisitInputs(NilN, input.boundMapChain, acc._2) ) valResult <- ProcNormalizeMatcher.normalizeMatch[F]( e.proc_2, - ProcVisitInputs(VectorPar(), input.boundMapChain, keyResult.freeMap) + ProcVisitInputs( + NilN, + input.boundMapChain, + keyResult.freeMap + ) ) } yield ( - Vector((keyResult.par, valResult.par)) ++ acc._1, - valResult.freeMap, - acc._3 | keyResult.par.locallyFree | valResult.par.locallyFree, - acc._4 || keyResult.par.connectiveUsed || valResult.par.connectiveUsed + Seq((keyResult.par, valResult.par)) ++ acc._1, + valResult.freeMap ) - } } .map { folded => - val resultKnownFree = folded._2 - val remainderConnectiveUsed = remainder.exists(HasLocallyFree[Var].connectiveUsed(_)) - val remainderLocallyFree = - remainder.map(HasLocallyFree[Var].locallyFree(_, depth = 0)).getOrElse(BitSet()) - + val resultKnownFree = folded._2 CollectVisitOutputs( - ParMap( - seq = folded._1.reverse, - connectiveUsed = folded._4 || remainderConnectiveUsed, - locallyFree = folded._3 | remainderLocallyFree, - remainder = remainder - ), + 
EMapN(folded._1.reverse, remainder.map(fromProtoVarOpt)), resultKnownFree ) } @@ -105,15 +81,8 @@ object CollectionNormalizeMatcher { .normalizeMatchProc[F](cl.procremainder_, input.freeMap) .flatMap { case (optionalRemainder, knownFree) => - val constructor: Option[Var] => (Seq[Par], AlwaysEqual[BitSet], Boolean) => EList = - optionalRemainder => - (ps, lf, cu) => { - val tmpEList = EList(ps, lf, cu, optionalRemainder) - tmpEList.withConnectiveUsed( - tmpEList.connectiveUsed || optionalRemainder.isDefined - ) - } - + val constructor: Option[VarN] => Seq[ParN] => ExprN = + optionalRemainder => ps => EListN(ps, optionalRemainder) foldMatch(knownFree, cl.listproc_.asScala.toList, constructor(optionalRemainder)) } @@ -122,23 +91,15 @@ object CollectionNormalizeMatcher { case ts: TupleSingle => Seq(ts.proc_) case tm: TupleMultiple => Seq(tm.proc_) ++ tm.listproc_.asScala.toList } - foldMatch(input.freeMap, ps.toList, ETuple.apply) + foldMatch(input.freeMap, ps.toList, ETupleN.apply) case cs: CollectSet => RemainderNormalizeMatcher .normalizeMatchProc[F](cs.procremainder_, input.freeMap) .flatMap { case (optionalRemainder, knownFree) => - val constructor: Option[Var] => (Seq[Par], AlwaysEqual[BitSet], Boolean) => ParSet = - optionalRemainder => - (pars, locallyFree, connectiveUsed) => { - val tmpParSet = - ParSet(pars, connectiveUsed, Eval.later(locallyFree.get), optionalRemainder) - tmpParSet.copy( - connectiveUsed = tmpParSet.connectiveUsed || optionalRemainder.isDefined - ) - } - + val constructor: Option[VarN] => Seq[ParN] => ExprN = + optionalRemainder => pars => ESetN(pars, optionalRemainder) foldMatch(knownFree, cs.listproc_.asScala.toList, constructor(optionalRemainder)) } @@ -147,7 +108,11 @@ object CollectionNormalizeMatcher { .normalizeMatchProc[F](cm.procremainder_, input.freeMap) .flatMap { case (optionalRemainder, knownFree) => - foldMatchMap(knownFree, optionalRemainder, cm.listkeyvaluepair_.asScala.toList) + foldMatchMap( + knownFree, + 
optionalRemainder.map(toProtoVarOpt), + cm.listkeyvaluepair_.asScala.toList + ) } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundNormalizeMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundNormalizeMatcher.scala index 181e339addd..03e4f547ad6 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundNormalizeMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundNormalizeMatcher.scala @@ -2,28 +2,31 @@ package coop.rchain.rholang.interpreter.compiler.normalizer import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.Expr -import coop.rchain.models.Expr.ExprInstance.{GBigInt, GInt, GString, GUri} +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ import coop.rchain.rholang.interpreter.errors.NormalizerError object GroundNormalizeMatcher { - def normalizeMatch[F[_]: Sync](g: Ground): F[Expr] = - g match { - case gb: GroundBool => Expr(BoolNormalizeMatcher.normalizeMatch(gb.boolliteral_)).pure[F] + def normalizeMatch[F[_]: Sync](g: Ground): F[ExprN] = { + val ground: F[ExprN] = g match { + case gb: GroundBool => + Sync[F].pure(BoolNormalizeMatcher.normalizeMatch(gb.boolliteral_)) case gi: GroundInt => Sync[F] .delay(gi.longliteral_.toLong) .adaptError { case e: NumberFormatException => NormalizerError(e.getMessage) } - .map(long => Expr(GInt(long))) + .map(long => GIntN(long)) case gbi: GroundBigInt => Sync[F] .delay(BigInt(gbi.longliteral_)) .adaptError { case e: NumberFormatException => NormalizerError(e.getMessage) } - .map(bigInt => Expr(GBigInt(bigInt))) - case gs: GroundString => Expr(GString(stripString(gs.stringliteral_))).pure[F] - case gu: GroundUri => Expr(GUri(stripUri(gu.uriliteral_))).pure[F] + .map(bigInt => GBigIntN(bigInt)) + case gs: GroundString => 
Sync[F].pure(GStringN(stripString(gs.stringliteral_))) + case gu: GroundUri => Sync[F].pure(GUriN(stripUri(gu.uriliteral_))) } + ground + } // This is necessary to remove the backticks. We don't use a regular // expression because they're always there. def stripUri(raw: String): String = { diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameNormalizeMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameNormalizeMatcher.scala index 8825ab5ff34..dff8a6cb4ac 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameNormalizeMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameNormalizeMatcher.scala @@ -2,21 +2,11 @@ package coop.rchain.rholang.interpreter.compiler.normalizer import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.Var.VarInstance.{BoundVar, FreeVar, Wildcard} -import coop.rchain.models.rholang.implicits.{VectorPar, _} -import coop.rchain.models.{EVar, Par, Var} +import coop.rchain.models.Par +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.{Name, NameQuote, NameVar, NameWildcard} -import coop.rchain.rholang.interpreter.compiler.{ - BoundContext, - FreeContext, - NameSort, - NameVisitInputs, - NameVisitOutputs, - ProcNormalizeMatcher, - ProcSort, - ProcVisitInputs, - SourcePosition -} +import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.errors.{ UnexpectedNameContext, UnexpectedReuseOfNameContextFree @@ -30,11 +20,11 @@ object NameNormalizeMatcher { case wc: NameWildcard => val wildcardBindResult = input.freeMap.addWildcard(SourcePosition(wc.line_num, wc.col_num)) - NameVisitOutputs(EVar(Wildcard(Var.WildcardMsg())), wildcardBindResult).pure[F] + NameVisitOutputs(WildcardN, wildcardBindResult).pure[F] case n: NameVar => input.boundMapChain.get(n.var_) 
match { case Some(BoundContext(level, NameSort, _)) => { - NameVisitOutputs(EVar(BoundVar(level)), input.freeMap).pure[F] + NameVisitOutputs(BoundVarN(level), input.freeMap).pure[F] } case Some(BoundContext(_, ProcSort, sourcePosition)) => { Sync[F].raiseError( @@ -46,7 +36,8 @@ object NameNormalizeMatcher { case None => val newBindingsPair = input.freeMap.put((n.var_, NameSort, SourcePosition(n.line_num, n.col_num))) - NameVisitOutputs(EVar(FreeVar(input.freeMap.nextLevel)), newBindingsPair).pure[F] + NameVisitOutputs(FreeVarN(input.freeMap.nextLevel), newBindingsPair) + .pure[F] case Some(FreeContext(_, _, sourcePosition)) => Sync[F].raiseError( UnexpectedReuseOfNameContextFree( @@ -59,16 +50,13 @@ object NameNormalizeMatcher { } } - case n: NameQuote => { + case n: NameQuote => ProcNormalizeMatcher - .normalizeMatch[F]( - n.proc_, - ProcVisitInputs(VectorPar(), input.boundMapChain, input.freeMap) - ) + .normalizeMatch[F](n.proc_, ProcVisitInputs(NilN, input.boundMapChain, input.freeMap)) .map( procVisitResult => NameVisitOutputs(procVisitResult.par, procVisitResult.freeMap) ) - } + } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/RemainderNormalizeMatcher.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/RemainderNormalizeMatcher.scala index da323cd1b7b..711f62269a5 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/RemainderNormalizeMatcher.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/RemainderNormalizeMatcher.scala @@ -3,36 +3,22 @@ package coop.rchain.rholang.interpreter.compiler.normalizer import cats.effect.Sync import cats.syntax.all._ import coop.rchain.models.Var -import coop.rchain.models.Var.VarInstance.{FreeVar, Wildcard} -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{ - NameRemainder, - NameRemainderEmpty, - NameRemainderVar, - ProcRemainder, - ProcRemainderEmpty, - ProcRemainderVar, - 
ProcVar, - ProcVarVar, - ProcVarWildcard -} -import coop.rchain.rholang.interpreter.compiler.{ - FreeContext, - FreeMap, - ProcSort, - SourcePosition, - VarSort -} +import coop.rchain.models.Var.VarInstance.FreeVar +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn._ +import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.errors.UnexpectedReuseOfProcContextFree object RemainderNormalizeMatcher { def handleProcVar[F[_]: Sync]( pv: ProcVar, knownFree: FreeMap[VarSort] - ): F[(Option[Var], FreeMap[VarSort])] = + ): F[(Option[VarN], FreeMap[VarSort])] = pv match { case pvw: ProcVarWildcard => ( - Option(Var(Wildcard(Var.WildcardMsg()))), + Option(WildcardN: VarN), knownFree.addWildcard(SourcePosition(pvw.line_num, pvw.col_num)) ).pure[F] case pvv: ProcVarVar => @@ -40,7 +26,7 @@ object RemainderNormalizeMatcher { knownFree.get(pvv.var_) match { case None => val newBindingsPair = knownFree.put((pvv.var_, ProcSort, sourcePosition)) - (Option(Var(FreeVar(knownFree.nextLevel))), newBindingsPair).pure[F] + (Option(FreeVarN(knownFree.nextLevel): VarN), newBindingsPair).pure[F] case Some(FreeContext(_, _, firstSourcePosition)) => Sync[F].raiseError( UnexpectedReuseOfProcContextFree(pvv.var_, firstSourcePosition, sourcePosition) @@ -51,9 +37,9 @@ object RemainderNormalizeMatcher { def normalizeMatchProc[F[_]: Sync]( r: ProcRemainder, knownFree: FreeMap[VarSort] - ): F[(Option[Var], FreeMap[VarSort])] = + ): F[(Option[VarN], FreeMap[VarSort])] = r match { - case _: ProcRemainderEmpty => (None: Option[Var], knownFree).pure[F] + case _: ProcRemainderEmpty => (None: Option[VarN], knownFree).pure[F] case pr: ProcRemainderVar => handleProcVar[F](pr.procvar_, knownFree) } @@ -61,9 +47,9 @@ object RemainderNormalizeMatcher { def normalizeMatchName[F[_]: Sync]( nr: NameRemainder, knownFree: FreeMap[VarSort] - ): F[(Option[Var], FreeMap[VarSort])] = + ): 
F[(Option[VarN], FreeMap[VarSort])] = nr match { - case _: NameRemainderEmpty => (None: Option[Var], knownFree).pure[F] + case _: NameRemainderEmpty => (None: Option[VarN], knownFree).pure[F] case nr: NameRemainderVar => handleProcVar[F](nr.procvar_, knownFree) } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PBundleNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PBundleNormalizer.scala index 2d37b637dd5..682d7ab01ba 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PBundleNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PBundleNormalizer.scala @@ -1,9 +1,10 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{BundleOps, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn._ import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ FreeContext, @@ -12,14 +13,6 @@ import coop.rchain.rholang.interpreter.compiler.{ SourcePosition } import coop.rchain.rholang.interpreter.errors.UnexpectedBundleContent -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{ - BundleEquiv, - BundleRead, - BundleReadWrite, - BundleWrite, - PBundle -} -import coop.rchain.models.Bundle object PBundleNormalizer { def normalize[F[_]: Sync](b: PBundle, input: ProcVisitInputs)( @@ -50,16 +43,24 @@ object PBundleNormalizer { ) } - import BundleOps._ + def connectivesExistOnTop(p: ParN): Boolean = + p match { + case _: ConnectiveN => true + case pProc: ParProcN => pProc.ps.exists(connectivesExistOnTop) + case _ => false + } + for { - targetResult <- 
normalizeMatch[F](b.proc_, input.copy(par = VectorPar())) + targetResult <- normalizeMatch[F](b.proc_, input.copy(par = NilN)) + target = targetResult.par outermostBundle = b.bundle_ match { - case _: BundleReadWrite => Bundle(targetResult.par, writeFlag = true, readFlag = true) - case _: BundleRead => Bundle(targetResult.par, writeFlag = false, readFlag = true) - case _: BundleWrite => Bundle(targetResult.par, writeFlag = true, readFlag = false) - case _: BundleEquiv => Bundle(targetResult.par, writeFlag = false, readFlag = false) + case _: BundleReadWrite => BundleN(target, writeFlag = true, readFlag = true) + case _: BundleRead => BundleN(target, writeFlag = false, readFlag = true) + case _: BundleWrite => BundleN(target, writeFlag = true, readFlag = false) + case _: BundleEquiv => BundleN(target, writeFlag = false, readFlag = false) } - res <- if (targetResult.par.connectives.nonEmpty) { + + res <- if (connectivesExistOnTop(target)) { Sync[F].raiseError( UnexpectedBundleContent( s"Illegal top level connective in bundle at position: line: ${b.line_num}, column: ${b.col_num}." 
@@ -68,11 +69,12 @@ object PBundleNormalizer { } else if (targetResult.freeMap.wildcards.nonEmpty || targetResult.freeMap.levelBindings.nonEmpty) { error(targetResult) } else { - val newBundle: Bundle = targetResult.par.singleBundle() match { - case Some(single) => outermostBundle.merge(single) - case None => outermostBundle + val newBundle: BundleN = target match { + case b: BundleN => outermostBundle.merge(b) + case _ => outermostBundle } - ProcVisitOutputs(input.par.prepend(newBundle), input.freeMap).pure[F] + val outPar: ParN = ParN.combine(input.par, newBundle) + ProcVisitOutputs(outPar, input.freeMap).pure[F] } } yield res } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PCollectNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PCollectNormalizer.scala index 62f87d4f0d9..5a9c681b920 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PCollectNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PCollectNormalizer.scala @@ -1,16 +1,16 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync +import cats.syntax.all._ import coop.rchain.models.Par -import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.rholangn.ParN +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PCollect +import coop.rchain.rholang.interpreter.compiler.normalizer.CollectionNormalizeMatcher import coop.rchain.rholang.interpreter.compiler.{ CollectVisitInputs, ProcVisitInputs, ProcVisitOutputs } -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PCollect -import coop.rchain.rholang.interpreter.compiler.normalizer.CollectionNormalizeMatcher object PCollectNormalizer { def normalize[F[_]: Sync](p: PCollect, input: ProcVisitInputs)( @@ -18,11 +18,8 @@ object PCollectNormalizer { ): F[ProcVisitOutputs] = 
CollectionNormalizeMatcher .normalizeMatch[F](p.collection_, CollectVisitInputs(input.boundMapChain, input.freeMap)) - .map( - collectResult => - ProcVisitOutputs( - input.par.prepend(collectResult.expr, input.boundMapChain.depth), - collectResult.freeMap - ) - ) + .map { collectResult => + val expr = collectResult.expr + ProcVisitOutputs(ParN.combine(input.par, expr), collectResult.freeMap) + } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PConjunctionNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PConjunctionNormalizer.scala index 18646d8300f..6e8cddaf7d1 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PConjunctionNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PConjunctionNormalizer.scala @@ -1,15 +1,12 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Connective.ConnectiveInstance.ConnAndBody -import coop.rchain.models.{Connective, ConnectiveBody, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PConjunction import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs, SourcePosition} -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PConjunction - -import scala.collection.immutable.Vector object PConjunctionNormalizer { def normalize[F[_]: Sync](p: PConjunction, input: ProcVisitInputs)( @@ -18,24 +15,22 @@ object PConjunctionNormalizer { for { leftResult <- normalizeMatch[F]( p.proc_1, - ProcVisitInputs(VectorPar(), input.boundMapChain, input.freeMap) + ProcVisitInputs(NilN, 
input.boundMapChain, input.freeMap) ) rightResult <- normalizeMatch[F]( p.proc_2, - ProcVisitInputs(VectorPar(), input.boundMapChain, leftResult.freeMap) + ProcVisitInputs(NilN, input.boundMapChain, leftResult.freeMap) ) lp = leftResult.par - resultConnective = lp.singleConnective() match { - case Some(Connective(ConnAndBody(ConnectiveBody(ps)))) => - Connective(ConnAndBody(ConnectiveBody(ps :+ rightResult.par))) - case _ => - Connective(ConnAndBody(ConnectiveBody(Vector(lp, rightResult.par)))) - } + rp = rightResult.par + + resultConnective = ConnAndN(Seq(lp, rp)) + } yield ProcVisitOutputs( - input.par.prepend(resultConnective, input.boundMapChain.depth), + ParN.combine(input.par, resultConnective), rightResult.freeMap .addConnective( - resultConnective.connectiveInstance, + resultConnective, SourcePosition(p.line_num, p.col_num) ) ) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PContrNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PContrNormalizer.scala index aa25e141b06..6672b7d65d5 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PContrNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PContrNormalizer.scala @@ -1,26 +1,17 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{Par, Receive, ReceiveBind} -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{ - FreeMap, - NameVisitInputs, - NameVisitOutputs, - ProcNormalizeMatcher, - ProcVisitInputs, - ProcVisitOutputs, - VarSort -} +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.{Name, PContr} +import coop.rchain.rholang.interpreter.compiler._ import 
coop.rchain.rholang.interpreter.compiler.normalizer.{ NameNormalizeMatcher, RemainderNormalizeMatcher } import scala.jdk.CollectionConverters._ -import scala.collection.immutable.{BitSet, Vector} object PContrNormalizer { def normalize[F[_]: Sync](p: PContr, input: ProcVisitInputs)( @@ -35,7 +26,7 @@ object PContrNormalizer { p.name_, NameVisitInputs(input.boundMapChain, input.freeMap) ) - initAcc = (Vector[Par](), FreeMap.empty[VarSort], BitSet()) + initAcc = (Vector[ParN](), FreeMap.empty[VarSort]) // Note that we go over these in the order they were given and reverse // down below. This is because it makes more sense to number the free // variables in the order given, rather than in reverse. @@ -58,9 +49,7 @@ object PContrNormalizer { result => ( result.par +: acc._1, - result.freeMap, - acc._3 | ParLocallyFree - .locallyFree(result.par, input.boundMapChain.depth + 1) + result.freeMap ) ) } @@ -71,33 +60,24 @@ object PContrNormalizer { boundCount = remainderResult._2.countNoWildcards bodyResult <- ProcNormalizeMatcher.normalizeMatch[F]( p.proc_, - ProcVisitInputs(VectorPar(), newEnv, nameMatchResult.freeMap) + ProcVisitInputs(NilN, newEnv, nameMatchResult.freeMap) ) - } yield ProcVisitOutputs( - input.par.prepend( - Receive( - binds = List( - ReceiveBind( - formalsResults._1.reverse, - nameMatchResult.par, - remainderResult._1, - boundCount - ) - ), - body = bodyResult.par, - persistent = true, - peek = false, - bindCount = boundCount, - locallyFree = ParLocallyFree - .locallyFree(nameMatchResult.par, input.boundMapChain.depth) | formalsResults._3 - | (bodyResult.par.locallyFree - .rangeFrom(boundCount) - .map(x => x - boundCount)), - connectiveUsed = ParLocallyFree - .connectiveUsed(nameMatchResult.par) || bodyResult.par.connectiveUsed - ) - ), - bodyResult.freeMap - ) - + } yield { + val newReceive = ReceiveN( + ReceiveBindN( + formalsResults._1.reverse, + nameMatchResult.par, + remainderResult._1, + boundCount + ), + body = bodyResult.par, + persistent = 
true, + peek = false, + bindCount = boundCount + ) + ProcVisitOutputs( + ParN.combine(input.par, newReceive), + bodyResult.freeMap + ) + } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PDisjunctionNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PDisjunctionNormalizer.scala index ed55187ac4f..9f9890fb0f4 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PDisjunctionNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PDisjunctionNormalizer.scala @@ -1,10 +1,10 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Connective.ConnectiveInstance.ConnOrBody -import coop.rchain.models.{Connective, ConnectiveBody, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PDisjunction import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ FreeMap, @@ -12,9 +12,6 @@ import coop.rchain.rholang.interpreter.compiler.{ ProcVisitOutputs, SourcePosition } -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PDisjunction - -import scala.collection.immutable.Vector object PDisjunctionNormalizer { def normalize[F[_]: Sync](p: PDisjunction, input: ProcVisitInputs)( @@ -23,24 +20,21 @@ object PDisjunctionNormalizer { for { leftResult <- normalizeMatch[F]( p.proc_1, - ProcVisitInputs(VectorPar(), input.boundMapChain, FreeMap.empty) + ProcVisitInputs(NilN, input.boundMapChain, FreeMap.empty) ) rightResult <- normalizeMatch[F]( p.proc_2, - ProcVisitInputs(VectorPar(), input.boundMapChain, FreeMap.empty) + ProcVisitInputs(NilN, input.boundMapChain, FreeMap.empty) ) - 
lp = leftResult.par - resultConnective = lp.singleConnective() match { - case Some(Connective(ConnOrBody(ConnectiveBody(ps)))) => - Connective(ConnOrBody(ConnectiveBody(ps :+ rightResult.par))) - case _ => - Connective(ConnOrBody(ConnectiveBody(Vector(lp, rightResult.par)))) - } + lp = leftResult.par + rp = rightResult.par + resultConnective = ConnOrN(Seq(lp, rp)) + } yield ProcVisitOutputs( - input.par.prepend(resultConnective, input.boundMapChain.depth), + ParN.combine(input.par, resultConnective), input.freeMap .addConnective( - resultConnective.connectiveInstance, + resultConnective, SourcePosition(p.line_num, p.col_num) ) ) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PEvalNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PEvalNormalizer.scala index 2a0f06b9181..e6365465e04 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PEvalNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PEvalNormalizer.scala @@ -1,12 +1,12 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync +import cats.syntax.all._ import coop.rchain.models.Par -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{NameVisitInputs, ProcVisitInputs, ProcVisitOutputs} +import coop.rchain.models.rholangn.ParN import coop.rchain.rholang.ast.rholang_mercury.Absyn.PEval import coop.rchain.rholang.interpreter.compiler.normalizer.NameNormalizeMatcher +import coop.rchain.rholang.interpreter.compiler.{NameVisitInputs, ProcVisitInputs, ProcVisitOutputs} object PEvalNormalizer { def normalize[F[_]: Sync](p: PEval, input: ProcVisitInputs)( @@ -17,7 +17,7 @@ object PEvalNormalizer { .map( nameMatchResult => ProcVisitOutputs( - input.par ++ nameMatchResult.par, + ParN.combine(input.par, 
nameMatchResult.par), nameMatchResult.freeMap ) ) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PGroundNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PGroundNormalizer.scala index 1030e2e4c99..82721411784 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PGroundNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PGroundNormalizer.scala @@ -1,11 +1,11 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} +import cats.syntax.all._ +import coop.rchain.models.rholangn.ParN import coop.rchain.rholang.ast.rholang_mercury.Absyn.PGround import coop.rchain.rholang.interpreter.compiler.normalizer.GroundNormalizeMatcher +import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} object PGroundNormalizer { def normalize[F[_]: Sync](p: PGround, input: ProcVisitInputs): F[ProcVisitOutputs] = @@ -14,7 +14,7 @@ object PGroundNormalizer { .map( expr => ProcVisitOutputs( - input.par.prepend(expr, input.boundMapChain.depth), + ParN.combine(input.par, expr), input.freeMap ) ) diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PIfNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PIfNormalizer.scala index 559039d82e8..854ab4779a1 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PIfNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PIfNormalizer.scala @@ -1,15 +1,12 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import 
cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Expr.ExprInstance.GBool -import coop.rchain.models.{Match, MatchCase, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.Proc import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} -import coop.rchain.rholang.ast.rholang_mercury.Absyn.Proc - -import scala.collection.immutable.Vector object PIfNormalizer { def normalize[F[_]: Sync]( @@ -24,21 +21,19 @@ object PIfNormalizer { targetResult <- normalizeMatch[F](valueProc, input) trueCaseBody <- normalizeMatch[F]( trueBodyProc, - ProcVisitInputs(VectorPar(), input.boundMapChain, targetResult.freeMap) + ProcVisitInputs(NilN, input.boundMapChain, targetResult.freeMap) ) falseCaseBody <- normalizeMatch[F]( falseBodyProc, - ProcVisitInputs(VectorPar(), input.boundMapChain, trueCaseBody.freeMap) + ProcVisitInputs(NilN, input.boundMapChain, trueCaseBody.freeMap) ) - desugaredIf = Match( + desugaredIf = MatchN( targetResult.par, - Vector( - MatchCase(GBool(true), trueCaseBody.par, 0), - MatchCase(GBool(false), falseCaseBody.par, 0) - ), - targetResult.par.locallyFree | trueCaseBody.par.locallyFree | falseCaseBody.par.locallyFree, - targetResult.par.connectiveUsed || trueCaseBody.par.connectiveUsed || falseCaseBody.par.connectiveUsed + Seq( + MatchCaseN(GBoolN(true), trueCaseBody.par), + MatchCaseN(GBoolN(false), falseCaseBody.par) + ) ) - } yield ProcVisitOutputs(input.par.prepend(desugaredIf), falseCaseBody.freeMap) + } yield ProcVisitOutputs(ParN.combine(input.par, desugaredIf), falseCaseBody.freeMap) } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PInputNormalizer.scala 
b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PInputNormalizer.scala index fbba1d56585..10930afbbea 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PInputNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PInputNormalizer.scala @@ -1,35 +1,25 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{Par, Receive, Var} -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch -import coop.rchain.rholang.interpreter.compiler.{ - FreeContext, - FreeMap, - NameVisitInputs, - NameVisitOutputs, - ProcVisitInputs, - ProcVisitOutputs, - ReceiveBindsSortMatcher, - VarSort -} -import coop.rchain.rholang.interpreter.errors.{ - NormalizerError, - ReceiveOnSameChannelsError, - UnexpectedReuseOfNameContextFree -} +import cats.syntax.all._ +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ +import coop.rchain.models.{Par, ReceiveBind} import coop.rchain.rholang.ast.rholang_mercury.Absyn._ +import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch +import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.compiler.normalizer.processes.Utils.failOnInvalidConnective import coop.rchain.rholang.interpreter.compiler.normalizer.{ NameNormalizeMatcher, RemainderNormalizeMatcher } +import coop.rchain.rholang.interpreter.errors.{ + ReceiveOnSameChannelsError, + UnexpectedReuseOfNameContextFree +} -import scala.jdk.CollectionConverters._ -import scala.collection.immutable.{BitSet, Vector} import java.util.UUID +import scala.jdk.CollectionConverters._ object PInputNormalizer { @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) @@ -142,30 +132,28 @@ object PInputNormalizer { // 
We split this into parts. First we process all the sources, then we process all the bindings. def processSources( sources: Vector[Name] - ): F[(Vector[Par], FreeMap[VarSort], BitSet, Boolean)] = - sources.foldM((Vector.empty[Par], input.freeMap, BitSet.empty, false)) { - case ((vectorPar, knownFree, locallyFree, connectiveUsed), name) => + ): F[(Vector[ParN], FreeMap[VarSort])] = + sources.foldM((Vector.empty[ParN], input.freeMap)) { + case ((vectorPar, knownFree), name) => NameNormalizeMatcher .normalizeMatch[F](name, NameVisitInputs(input.boundMapChain, knownFree)) .map { case NameVisitOutputs(par, knownFree) => ( vectorPar :+ par, - knownFree, - locallyFree | ParLocallyFree.locallyFree(par, input.boundMapChain.depth), - connectiveUsed || ParLocallyFree.connectiveUsed(par) + knownFree ) } } def processPatterns( patterns: Vector[(Vector[Name], NameRemainder)] - ): F[Vector[(Vector[Par], Option[Var], FreeMap[VarSort], BitSet)]] = + ): F[Vector[(Vector[ParN], Option[VarN], FreeMap[VarSort])]] = patterns.traverse { case (names, nameRemainder) => names - .foldM((Vector.empty[Par], FreeMap.empty[VarSort], BitSet.empty)) { - case ((vectorPar, knownFree, locallyFree), name) => + .foldM((Vector.empty[ParN], FreeMap.empty[VarSort])) { + case ((vectorPar, knownFree), name) => NameNormalizeMatcher .normalizeMatch[F]( name, @@ -174,21 +162,15 @@ object PInputNormalizer { case nameVisitOutputs @ NameVisitOutputs(par, knownFree) => failOnInvalidConnective(input, nameVisitOutputs) .fold( - _.raiseError[F, (Vector[Par], FreeMap[VarSort], BitSet)], - _ => - ( - vectorPar :+ par, - knownFree, - locallyFree | ParLocallyFree - .locallyFree(par, input.boundMapChain.depth + 1) - ).pure[F] + _.raiseError[F, (Vector[ParN], FreeMap[VarSort])], + _ => (vectorPar :+ par, knownFree).pure[F] ) } } >>= { - case (vectorPar, knownFree, locallyFree) => + case (vectorPar, knownFree) => RemainderNormalizeMatcher.normalizeMatchName(nameRemainder, knownFree).map { case (optionalVar, knownFree) => - 
(vectorPar, optionalVar, knownFree, locallyFree) + (vectorPar, optionalVar, knownFree) } } } @@ -205,9 +187,7 @@ object PInputNormalizer { // all sources should be simple sources by this point case ss: SimpleSource => ss.name_ }) - }, false, false) - } case rr: ReceiptRepeated => rr.receiptrepeatedimpl_ match { @@ -232,16 +212,26 @@ object PInputNormalizer { val (patterns, names) = consumes.unzip + def fromReceiveBind(x: ReceiveBind): ReceiveBindN = { + val patterns = fromProto(x.patterns) + val source = fromProto(x.source) + val remainder = x.remainder.map(fromProtoVarOpt) + val freeCount = x.freeCount + ReceiveBindN(patterns, source, remainder, freeCount) + } + for { - processedSources <- processSources(names) - (sources, sourcesFree, sourcesLocallyFree, sourcesConnectiveUsed) = processedSources - processedPatterns <- processPatterns(patterns) + processedSources <- processSources(names) + (sources, sourcesFree) = processedSources + processedPatterns <- processPatterns(patterns) receiveBindsAndFreeMaps <- ReceiveBindsSortMatcher.preSortBinds[F, VarSort]( processedPatterns.zip(sources).map { - case ((a, b, c, _), e) => (a, b, e, c) + case ((a, b, c), e) => + (toProto(a), b.map(toProtoVarOpt), toProto(e), c) } ) - (receiveBinds, receiveBindFreeMaps) = receiveBindsAndFreeMaps.unzip + unz = receiveBindsAndFreeMaps.unzip + (receiveBinds, receiveBindFreeMaps) = (unz._1.map(fromReceiveBind), unz._2) channels = receiveBinds.map(_.source) hasSameChannels = channels.size > channels.toSet.size _ <- ReceiveOnSameChannelsError(p.line_num, p.col_num) @@ -262,29 +252,17 @@ object PInputNormalizer { procVisitOutputs <- normalizeMatch[F]( p.proc_, ProcVisitInputs( - VectorPar(), + NilN, input.boundMapChain.absorbFree(receiveBindsFreeMap), sourcesFree ) ) } yield { val bindCount = receiveBindsFreeMap.countNoWildcards + val receive = + ReceiveN(receiveBinds, procVisitOutputs.par, persistent, peek, bindCount) ProcVisitOutputs( - input.par.prepend( - Receive( - receiveBinds, - 
procVisitOutputs.par, - persistent, - peek, - bindCount, - sourcesLocallyFree | processedPatterns - .map(_._4) - .fold(BitSet.empty)(_ | _) | procVisitOutputs.par.locallyFree - .rangeFrom(bindCount) - .map(_ - bindCount), - sourcesConnectiveUsed || procVisitOutputs.par.connectiveUsed - ) - ), + ParN.combine(input.par, receive), procVisitOutputs.freeMap ) } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PLetNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PLetNormalizer.scala index 2743afc98d5..f3a754dfb3d 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PLetNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PLetNormalizer.scala @@ -1,26 +1,18 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{EList, Match, MatchCase, Par, Var} -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch -import coop.rchain.rholang.interpreter.compiler.{ - FreeMap, - NameVisitInputs, - NameVisitOutputs, - ProcNormalizeMatcher, - ProcVisitInputs, - ProcVisitOutputs, - VarSort -} +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ +import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch +import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.compiler.normalizer.{ NameNormalizeMatcher, RemainderNormalizeMatcher } -import scala.collection.immutable.BitSet import java.util.UUID import scala.jdk.CollectionConverters._ @@ -129,27 +121,25 @@ object PLetNormalizer { knownFree: FreeMap[VarSort] ): 
F[ProcVisitOutputs] = listProc - .foldM((Vector.empty[Par], knownFree, BitSet.empty, false)) { - case ((vectorPar, knownFree, locallyFree, connectiveUsed), proc) => + .foldM((Vector.empty[ParN], knownFree)) { + case ((vectorPar, knownFree), proc) => ProcNormalizeMatcher .normalizeMatch[F]( proc, - ProcVisitInputs(VectorPar(), input.boundMapChain, knownFree) + ProcVisitInputs(NilN, input.boundMapChain, knownFree) ) .map { case ProcVisitOutputs(par, updatedKnownFree) => ( par +: vectorPar, - updatedKnownFree, - locallyFree | par.locallyFree, - connectiveUsed | par.connectiveUsed + updatedKnownFree ) } } .map { - case (vectorPar, knownFree, locallyFree, connectiveUsed) => + case (vectorPar, knownFree) => ProcVisitOutputs( - EList(vectorPar.reverse, locallyFree, connectiveUsed, none[Var]), + EListN(vectorPar.reverse, none), knownFree ) } @@ -163,8 +153,8 @@ object PLetNormalizer { .normalizeMatchName(nameRemainder, FreeMap.empty[VarSort]) >>= { case (optionalVar, remainderKnownFree) => listName - .foldM((Vector.empty[Par], remainderKnownFree, BitSet.empty)) { - case ((vectorPar, knownFree, locallyFree), name) => + .foldM((Vector.empty[ParN], remainderKnownFree)) { + case ((vectorPar, knownFree), name) => NameNormalizeMatcher .normalizeMatch[F]( name, @@ -174,18 +164,14 @@ object PLetNormalizer { case NameVisitOutputs(par, updatedKnownFree) => ( par +: vectorPar, - updatedKnownFree, - // Use input.env.depth + 1 because the pattern was evaluated w.r.t input.env.push, - // and more generally because locally free variables become binders in the pattern position - locallyFree | ParLocallyFree - .locallyFree(par, input.boundMapChain.depth + 1) + updatedKnownFree ) } } .map { - case (vectorPar, knownFree, locallyFree) => + case (vectorPar, knownFree) => ProcVisitOutputs( - EList(vectorPar.reverse, locallyFree, connectiveUsed = true, optionalVar), + EListN(vectorPar.reverse, optionalVar), knownFree ) } @@ -200,31 +186,23 @@ object PLetNormalizer { normalizeMatch[F]( 
newContinuation, ProcVisitInputs( - VectorPar(), + NilN, input.boundMapChain.absorbFree(patternKnownFree), valueKnownFree ) ).map { case ProcVisitOutputs(continuationPar, continuationKnownFree) => - ProcVisitOutputs( - input.par.prepend( - Match( - target = valueListPar, - cases = Seq( - MatchCase( - patternListPar, - continuationPar, - patternKnownFree.countNoWildcards - ) - ), - locallyFree = valueListPar.locallyFree | patternListPar.locallyFree | continuationPar.locallyFree - .rangeFrom(patternKnownFree.countNoWildcards) - .map(_ - patternKnownFree.countNoWildcards), - connectiveUsed = valueListPar.connectiveUsed || continuationPar.connectiveUsed + val m = MatchN( + target = valueListPar, + cases = Seq( + MatchCaseN( + patternListPar, + continuationPar, + patternKnownFree.countNoWildcards ) - ), - continuationKnownFree + ) ) + ProcVisitOutputs(ParN.combine(input.par, m), continuationKnownFree) } } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchNormalizer.scala index dc7a36fd1c9..e1f217b33fd 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchNormalizer.scala @@ -1,16 +1,15 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes import cats.Applicative -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{Match, MatchCase, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.{Case, CaseImpl, PMatch, Proc} import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{FreeMap, 
ProcVisitInputs, ProcVisitOutputs} import coop.rchain.rholang.interpreter.errors.UnrecognizedNormalizerError -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{Case, CaseImpl, PMatch, Proc} -import scala.collection.immutable.{BitSet, Vector} import scala.jdk.CollectionConverters._ object PMatchNormalizer { @@ -25,10 +24,10 @@ object PMatchNormalizer { } for { - targetResult <- normalizeMatch[F](p.proc_, input.copy(par = VectorPar())) + targetResult <- normalizeMatch[F](p.proc_, input.copy(par = NilN)) cases <- p.listcase_.asScala.toList.traverse(liftCase) - initAcc = (Vector[MatchCase](), targetResult.freeMap, BitSet(), false) + initAcc = (Vector[MatchCaseN](), targetResult.freeMap) casesResult <- cases.foldM(initAcc)( (acc, caseImpl) => caseImpl match { @@ -37,7 +36,7 @@ object PMatchNormalizer { patternResult <- normalizeMatch[F]( pattern, ProcVisitInputs( - VectorPar(), + NilN, input.boundMapChain.push, FreeMap.empty ) @@ -46,29 +45,22 @@ object PMatchNormalizer { boundCount = patternResult.freeMap.countNoWildcards caseBodyResult <- normalizeMatch[F]( caseBody, - ProcVisitInputs(VectorPar(), caseEnv, acc._2) + ProcVisitInputs(NilN, caseEnv, acc._2) ) } yield ( - MatchCase(patternResult.par, caseBodyResult.par, boundCount) +: acc._1, - caseBodyResult.freeMap, - acc._3 | patternResult.par.locallyFree | caseBodyResult.par.locallyFree - .rangeFrom(boundCount) - .map(x => x - boundCount), - acc._4 || caseBodyResult.par.connectiveUsed + MatchCaseN( + patternResult.par, + caseBodyResult.par, + boundCount + ) +: acc._1, + caseBodyResult.freeMap ) } } ) - } yield ProcVisitOutputs( - input.par.prepend( - Match( - targetResult.par, - casesResult._1.reverse, - casesResult._3 | targetResult.par.locallyFree, - casesResult._4 || targetResult.par.connectiveUsed - ) - ), - casesResult._2 - ) + } yield { + val m = MatchN(targetResult.par, casesResult._1.reverse) + ProcVisitOutputs(ParN.combine(input.par, m), casesResult._2) + } } } diff --git 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchesNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchesNormalizer.scala index 87e12fb3f45..9a087f90157 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchesNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMatchesNormalizer.scala @@ -1,12 +1,12 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.{EMatches, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PMatches import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{FreeMap, ProcVisitInputs, ProcVisitOutputs} -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PMatches object PMatchesNormalizer { def normalize[F[_]: Sync](p: PMatches, input: ProcVisitInputs)( @@ -18,17 +18,17 @@ object PMatchesNormalizer { // "match target { pattern => true ; _ => false} // so free variables from pattern should not be visible at the top level for { - leftResult <- normalizeMatch[F](p.proc_1, input.copy(par = VectorPar())) + leftResult <- normalizeMatch[F](p.proc_1, input.copy(par = NilN)) rightResult <- normalizeMatch[F]( p.proc_2, ProcVisitInputs( - VectorPar(), + NilN, input.boundMapChain.push, FreeMap.empty ) ) } yield ProcVisitOutputs( - input.par.prepend(EMatches(leftResult.par, rightResult.par), input.boundMapChain.depth), + ParN.combine(input.par, EMatchesN(leftResult.par, rightResult.par)), leftResult.freeMap ) } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMethodNormalizer.scala 
b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMethodNormalizer.scala index 4a064b156ba..15f3dd900d2 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMethodNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PMethodNormalizer.scala @@ -2,50 +2,40 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.rholang.implicits._ -import coop.rchain.models.{EMethod, Par} +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.PMethod import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} import scala.jdk.CollectionConverters._ -import scala.collection.immutable.BitSet object PMethodNormalizer { def normalize[F[_]: Sync](p: PMethod, input: ProcVisitInputs)( implicit env: Map[String, Par] ): F[ProcVisitOutputs] = for { - targetResult <- normalizeMatch[F](p.proc_, input.copy(par = Par())) + targetResult <- normalizeMatch[F](p.proc_, input.copy(NilN)) target = targetResult.par initAcc = ( - List[Par](), - ProcVisitInputs(Par(), input.boundMapChain, targetResult.freeMap), - BitSet(), - false + Vector[ParN](), + ProcVisitInputs(NilN, input.boundMapChain, targetResult.freeMap) ) argResults <- p.listproc_.asScala.toList.reverse.foldM(initAcc)((acc, e) => { normalizeMatch[F](e, acc._2).map( procMatchResult => ( procMatchResult.par +: acc._1, - ProcVisitInputs(Par(), input.boundMapChain, procMatchResult.freeMap), - acc._3 | procMatchResult.par.locallyFree, - acc._4 || procMatchResult.par.connectiveUsed + ProcVisitInputs( + NilN, + input.boundMapChain, + procMatchResult.freeMap + ) ) ) }) - } yield ProcVisitOutputs( - input.par.prepend( - EMethod( - p.var_, - 
targetResult.par, - argResults._1, - target.locallyFree | argResults._3, - target.connectiveUsed || argResults._4 - ), - input.boundMapChain.depth - ), - argResults._2.freeMap - ) + } yield { + val method = EMethodN(p.var_, target, argResults._1) + ProcVisitOutputs(ParN.combine(input.par, method), argResults._2.freeMap) + } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNegationNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNegationNormalizer.scala index db467625e82..8d5057abeee 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNegationNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNegationNormalizer.scala @@ -1,10 +1,10 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Connective.ConnectiveInstance.ConnNotBody -import coop.rchain.models.{Connective, Par} -import coop.rchain.models.rholang.implicits._ +import cats.syntax.all._ +import coop.rchain.models.Par +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PNegation import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ FreeMap, @@ -12,7 +12,6 @@ import coop.rchain.rholang.interpreter.compiler.{ ProcVisitOutputs, SourcePosition } -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PNegation object PNegationNormalizer { def normalize[F[_]: Sync](p: PNegation, input: ProcVisitInputs)( @@ -20,16 +19,15 @@ object PNegationNormalizer { ): F[ProcVisitOutputs] = normalizeMatch[F]( p.proc_, - ProcVisitInputs(VectorPar(), input.boundMapChain, FreeMap.empty) - ).map( - bodyResult => - ProcVisitOutputs( - input.par.prepend(Connective(ConnNotBody(bodyResult.par)), 
input.boundMapChain.depth), - input.freeMap - .addConnective( - ConnNotBody(bodyResult.par), - SourcePosition(p.line_num, p.col_num) - ) + ProcVisitInputs(NilN, input.boundMapChain, FreeMap.empty) + ).map { bodyResult => + val conn = ConnNotN(bodyResult.par) + ProcVisitOutputs( + ParN.combine(input.par, conn), + input.freeMap.addConnective( + conn, + SourcePosition(p.line_num, p.col_num) ) - ) + ) + } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNewNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNewNormalizer.scala index 81c7491bee0..38cc23f60e0 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNewNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PNewNormalizer.scala @@ -2,8 +2,9 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.rholang.implicits._ -import coop.rchain.models.{New, Par} +import coop.rchain.models.Par +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.{NameDeclSimpl, NameDeclUrn, PNew} import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.normalizer.GroundNormalizeMatcher @@ -41,16 +42,14 @@ object PNewNormalizer { val newEnv = input.boundMapChain.put(newBindings.toList) val newCount = newEnv.count - input.boundMapChain.count - normalizeMatch[F](p.proc_, ProcVisitInputs(VectorPar(), newEnv, input.freeMap)).map { - bodyResult => - val resultNew = New( - bindCount = newCount, - p = bodyResult.par, - uri = uris, - injections = env, - locallyFree = bodyResult.par.locallyFree.rangeFrom(newCount).map(x => x - newCount) - ) - ProcVisitOutputs(input.par.prepend(resultNew), 
bodyResult.freeMap) + normalizeMatch[F](p.proc_, ProcVisitInputs(NilN, newEnv, input.freeMap)).map { bodyResult => + val resultNew = NewN( + bindCount = newCount, + p = bodyResult.par, + uri = uris, + injections = env.map { case (s, par) => (s, fromProto(par)) } + ) + ProcVisitOutputs(ParN.combine(input.par, resultNew), bodyResult.freeMap) } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PParNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PParNormalizer.scala index 24cb759518e..8d7d3844bce 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PParNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PParNormalizer.scala @@ -1,11 +1,11 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync +import cats.syntax.all._ import coop.rchain.models.Par +import coop.rchain.rholang.ast.rholang_mercury.Absyn.PPar import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} -import coop.rchain.rholang.ast.rholang_mercury.Absyn.PPar object PParNormalizer { def normalize[F[_]: Sync](p: PPar, input: ProcVisitInputs)( diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSendNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSendNormalizer.scala index 39076700d63..cd290f831f3 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSendNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSendNormalizer.scala @@ -1,18 +1,16 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import 
cats.syntax.all._ import cats.effect.Sync +import cats.syntax.all._ import coop.rchain.models.Par -import coop.rchain.models.rholang.implicits._ -import coop.rchain.models.Send +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PSend, SendMultiple, SendSingle} import coop.rchain.rholang.interpreter.compiler.ProcNormalizeMatcher.normalizeMatch +import coop.rchain.rholang.interpreter.compiler.normalizer.NameNormalizeMatcher import coop.rchain.rholang.interpreter.compiler.{NameVisitInputs, ProcVisitInputs, ProcVisitOutputs} import scala.jdk.CollectionConverters._ -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PSend, SendMultiple, SendSingle} -import coop.rchain.rholang.interpreter.compiler.normalizer.NameNormalizeMatcher - -import scala.collection.immutable.{BitSet, Vector} object PSendNormalizer { def normalize[F[_]: Sync](p: PSend, input: ProcVisitInputs)( @@ -24,10 +22,8 @@ object PSendNormalizer { NameVisitInputs(input.boundMapChain, input.freeMap) ) initAcc = ( - Vector[Par](), - ProcVisitInputs(VectorPar(), input.boundMapChain, nameMatchResult.freeMap), - BitSet(), - false + Vector[ParN](), + ProcVisitInputs(NilN, input.boundMapChain, nameMatchResult.freeMap) ) dataResults <- p.listproc_.asScala.toList.reverse.foldM(initAcc)( (acc, e) => { @@ -36,12 +32,10 @@ object PSendNormalizer { ( procMatchResult.par +: acc._1, ProcVisitInputs( - VectorPar(), + NilN, input.boundMapChain, procMatchResult.freeMap - ), - acc._3 | procMatchResult.par.locallyFree, - acc._4 || procMatchResult.par.connectiveUsed + ) ) ) } @@ -50,17 +44,10 @@ object PSendNormalizer { case _: SendSingle => false case _: SendMultiple => true } + send = SendN(nameMatchResult.par, dataResults._1, persistent) + par = ParN.combine(input.par, send) } yield ProcVisitOutputs( - input.par.prepend( - Send( - nameMatchResult.par, - dataResults._1, - persistent, - ParLocallyFree - .locallyFree(nameMatchResult.par, 
input.boundMapChain.depth) | dataResults._3, - ParLocallyFree.connectiveUsed(nameMatchResult.par) || dataResults._4 - ) - ), + par, dataResults._2.freeMap ) } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSimpleTypeNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSimpleTypeNormalizer.scala index 186fa992675..4ec1edd2a4f 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSimpleTypeNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PSimpleTypeNormalizer.scala @@ -2,9 +2,7 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes import cats.effect.Sync import cats.syntax.all._ -import coop.rchain.models.Connective -import coop.rchain.models.Connective.ConnectiveInstance._ -import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ import coop.rchain.rholang.interpreter.compiler.{ProcVisitInputs, ProcVisitOutputs} @@ -12,46 +10,16 @@ object PSimpleTypeNormalizer { def normalize[F[_]: Sync](p: PSimpleType, input: ProcVisitInputs): F[ProcVisitOutputs] = p.simpletype_ match { case _: SimpleTypeBool => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnBool(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnBoolN), input.freeMap).pure[F] case _: SimpleTypeInt => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnInt(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnIntN), input.freeMap).pure[F] case _: SimpleTypeBigInt => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnBigInt(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + 
ProcVisitOutputs(ParN.combine(input.par, ConnBigIntN), input.freeMap).pure[F] case _: SimpleTypeString => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnString(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnStringN), input.freeMap).pure[F] case _: SimpleTypeUri => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnUri(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnUriN), input.freeMap).pure[F] case _: SimpleTypeByteArray => - ProcVisitOutputs( - input.par - .prepend(Connective(ConnByteArray(true)), input.boundMapChain.depth) - .withConnectiveUsed(true), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnByteArrayN), input.freeMap).pure[F] } } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarNormalizer.scala index a3bc3ef5451..af440784c2c 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarNormalizer.scala @@ -1,24 +1,14 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Var.VarInstance.{BoundVar, FreeVar, Wildcard} -import coop.rchain.models.{EVar, Var} -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{ - BoundContext, - FreeContext, - NameSort, - ProcSort, - ProcVisitInputs, - ProcVisitOutputs, - SourcePosition -} +import cats.syntax.all._ +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PVar, ProcVarVar, ProcVarWildcard} +import 
coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.errors.{ UnexpectedProcContext, UnexpectedReuseOfProcContextFree } -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PVar, ProcVarVar, ProcVarWildcard} object PVarNormalizer { def normalize[F[_]: Sync](p: PVar, input: ProcVisitInputs): F[ProcVisitOutputs] = @@ -27,7 +17,7 @@ object PVarNormalizer { input.boundMapChain.get(pvv.var_) match { case Some(BoundContext(level, ProcSort, _)) => ProcVisitOutputs( - input.par.prepend(EVar(BoundVar(level)), input.boundMapChain.depth), + ParN.combine(input.par, BoundVarN(level)), input.freeMap ).pure[F] case Some(BoundContext(_, NameSort, sourcePosition)) => @@ -46,9 +36,7 @@ object PVarNormalizer { (pvv.var_, ProcSort, SourcePosition(pvv.line_num, pvv.col_num)) ) ProcVisitOutputs( - input.par - .prepend(EVar(FreeVar(input.freeMap.nextLevel)), input.boundMapChain.depth) - .withConnectiveUsed(true), + ParN.combine(input.par, FreeVarN(input.freeMap.nextLevel)), newBindingsPair ).pure[F] case Some(FreeContext(_, _, firstSourcePosition)) => @@ -63,9 +51,7 @@ object PVarNormalizer { } case _: ProcVarWildcard => ProcVisitOutputs( - input.par - .prepend(EVar(Wildcard(Var.WildcardMsg())), input.boundMapChain.depth) - .withConnectiveUsed(true), + ParN.combine(input.par, WildcardN), input.freeMap.addWildcard(SourcePosition(p.line_num, p.col_num)) ).pure[F] } diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarRefNormalizer.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarRefNormalizer.scala index 61fcfa1f6a8..5ac18f90935 100644 --- a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarRefNormalizer.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/PVarRefNormalizer.scala @@ -1,24 +1,15 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes -import 
cats.syntax.all._ import cats.effect.Sync -import coop.rchain.models.Connective.ConnectiveInstance.VarRefBody -import coop.rchain.models.{Connective, VarRef} -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{ - BoundContext, - NameSort, - ProcSort, - ProcVisitInputs, - ProcVisitOutputs, - SourcePosition -} +import cats.syntax.all._ +import coop.rchain.models.rholangn._ +import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PVarRef, VarRefKindName, VarRefKindProc} +import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.errors.{ UnboundVariableRef, UnexpectedNameContext, UnexpectedProcContext } -import coop.rchain.rholang.ast.rholang_mercury.Absyn.{PVarRef, VarRefKindName, VarRefKindProc} object PVarRefNormalizer { def normalize[F[_]: Sync](p: PVarRef, input: ProcVisitInputs): F[ProcVisitOutputs] = @@ -30,14 +21,8 @@ object PVarRefNormalizer { case ProcSort => p.varrefkind_ match { case _: VarRefKindProc => - ProcVisitOutputs( - input.par - .prepend( - Connective(VarRefBody(VarRef(idx, depth))), - input.boundMapChain.depth - ), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnVarRefN(idx, depth)), input.freeMap) + .pure[F] case _ => Sync[F].raiseError( UnexpectedProcContext( @@ -50,11 +35,8 @@ object PVarRefNormalizer { case NameSort => p.varrefkind_ match { case _: VarRefKindName => - ProcVisitOutputs( - input.par - .prepend(Connective(VarRefBody(VarRef(idx, depth))), input.boundMapChain.depth), - input.freeMap - ).pure[F] + ProcVisitOutputs(ParN.combine(input.par, ConnVarRefN(idx, depth)), input.freeMap) + .pure[F] case _ => Sync[F].raiseError( UnexpectedNameContext( diff --git a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/Utils.scala b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/Utils.scala index d6ada3d23b5..40679406ac3 100644 --- 
a/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/Utils.scala +++ b/rholang/src/main/scala/coop/rchain/rholang/interpreter/compiler/normalizer/processes/Utils.scala @@ -1,7 +1,7 @@ package coop.rchain.rholang.interpreter.compiler.normalizer.processes import cats.syntax.all._ -import coop.rchain.models.Connective.ConnectiveInstance.{ConnNotBody, ConnOrBody} +import coop.rchain.models.rholangn._ import coop.rchain.rholang.interpreter.compiler.{NameVisitOutputs, ProcVisitInputs} import coop.rchain.rholang.interpreter.errors.{InterpreterError, PatternReceiveError} @@ -15,9 +15,9 @@ object Utils { .fromOption( nameRes.freeMap.connectives .collectFirst { - case (_: ConnOrBody, sourcePosition) => + case (_: ConnOrN, sourcePosition) => PatternReceiveError(s"\\/ (disjunction) at $sourcePosition") - case (_: ConnNotBody, sourcePosition) => + case (_: ConnNotN, sourcePosition) => PatternReceiveError(s"~ (negation) at $sourcePosition") }, nameRes diff --git a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala index 9f9b21d1c3f..74a7fbe7877 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/StackSafetySpec.scala @@ -48,7 +48,7 @@ object StackSafetySpec extends Assertions { println(s"Calculated max recursion depth is $maxDepth") // Because of OOM errors on CI depth recursion is limited - val maxDepthLimited = Math.min(1500, maxDepth) + val maxDepthLimited = Math.min(50, maxDepth) println(s"Used recursion depth is limited to $maxDepthLimited") maxDepthLimited } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/EvalTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/EvalTest.scala new file mode 100644 index 00000000000..a99e4317d7c --- /dev/null +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/EvalTest.scala @@ -0,0 +1,55 @@ +package 
coop.rchain.rholang.interpreter + +import cats.effect.IO +import cats.effect.unsafe.implicits.global +import coop.rchain.metrics +import coop.rchain.metrics.{Metrics, NoopSpan, Span} +import coop.rchain.models.Expr.ExprInstance.GString +import coop.rchain.models.Par +import coop.rchain.models.rholang.implicits._ +import coop.rchain.rholang.Resources.mkRuntime +import coop.rchain.rholang.interpreter.compiler.Compiler +import coop.rchain.rholang.syntax._ +import coop.rchain.shared.Log +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec + +class EvalTest extends AnyWordSpec with Matchers { + implicit val logF: Log[IO] = Log.log[IO] + implicit val noopMetrics: Metrics[IO] = new metrics.Metrics.MetricsNOP[IO] + implicit val noopSpan: Span[IO] = NoopSpan[IO]() + + val outcomeCh = "ret" + val reduceErrorMsg = "Error: index out of bound: -1" + + private def execute(source: String): IO[Par] = + mkRuntime[IO]("rholang-eval-spec") + .use { runtime => + for { + _ <- runtime.evaluate(source) + data <- runtime.getData(GString(outcomeCh)).map(_.head) + } yield data.a.pars.head + } + + "runtime" should { + "convert term to Par and evalue it" in { +// val term = """{ "key11"|"key12":"data1", "key2":"data2"}""" + val term = """ new x, y in { *x + *y } """ + val ast = Compiler[IO].sourceToADT(term).unsafeRunSync() + println("AST:") + println(ast) + println("prettyAST:") + println(PrettyPrinter().buildString(ast)) + + val term2 = s"""@"$outcomeCh"!($term)""" + val evalTerm = execute(term2).unsafeRunSync() + println("evalTerm:") + println(evalTerm) + + val term3 = s""" @"chan"!( $term ) | for(@q <- @"chan") { @"$outcomeCh"!(q) } """ + val processedTerm = execute(term3).unsafeRunSync() + println("processedTerm:") + println(processedTerm) + } + } +} diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PrettyPrinterTest.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PrettyPrinterTest.scala index 00340cc4714..64340586101 
100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/PrettyPrinterTest.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/PrettyPrinterTest.scala @@ -3,7 +3,7 @@ package coop.rchain.rholang.interpreter import java.io.StringReader import coop.rchain.models.Expr.ExprInstance._ -import coop.rchain.models.rholang.implicits.{GPrivateBuilder, _} +import coop.rchain.models.rholang.implicits._ import coop.rchain.models.{Send, _} import coop.rchain.rholang.interpreter.compiler.{ BoundMapChain, @@ -30,6 +30,8 @@ import org.scalatest.matchers.should.Matchers import coop.rchain.catscontrib.effect.implicits.sEval import scala.collection.immutable.BitSet +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ class BoolPrinterSpec extends AnyFlatSpec with Matchers { @@ -74,7 +76,7 @@ class GroundPrinterSpec extends AnyFlatSpec with Matchers { class CollectPrinterSpec extends AnyFlatSpec with Matchers { val inputs = ProcVisitInputs( - Par(), + NilN, BoundMapChain .empty[VarSort] .put(List(("P", ProcSort, SourcePosition(0, 0)), ("x", NameSort, SourcePosition(0, 0)))), @@ -162,7 +164,7 @@ class CollectPrinterSpec extends AnyFlatSpec with Matchers { } class ProcPrinterSpec extends AnyFlatSpec with Matchers { - val inputs = ProcVisitInputs(Par(), BoundMapChain.empty, FreeMap.empty) + val inputs = ProcVisitInputs(NilN, BoundMapChain.empty, FreeMap.empty) implicit val normalizerEnv: Map[String, Par] = Map.empty "New" should "use 0-based indexing" in { @@ -699,7 +701,7 @@ class ProcPrinterSpec extends AnyFlatSpec with Matchers { ProcNormalizeMatcher.normalizeMatch[Eval](basicInput1, inputs).value.par ) val target = - """for( @{x0}, @{for( @{y0}, @{y1} <- @{Nil} ) { y1 | y0 | x1 }} <- @{Nil} ) { + """for( @{x0}, @{for( @{y0}, @{y1} <- @{Nil} ) { x1 | y0 | y1 }} <- @{Nil} ) { | @{x0}!(x1) |}""".stripMargin result shouldBe target @@ -971,7 +973,7 @@ class ProcPrinterSpec extends AnyFlatSpec with Matchers { val result = 
PrettyPrinter().buildString( ProcNormalizeMatcher.normalizeMatch[Eval](input, inputs).value.par ) - result shouldBe """for( @{match x0 | x1 { 47 => { Nil } }} <- @{Nil} ) { + result shouldBe """for( @{match x1 | x0 { 47 => { Nil } }} <- @{Nil} ) { | Nil |}""".stripMargin } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala index c8878f1e5da..2970a3082fb 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/accounting/CostAccountingSpec.scala @@ -204,7 +204,7 @@ class CostAccountingSpec } } | loop!(10) - }""".stripMargin, 3892L), + }""".stripMargin, 3868L), ("""42 | @0!(2) | for (x <- @0) { Nil }""", 336L), ("""@1!(1) | for(x <- @1) { Nil } | @@ -213,7 +213,7 @@ class CostAccountingSpec 38 => Nil 42 => @3!(42) } - """.stripMargin, 1264L), + """.stripMargin, 1234L), // test that we charge for system processes ("""new ret, keccak256Hash(`rho:crypto:keccak256Hash`) in { | keccak256Hash!("TEST".toByteArray(), *ret) | diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolMatcherSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolMatcherSpec.scala index 995f0bacbf1..638cd9380dc 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolMatcherSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/BoolMatcherSpec.scala @@ -1,21 +1,19 @@ package coop.rchain.rholang.interpreter.compiler.normalizer +import coop.rchain.models.rholangn.GBoolN import coop.rchain.rholang.ast.rholang_mercury.Absyn._ - import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.models.Expr.ExprInstance._ - class BoolMatcherSpec extends AnyFlatSpec with 
Matchers { "BoolTrue" should "Compile as GBool(true)" in { val btrue = new BoolTrue() - BoolNormalizeMatcher.normalizeMatch(btrue) should be(GBool(true)) + BoolNormalizeMatcher.normalizeMatch(btrue) should be(GBoolN(true)) } "BoolFalse" should "Compile as GBool(false)" in { val bfalse = new BoolFalse() - BoolNormalizeMatcher.normalizeMatch(bfalse) should be(GBool(false)) + BoolNormalizeMatcher.normalizeMatch(bfalse) should be(GBoolN(false)) } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectMatcherSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectMatcherSpec.scala index 91751db97dc..34f7c168b3a 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectMatcherSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/CollectMatcherSpec.scala @@ -1,34 +1,21 @@ package coop.rchain.rholang.interpreter.compiler.normalizer +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval +import coop.rchain.models._ +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ - +import coop.rchain.rholang.interpreter.ParBuilderUtil +import coop.rchain.rholang.interpreter.compiler._ +import coop.rchain.rholang.interpreter.errors._ import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.immutable.BitSet -import coop.rchain.models.Expr.ExprInstance._ -import coop.rchain.models.Var.VarInstance._ -import coop.rchain.models._ -import coop.rchain.rholang.interpreter.errors._ -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.ParBuilderUtil -import coop.rchain.rholang.interpreter.compiler.{ - BoundMapChain, - FreeMap, - NameSort, - ProcNormalizeMatcher, - ProcSort, - ProcVisitInputs, - SourcePosition, - VarSort -} 
-import cats.Eval -import coop.rchain.catscontrib.effect.implicits.sEval - class CollectMatcherSpec extends AnyFlatSpec with Matchers { val inputs = ProcVisitInputs( - Par(), + NilN, BoundMapChain .empty[VarSort] .put(List(("P", ProcSort, SourcePosition(0, 0)), ("x", NameSort, SourcePosition(0, 0)))), @@ -47,15 +34,7 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { val list = new PCollect(new CollectList(listData, new ProcRemainderEmpty())) val result = ProcNormalizeMatcher.normalizeMatch[Eval](list, inputs).value - result.par should be( - inputs.par.prepend( - EList( - List[Par](EVar(BoundVar(1)), EVar(BoundVar(0)), GInt(7)), - locallyFree = BitSet(0, 1) - ), - 0 - ) - ) + result.par should be(EListN(Seq(BoundVarN(1), BoundVarN(0), GIntN(7)))) result.freeMap should be(inputs.freeMap) } "List" should "sort the insides of their elements" in { @@ -84,6 +63,7 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { | )}""".stripMargin assertEqualNormalized(rho1, rho2) } + "Tuple" should "delegate" in { val tupleData = new ListProc() tupleData.add(new PEval(new NameVar("y"))) @@ -91,19 +71,7 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { new PCollect(new CollectTuple(new TupleMultiple(new PVar(new ProcVarVar("Q")), tupleData))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](tuple, inputs).value - result.par should be( - inputs.par.prepend( - ETuple( - List[Par]( - EVar(FreeVar(0)), - EVar(FreeVar(1)) - ), - locallyFree = BitSet(), - connectiveUsed = true - ), - 0 - ) - ) + result.par should be(ETupleN(Seq(FreeVarN(0), FreeVarN(1)))) result.freeMap should be( inputs.freeMap.put( List(("Q", ProcSort, SourcePosition(0, 0)), ("y", NameSort, SourcePosition(0, 0))) @@ -124,6 +92,7 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { "Tuple" should "sort the insides of their elements" in { assertEqualNormalized("@0!(({1 | 2}))", "@0!(({2 | 1}))") } + "Set" should "delegate" in { val setData = new ListProc() 
setData.add(new PAdd(new PVar(new ProcVarVar("P")), new PVar(new ProcVarVar("R")))) @@ -134,16 +103,9 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](set, inputs).value result.par should be( - inputs.par.prepend( - ParSet( - Seq[Par]( - EPlus(EVar(BoundVar(1)), EVar(FreeVar(1))), - GInt(7), - GInt(8).prepend(EVar(FreeVar(2)), 0) - ), - remainder = Some(FreeVar(0)) - ), - depth = 0 + ESetN( + Seq(EPlusN(BoundVarN(1), FreeVarN(1)), GIntN(7), ParN.combine(GIntN(8), FreeVarN(2))), + Some(FreeVarN(0)) ) ) val newBindings = List( @@ -169,18 +131,7 @@ class CollectMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](map, inputs).value result.par should be( - inputs.par.prepend( - ParMap( - List[(Par, Par)]( - (GInt(7), GString("Seven")), - (EVar(BoundVar(1)), EVar(FreeVar(1))) - ), - locallyFree = BitSet(1), - connectiveUsed = true, - remainder = Some(Var(FreeVar(0))) - ), - depth = 0 - ) + EMapN(Seq(GIntN(7) -> GStringN("Seven"), BoundVarN(1) -> FreeVarN(1)), Some(FreeVarN(0))) ) val newBindings = List( ("Z", ProcSort, SourcePosition(0, 0)), diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundMatcherSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundMatcherSpec.scala index 6e5734e8359..89c1b2f9a4d 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundMatcherSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/GroundMatcherSpec.scala @@ -1,35 +1,31 @@ package coop.rchain.rholang.interpreter.compiler.normalizer +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ - import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import 
coop.rchain.models.Expr.ExprInstance._ -import coop.rchain.models._ -import coop.rchain.models.rholang.implicits._ -import cats.Eval -import coop.rchain.catscontrib.effect.implicits.sEval - class GroundMatcherSpec extends AnyFlatSpec with Matchers { "GroundInt" should "Compile as GInt" in { - val gi = new GroundInt("7") - val expectedResult: Expr = GInt(7) + val gi = new GroundInt("7") + val expectedResult = GIntN(7) GroundNormalizeMatcher.normalizeMatch[Eval](gi).value should be(expectedResult) } "Positive groundBigInt" should "Compile GBigInt" in { - val gbi = new GroundBigInt("9999999999999999999999999999999999999999") - val expectedResult: Expr = GBigInt(BigInt("9999999999999999999999999999999999999999")) + val gbi = new GroundBigInt("9999999999999999999999999999999999999999") + val expectedResult = GBigIntN(BigInt("9999999999999999999999999999999999999999")) GroundNormalizeMatcher.normalizeMatch[Eval](gbi).value should be(expectedResult) } "GroundString" should "Compile as GString" in { - val gs = new GroundString("\"String\"") - val expectedResult: Expr = GString("String") + val gs = new GroundString("\"String\"") + val expectedResult = GStringN("String") GroundNormalizeMatcher.normalizeMatch[Eval](gs).value should be(expectedResult) } "GroundUri" should "Compile as GUri" in { - val gu = new GroundUri("`rho:uri`") - val expectedResult: Expr = GUri("rho:uri") + val gu = new GroundUri("`rho:uri`") + val expectedResult = GUriN("rho:uri") GroundNormalizeMatcher.normalizeMatch[Eval](gu).value should be(expectedResult) } } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameMatcherSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameMatcherSpec.scala index 677469cd7e9..3f6f98bb489 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameMatcherSpec.scala +++ 
b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/NameMatcherSpec.scala @@ -1,33 +1,24 @@ package coop.rchain.rholang.interpreter.compiler.normalizer +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval +import coop.rchain.models._ +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn._ +import coop.rchain.rholang.interpreter.compiler._ +import coop.rchain.rholang.interpreter.errors._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.models.Expr.ExprInstance._ -import coop.rchain.models.Var.VarInstance._ -import coop.rchain.models._ -import coop.rchain.rholang.interpreter.errors._ -import coop.rchain.models.rholang.implicits._ -import coop.rchain.rholang.interpreter.compiler.{ - BoundMapChain, - FreeMap, - NameSort, - NameVisitInputs, - ProcSort, - SourcePosition, - VarSort -} -import cats.Eval -import coop.rchain.catscontrib.effect.implicits.sEval class NameMatcherSpec extends AnyFlatSpec with Matchers { val inputs = NameVisitInputs(BoundMapChain.empty[VarSort], FreeMap.empty[VarSort]) implicit val normalizerEnv: Map[String, Par] = Map.empty "NameWildcard" should "add a wildcard count to knownFree" in { - val nw = new NameWildcard() - val result = NameNormalizeMatcher.normalizeMatch[Eval](nw, inputs).value - val expectedResult: Par = EVar(Wildcard(Var.WildcardMsg())) + val nw = new NameWildcard() + val result = NameNormalizeMatcher.normalizeMatch[Eval](nw, inputs).value + val expectedResult = WildcardN result.par should be(expectedResult) result.freeMap.count shouldEqual 1 } @@ -38,14 +29,14 @@ class NameMatcherSpec extends AnyFlatSpec with Matchers { val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", NameSort, SourcePosition(0, 0)))) - val result = NameNormalizeMatcher.normalizeMatch[Eval](nvar, boundInputs).value - val expectedResult: Par = 
EVar(BoundVar(0)) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nvar, boundInputs).value + val expectedResult = BoundVarN(0) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } "NameVar" should "Compile as FreeVar if it's not in env" in { - val result = NameNormalizeMatcher.normalizeMatch[Eval](nvar, inputs).value - val expectedResult: Par = EVar(FreeVar(0)) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nvar, inputs).value + val expectedResult = FreeVarN(0) result.par should be(expectedResult) result.freeMap shouldEqual (inputs.freeMap.put(("x", NameSort, SourcePosition(0, 0)))) @@ -72,24 +63,24 @@ class NameMatcherSpec extends AnyFlatSpec with Matchers { "NameQuote" should "compile to a var if the var is bound" in { val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) - val nqvar = new NameQuote(new PVar(new ProcVarVar("x"))) - val result = NameNormalizeMatcher.normalizeMatch[Eval](nqvar, boundInputs).value - val expectedResult: Par = EVar(BoundVar(0)) + val nqvar = new NameQuote(new PVar(new ProcVarVar("x"))) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nqvar, boundInputs).value + val expectedResult = BoundVarN(0) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } "NameQuote" should "return a free use if the quoted proc has a free var" in { - val result = NameNormalizeMatcher.normalizeMatch[Eval](nqvar, inputs).value - val expectedResult: Par = EVar(FreeVar(0)) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nqvar, inputs).value + val expectedResult = FreeVarN(0) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap.put(("x", ProcSort, SourcePosition(0, 0)))) } "NameQuote" should "compile to a ground" in { - val nqground = new NameQuote(new PGround(new GroundInt("7"))) - val result = NameNormalizeMatcher.normalizeMatch[Eval](nqground, inputs).value - val expectedResult: Par = GInt(7) + 
val nqground = new NameQuote(new PGround(new GroundInt("7"))) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nqground, inputs).value + val expectedResult = GIntN(7) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } @@ -98,8 +89,8 @@ class NameMatcherSpec extends AnyFlatSpec with Matchers { val nqeval = new NameQuote(new PEval(new NameVar("x"))) val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", NameSort, SourcePosition(0, 0)))) - val result = NameNormalizeMatcher.normalizeMatch[Eval](nqeval, boundInputs).value - val expectedResult: Par = EVar(BoundVar(0)) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nqeval, boundInputs).value + val expectedResult = BoundVarN(0) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } @@ -108,10 +99,9 @@ class NameMatcherSpec extends AnyFlatSpec with Matchers { val nqeval = new NameQuote(new PPar(new PEval(new NameVar("x")), new PEval(new NameVar("x")))) val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", NameSort, SourcePosition(0, 0)))) - val result = NameNormalizeMatcher.normalizeMatch[Eval](nqeval, boundInputs).value - val expectedResult: Par = EVar(BoundVar(0)).prepend(EVar(BoundVar(0)), 0) + val result = NameNormalizeMatcher.normalizeMatch[Eval](nqeval, boundInputs).value + val expectedResult = ParN.combine(BoundVarN(0), BoundVarN(0)) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } - } diff --git a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/ProcMatcherSpec.scala b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/ProcMatcherSpec.scala index 9d64ce5ecc3..5352666b779 100644 --- a/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/ProcMatcherSpec.scala +++ b/rholang/src/test/scala/coop/rchain/rholang/interpreter/compiler/normalizer/ProcMatcherSpec.scala @@ -1,11 +1,12 @@ package 
coop.rchain.rholang.interpreter.compiler.normalizer -import coop.rchain.models.Connective.ConnectiveInstance._ +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval import coop.rchain.models.Expr.ExprInstance._ import coop.rchain.models._ -import coop.rchain.models.Var.VarInstance._ -import coop.rchain.models.Var.WildcardMsg import coop.rchain.models.rholang.implicits._ +import coop.rchain.models.rholangn.Bindings._ +import coop.rchain.models.rholangn._ import coop.rchain.rholang.ast.rholang_mercury.Absyn.{ Bundle => _, Ground => _, @@ -15,16 +16,12 @@ import coop.rchain.rholang.ast.rholang_mercury.Absyn.{ } import coop.rchain.rholang.interpreter.compiler._ import coop.rchain.rholang.interpreter.errors._ -import cats.Eval import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import coop.rchain.catscontrib.effect.implicits.sEval - -import scala.collection.immutable.BitSet class ProcMatcherSpec extends AnyFlatSpec with Matchers { - val inputs = ProcVisitInputs(Par(), BoundMapChain.empty[VarSort], FreeMap.empty[VarSort]) + val inputs = ProcVisitInputs(NilN, BoundMapChain.empty[VarSort], FreeMap.empty[VarSort]) implicit val normalizerEnv: Map[String, Par] = Map.empty "PNil" should "Compile as no modification to the par object" in { @@ -41,13 +38,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pvar, boundInputs).value - result.par should be(inputs.par.prepend(EVar(BoundVar(0)), 0)) + result.par should be(BoundVarN(0)) result.freeMap should be(inputs.freeMap) - result.par.locallyFree.get should be(BitSet(0)) } "PVar" should "Compile as FreeVar if it's not in env" in { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pvar, inputs).value - result.par should be(inputs.par.prepend(EVar(FreeVar(0)), 0)) + result.par should 
be(FreeVarN(0)) result.freeMap shouldEqual (inputs.freeMap.put(("x", ProcSort, SourcePosition(0, 0)))) } @@ -74,7 +70,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", NameSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pEval, boundInputs).value - result.par should be(inputs.par.prepend(EVar(BoundVar(0)), 0)) + result.par should be(BoundVarN(0)) result.freeMap should be(inputs.freeMap) } "PEval" should "Collapse a quote" in { @@ -85,7 +81,9 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pEval, boundInputs).value - result.par should be(inputs.par.prepend(EVar(BoundVar(0)), 0).prepend(EVar(BoundVar(0)), 0)) + result.par should be( + ParProcN(Seq(BoundVarN(0), BoundVarN(0))) + ) result.freeMap should be(inputs.freeMap) } @@ -93,7 +91,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val pNot = new PNot(new PGround(new GroundBool(new BoolFalse()))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pNot, inputs).value - result.par should be(inputs.par.prepend(ENot(GBool(false)), 0)) + result.par should be(ENotN(GBoolN(false))) result.freeMap should be(inputs.freeMap) } @@ -103,7 +101,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pNeg, boundInputs).value - result.par should be(inputs.par.prepend(ENeg(EVar(BoundVar(0))), 0)) + result.par should be(ENegN(BoundVarN(0))) result.freeMap should be(inputs.freeMap) } @@ -113,7 +111,9 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = 
ProcNormalizeMatcher.normalizeMatch[Eval](pMult, boundInputs).value - result.par should be(inputs.par.prepend(EMult(EVar(BoundVar(0)), EVar(FreeVar(0))), 0)) + result.par should be( + EMultN(BoundVarN(0), FreeVarN(0)) + ) result.freeMap should be(inputs.freeMap.put(("y", ProcSort, SourcePosition(0, 0)))) } @@ -121,7 +121,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val pDiv = new PDiv(new PGround(new GroundInt("7")), new PGround(new GroundInt("2"))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pDiv, inputs).value - result.par should be(inputs.par.prepend(EDiv(GInt(7), GInt(2)), 0)) + result.par should be(EDivN(GIntN(7), GIntN(2))) result.freeMap should be(inputs.freeMap) } @@ -140,12 +140,9 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { ) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pPercentPercent, inputs).value result.par should be( - inputs.par.prepend( - EPercentPercent( - GString("Hi ${name}"), - ParMap(seq = List[(Par, Par)]((GString("name"), GString("Alice")))) - ), - 0 + EPercentPercentN( + GStringN("Hi ${name}"), + EMapN(Seq((GStringN("name"), GStringN("Alice")))) ) ) result.freeMap should be(inputs.freeMap) @@ -160,7 +157,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { ) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pAdd, boundInputs).value - result.par should be(inputs.par.prepend(EPlus(EVar(BoundVar(1)), EVar(BoundVar(0))), 0)) + result.par should be(EPlusN(BoundVarN(1), BoundVarN(0))) result.freeMap should be(inputs.freeMap) } @@ -182,7 +179,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMinus, boundInputs).value result.par should be( - inputs.par.prepend(EMinus(EVar(BoundVar(2)), EMult(EVar(BoundVar(1)), EVar(BoundVar(0)))), 0) + EMinusN(BoundVarN(2), EMultN(BoundVarN(1), BoundVarN(0))) ) result.freeMap should be(inputs.freeMap) } @@ -193,7 +190,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers 
{ new PGround(new GroundString("\"def\"")) ) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pPlusPlus, inputs).value - result.par should be(inputs.par.prepend(EPlusPlus(GString("abc"), GString("def")), 0)) + result.par should be(EPlusPlusN(GStringN("abc"), GStringN("def"))) result.freeMap should be(inputs.freeMap) } @@ -203,7 +200,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { new PGround(new GroundString("\"def\"")) ) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMinusMinus, inputs).value - result.par should be(inputs.par.prepend(EMinusMinus(GString("abc"), GString("def")), 0)) + result.par should be(EMinusMinusN(GStringN("abc"), GStringN("def"))) result.freeMap should be(inputs.freeMap) } @@ -214,9 +211,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val pSend = new PSend(new NameQuote(new PNil()), new SendSingle(), sentData) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pSend, inputs).value - result.par should be( - inputs.par.prepend(Send(Par(), List[Par](GInt(7), GInt(8)), false, BitSet())) - ) + result.par should be(SendN(NilN, Seq(GIntN(7), GIntN(8)))) result.freeMap should be(inputs.freeMap) } @@ -229,9 +224,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", NameSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pSend, boundInputs).value - result.par should be( - inputs.par.prepend(Send(EVar(BoundVar(0)), List[Par](GInt(7), GInt(8)), false, BitSet(0))) - ) + result.par should be(SendN(BoundVarN(0), Seq(GIntN(7), GIntN(8)))) result.freeMap should be(inputs.freeMap) } @@ -293,7 +286,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { "PPar" should "Compile both branches into a par object" in { val parGround = new PPar(new PGround(new GroundInt("7")), new PGround(new GroundInt("8"))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](parGround, inputs).value - result.par should 
be(inputs.par.copy(exprs = List(GInt(8), GInt(7)))) + result.par should be(ParProcN(Seq(GIntN(8), GIntN(7)))) result.freeMap should be(inputs.freeMap) } @@ -303,24 +296,22 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](parDoubleBound, boundInputs).value - result.par should be( - inputs.par.copy(exprs = List(EVar(BoundVar(0)), EVar(BoundVar(0))), locallyFree = BitSet(0)) - ) + result.par should be(ParProcN(Seq(BoundVarN(0), BoundVarN(0)))) result.freeMap should be(inputs.freeMap) } + "PPar" should "Not compile if both branches use the same free variable" in { val parDoubleFree = new PPar(new PVar(new ProcVarVar("x")), new PVar(new ProcVarVar("x"))) an[UnexpectedReuseOfProcContextFree] should be thrownBy { ProcNormalizeMatcher.normalizeMatch[Eval](parDoubleFree, inputs).value } } + "PPar" should "Accumulate free counts from both branches" in { val parDoubleFree = new PPar(new PVar(new ProcVarVar("x")), new PVar(new ProcVarVar("y"))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](parDoubleFree, inputs).value - result.par should be( - inputs.par.copy(exprs = List(EVar(FreeVar(1)), EVar(FreeVar(0))), connectiveUsed = true) - ) + result.par should be(ParProcN(Seq(FreeVarN(1), FreeVarN(0)))) result.freeMap should be( inputs.freeMap.put( List(("x", ProcSort, SourcePosition(0, 0)), ("y", ProcSort, SourcePosition(0, 0))) @@ -329,7 +320,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { } "PPar" should "normalize without StackOverflowError-s even for huge programs" in { - val hugePPar = (1 to 50000) + val hugePPar = (1 to 100) // TODO: Change to 50000 after creation stacksafe new rho Pars .map(x => new PGround(new GroundInt(x.toString))) .reduce((l: Proc, r: Proc) => new PPar(l, r)) noException should be thrownBy { @@ -363,26 +354,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val 
result = ProcNormalizeMatcher.normalizeMatch[Eval](pBasicContr, boundInputs).value result.par should be( - inputs.par.prepend( - Receive( - List( - ReceiveBind( - List(EVar(FreeVar(0)), EVar(FreeVar(1)), EVar(FreeVar(2))), - EVar(BoundVar(0)), - freeCount = 3 - ) - ), - Send( - EVar(BoundVar(2)), - List[Par](EPlus(EVar(BoundVar(1)), EVar(BoundVar(0)))), - false, - BitSet(0, 1, 2) - ), - true, // persistent - peek = false, - bindCount, - BitSet(0) - ) + ReceiveN( + Seq(ReceiveBindN(Seq(FreeVarN(0), FreeVarN(1), FreeVarN(2)), BoundVarN(0), freeCount = 3)), + SendN(BoundVarN(2), EPlusN(BoundVarN(1), BoundVarN(0))), + persistent = true, // persistent + peek = false, + bindCount ) ) result.freeMap should be(inputs.freeMap) @@ -414,21 +391,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pBasicContr, boundInputs).value result.par should be( - inputs.par.prepend( - Receive( - List( - ReceiveBind( - List(EVar(FreeVar(0)), Par().copy(exprs = List(GInt(5)))), - EVar(BoundVar(0)), - freeCount = 1 - ) - ), - Send(EVar(BoundVar(0)), List(Par().copy(exprs = List(GInt(5)))), false, BitSet(0)), - true, // persistent - peek = false, - bindCount, - BitSet(0) - ) + ReceiveN( + Seq(ReceiveBindN(Seq(FreeVarN(0), GIntN(5)), BoundVarN(0), freeCount = 1)), + SendN(BoundVarN(0), GIntN(5)), + persistent = true, // persistent + peek = false, + bindCount ) ) result.freeMap should be(inputs.freeMap) @@ -460,23 +428,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](basicInput, inputs).value result.par should be( - inputs.par.prepend( - Receive( - List( - ReceiveBind(List(EVar(FreeVar(0)), EVar(FreeVar(1))), Par(), freeCount = 2) - ), - Send( - EVar(BoundVar(1)), - List[Par](EVar(BoundVar(0))), - false, - BitSet(0, 1) - ), - persistent = false, - peek = false, - bindCount, - BitSet(), - connectiveUsed = false - ) + ReceiveN( + 
Seq(ReceiveBindN(Seq(FreeVarN(0), FreeVarN(1)), NilN, freeCount = 2)), + SendN(BoundVarN(1), BoundVarN(0)), + persistent = false, + peek = false, + bindCount ) ) result.freeMap should be(inputs.freeMap) @@ -486,7 +443,10 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { (for { basicInput <- Compiler[Eval].sourceToAST("""for ( x, y <<- @Nil ) { x!(*y) }""") result <- ProcNormalizeMatcher.normalizeMatch[Eval](basicInput, inputs) - } yield result.par.receives.head.peek shouldBe true).value + } yield result.par match { + case r: ReceiveN => r.peek shouldBe true + case _ => assert(false, "result.par did not match ReceiveN") + }).value } "PInput" should "Handle a more complicated receive" in { @@ -530,33 +490,15 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pInput, inputs).value result.par should be( - inputs.par.prepend( - Receive( - List( - ReceiveBind( - List(EVar(FreeVar(0)), EVar(FreeVar(1))), - Par(), - freeCount = 2 - ), - ReceiveBind( - List(EVar(FreeVar(0)), EVar(FreeVar(1))), - GInt(1), - freeCount = 2 - ) - ), - Par().copy( - sends = List( - Send(EVar(BoundVar(1)), List[Par](EVar(BoundVar(2))), false, BitSet(1, 2)), - Send(EVar(BoundVar(3)), List[Par](EVar(BoundVar(0))), false, BitSet(0, 3)) - ), - locallyFree = BitSet(0, 1, 2, 3) - ), - persistent = false, - peek = false, - bindCount, - BitSet(), - connectiveUsed = false - ) + ReceiveN( + List( + ReceiveBindN(Seq(FreeVarN(0), FreeVarN(1)), NilN, freeCount = 2), + ReceiveBindN(Seq(FreeVarN(0), FreeVarN(1)), GIntN(1), freeCount = 2) + ), + ParProcN(Seq(SendN(BoundVarN(1), BoundVarN(2)), SendN(BoundVarN(3), BoundVarN(0)))), + persistent = false, + peek = false, + bindCount ) ) result.freeMap should be(inputs.freeMap) @@ -585,28 +527,14 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val bindCount = 1 val pInput = new PInput(listReceipt, new PNil()) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pInput, 
inputs).value - val expected = inputs.par.prepend( - Receive( - List( - ReceiveBind( - List( - Par( - connectiveUsed = true, - exprs = List(EList(connectiveUsed = true, remainder = Some(FreeVar(0)))) - ) - ), - Par(), - freeCount = 1 - ) - ), - Par(), + val expected = + ReceiveN( + ReceiveBindN(Seq(EListN(Seq(), Some(FreeVarN(0)))), NilN, freeCount = 1), + NilN, persistent = false, peek = false, - bindCount, - BitSet(), - connectiveUsed = false + bindCount ) - ) result.par should be(expected) } @@ -756,14 +684,14 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pNew, inputs).value result.par should be( - inputs.par.prepend( - New( - bindCount = 3, - p = Send(EVar(BoundVar(2)), List[Par](GInt(7)), false, BitSet(2)) - .prepend(Send(EVar(BoundVar(1)), List[Par](GInt(8)), false, BitSet(1))) - .prepend(Send(EVar(BoundVar(0)), List[Par](GInt(9)), false, BitSet(0))), - uri = Vector.empty, - locallyFree = BitSet() + NewN( + bindCount = 3, + ParProcN( + Seq( + SendN(BoundVarN(2), GIntN(7)), + SendN(BoundVarN(1), GIntN(8)), + SendN(BoundVarN(0), GIntN(9)) + ) ) ) ) @@ -807,23 +735,21 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pNew, inputs).value result.par should be( - inputs.par.prepend( - New( - bindCount = 5, - p = Send(EVar(BoundVar(4)), List[Par](GInt(7)), false, BitSet(4)) - .prepend(Send(EVar(BoundVar(3)), List[Par](GInt(8)), false, BitSet(3))) - .prepend(Send(EVar(BoundVar(1)), List[Par](GInt(9)), false, BitSet(1))) - .prepend(Send(EVar(BoundVar(0)), List[Par](GInt(10)), false, BitSet(0))) - .prepend(Send(EVar(BoundVar(2)), List[Par](GInt(11)), false, BitSet(2))), - uri = Vector("rho:registry", "rho:stdout"), - locallyFree = BitSet() - ) + NewN( + bindCount = 5, + p = ParProcN( + Seq( + SendN(BoundVarN(4), GIntN(7)), + SendN(BoundVarN(3), GIntN(8)), + SendN(BoundVarN(1), GIntN(9)), + SendN(BoundVarN(0), GIntN(10)), + 
SendN(BoundVarN(2), GIntN(11)) + ) + ), + uri = Vector("rho:registry", "rho:stdout"), + Seq() ) ) - result.par.news(0).p.sends.map(x => x.locallyFree.get) should be( - List(BitSet(2), BitSet(0), BitSet(1), BitSet(3), BitSet(4)) - ) - result.par.news(0).p.locallyFree.get should be(BitSet(0, 1, 2, 3, 4)) } "PMatch" should "Handle a match inside a for comprehension" in { @@ -860,24 +786,21 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val bindCount = 1 val expectedResult = - inputs.par - .prepend(Send(Par(), List[Par](GInt(47)), false, BitSet())) - .prepend( - Receive( - List(ReceiveBind(List(EVar(FreeVar(0))), Par(), freeCount = 1)), - Match( - EVar(BoundVar(0)), - List(MatchCase(GInt(42), Par()), MatchCase(EVar(FreeVar(0)), Par(), freeCount = 1)), - BitSet(0) + ParProcN( + Seq( + SendN(NilN, GIntN(47)), + ReceiveN( + Seq(ReceiveBindN(FreeVarN(0), NilN, freeCount = 1)), + MatchN( + BoundVarN(0), + Seq(MatchCaseN(GIntN(42), NilN), MatchCaseN(FreeVarN(0), NilN, freeCount = 1)) ), persistent = false, peek = false, - bindCount, - BitSet(), - connectiveUsed = false + bindCount ) ) - + ) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } @@ -896,25 +819,18 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMatch, boundInputs).value - - val expectedResult = - inputs.par.prepend( - Match( - EVar(BoundVar(0)), - List( - MatchCase( - EList(Seq[Par](EVar(FreeVar(0)), EVar(Wildcard(Var.WildcardMsg()))), BitSet(), true), - Par(), - freeCount = 1 - ), - MatchCase(EVar(Wildcard(Var.WildcardMsg())), Par()) - ), - BitSet(0), - false - ) + val expectedResult = MatchN( + BoundVarN(0), + Seq( + MatchCaseN( + EListN(Seq(FreeVarN(0), WildcardN)), + NilN, + freeCount = 1 + ), + MatchCaseN(WildcardN, NilN) ) + ) result.par should be(expectedResult) - 
result.par.matches.head.cases.head.freeCount should be(1) } "PIf" should "Desugar to match with true/false cases" in { @@ -927,16 +843,9 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](basicInput, inputs).value result.par should be( - inputs.par.prepend( - Match( - GBool(true), - List( - MatchCase(GBool(true), Send(Par(), List[Par](GInt(47)), false, BitSet())), - MatchCase(GBool(false), Par()) - // TODO: Fill in type error case - ), - BitSet() - ) + MatchN( + GBoolN(true), + Seq(MatchCaseN(GBoolN(true), SendN(NilN, GIntN(47))), MatchCaseN(GBoolN(false), NilN)) ) ) result.freeMap should be(inputs.freeMap) @@ -946,16 +855,19 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val rightProc = new PIf(new PGround(new GroundBool(new BoolTrue())), new PGround(new GroundInt("10"))) - val input = inputs.copy(par = Par(exprs = Seq(GInt(7)))) + val input = inputs.copy(par = GIntN(7)) val result = ProcNormalizeMatcher.normalizeMatch[Eval](rightProc, input).value result.freeMap should be(inputs.freeMap) result.par should be( - inputs.par.copy( - matches = Seq( - Match(GBool(true), Seq(MatchCase(GBool(true), GInt(10)), MatchCase(GBool(false), Par()))) - ), - exprs = Seq(GInt(7)) + ParProcN( + Seq( + MatchN( + GBoolN(true), + Seq(MatchCaseN(GBoolN(true), GIntN(10)), MatchCaseN(GBoolN(false), NilN)) + ), + GIntN(7) + ) ) ) } @@ -983,34 +895,28 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](basicInput, inputs).value result.par should be( - inputs.par.prepend( - Match( - EEq(GInt(47), GInt(47)), - List( - MatchCase( - GBool(true), - New( - bindCount = 1, - p = Send(EVar(BoundVar(0)), List[Par](GInt(47)), false, BitSet(0)), - uri = Vector.empty, - locallyFree = BitSet() - ) - ), - MatchCase( - GBool(false), - New( - bindCount = 1, - p = Send(EVar(BoundVar(0)), List[Par](GInt(47)), false, BitSet(0)), - uri = Vector.empty, - locallyFree = 
BitSet() - ) + MatchN( + EEqN(GIntN(47), GIntN(47)), + Seq( + MatchCaseN( + GBoolN(true), + NewN( + bindCount = 1, + p = SendN(BoundVarN(0), GIntN(47)) ) - // TODO: Fill in type error case ), - BitSet() + MatchCaseN( + GBoolN(false), + NewN( + bindCount = 1, + p = SendN(BoundVarN(0), GIntN(47)) + ) + ) + // TODO: Fill in type error case ) ) ) + result.freeMap should be(inputs.freeMap) } "PMatch" should "Fail if a free variable is used twice in the target" in { @@ -1054,25 +960,20 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](input, inputs).value val bindCount = 2 - val matchTarget = EVar(FreeVar(1)).prepend(EVar(FreeVar(0)), 0) + val matchTarget = ParProcN(Seq(FreeVarN(1), FreeVarN(0))) val expectedResult = - inputs.par.prepend( - Receive( - List( - ReceiveBind( - List( - Match(matchTarget, List(MatchCase(GInt(47), Par())), connectiveUsed = true) - ), - Par(), - freeCount = 2 - ) + ReceiveN( + ReceiveBindN( + Seq( + MatchN(matchTarget, Seq(MatchCaseN(GIntN(47), NilN))) ), - Par(), - persistent = false, - peek = false, - bindCount, - connectiveUsed = false - ) + NilN, + freeCount = 2 + ), + NilN, + persistent = false, + peek = false, + bindCount ) result.par should be(expectedResult) @@ -1090,8 +991,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMethod, boundInputs).value val expectedResult = - inputs.par - .prepend(EMethod(methodName, EVar(BoundVar(0)), List(GInt(0)), BitSet(0), false), 0) + EMethodN(methodName, BoundVarN(0), GIntN(0)) result.par === expectedResult && result.freeMap === inputs.freeMap } methods.forall(m => test(m)) @@ -1102,14 +1002,8 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val pbundle = new PBundle(new BundleReadWrite(), new PVar(new ProcVarVar("x"))) val boundInputs = inputs.copy(boundMapChain = 
inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) - - val result = ProcNormalizeMatcher.normalizeMatch[Eval](pbundle, boundInputs).value - - val expectedResult = - inputs.par - .withBundles(List(Bundle(EVar(BoundVar(0)), writeFlag = true, readFlag = true))) - .withLocallyFree(BitSet(0)) - + val result = ProcNormalizeMatcher.normalizeMatch[Eval](pbundle, boundInputs).value + val expectedResult = BundleN(BoundVarN(0), writeFlag = true, readFlag = true) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) } @@ -1172,9 +1066,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) def expectedResults(writeFlag: Boolean, readFlag: Boolean) = - inputs.par - .withBundles(List(Bundle(EVar(BoundVar(0)), writeFlag = writeFlag, readFlag = readFlag))) - .withLocallyFree(BitSet(0)) + BundleN(BoundVarN(0), writeFlag = writeFlag, readFlag = readFlag) def test(readOnly: Boolean, writeOnly: Boolean) = withClue(s"for bundle with flags readOnly=$readOnly writeOnly=$writeOnly") { @@ -1198,9 +1090,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val boundInputs = inputs.copy(boundMapChain = inputs.boundMapChain.put(("x", ProcSort, SourcePosition(0, 0)))) - val expectedResults = inputs.par - .withBundles(List(Bundle(EVar(BoundVar(0)), writeFlag = false, readFlag = true))) - .withLocallyFree(BitSet(0)) + val expectedResults = BundleN(BoundVarN(0), writeFlag = false, readFlag = true) val result = ProcNormalizeMatcher.normalizeMatch[Eval](nestedBundle, input = boundInputs).value @@ -1212,10 +1102,8 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { "PNegation" should "delegate, but not count any free variables inside" in { val proc = new PNegation(new PVar(new ProcVarVar("x"))) - val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value - val expectedResult = inputs.par - 
.addConnectives(Connective(ConnNotBody(EVar(FreeVar(0))))) - .withConnectiveUsed(true) + val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value + val expectedResult = ConnNotN(FreeVarN(0)) result.par should be(expectedResult) result.freeMap.levelBindings should be(inputs.freeMap.levelBindings) @@ -1225,12 +1113,8 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { "PConjunction" should "delegate, and count any free variables inside" in { val proc = new PConjunction(new PVar(new ProcVarVar("x")), new PVar(new ProcVarVar("y"))) - val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value - val expectedResult = inputs.par - .addConnectives( - Connective(ConnAndBody(ConnectiveBody(Vector(EVar(FreeVar(0)), EVar(FreeVar(1)))))) - ) - .withConnectiveUsed(true) + val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value + val expectedResult = ConnAndN(Seq(FreeVarN(0), FreeVarN(1))) result.par should be(expectedResult) @@ -1245,12 +1129,8 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { "PDisjunction" should "delegate, but not count any free variables inside" in { val proc = new PDisjunction(new PVar(new ProcVarVar("x")), new PVar(new ProcVarVar("x"))) - val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value - val expectedResult = inputs.par - .addConnectives( - Connective(ConnOrBody(ConnectiveBody(Vector(EVar(FreeVar(0)), EVar(FreeVar(0)))))) - ) - .withConnectiveUsed(true) + val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, inputs).value + val expectedResult = ConnOrN(FreeVarN(0), FreeVarN(0)) result.par should be(expectedResult) result.freeMap.levelBindings should be(inputs.freeMap.levelBindings) @@ -1268,24 +1148,17 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val proc = new PMatch(new PGround(new GroundInt("7")), listCases) val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, boundInputs).value - val expectedResult = inputs.par - .addMatches( 
- Match( - target = GInt(7), - cases = List( - MatchCase( - pattern = Connective(VarRefBody(VarRef(0, 1))).withLocallyFree(BitSet(0)), - source = Par() - ) - ), - locallyFree = BitSet(0) + val expectedResult = MatchN( + target = GIntN(7), + cases = Seq( + MatchCaseN( + pattern = ConnVarRefN(0, 1), + source = NilN ) ) - .withLocallyFree(BitSet(0)) + ) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) - // Make sure that variable references in patterns are reflected - result.par.locallyFree.get should be(BitSet(0)) } it should "do a deep lookup in a receive case" in { @@ -1313,23 +1186,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { // format: off val result = ProcNormalizeMatcher.normalizeMatch[Eval](proc, boundInputs).value - val expectedResult = inputs.par - .addReceives( - Receive( - binds = List( - ReceiveBind( - patterns = List( - Connective(VarRefBody(VarRef(0, 1))).withLocallyFree(BitSet(0))), - source = Par())), - body = Par(), - persistent = false, - bindCount = 0, - locallyFree = BitSet(0))) - .withLocallyFree(BitSet(0)) + val expectedResult = ReceiveN( + ReceiveBindN(ConnVarRefN(0, 1), NilN), + body = NilN, + bindCount = 0) result.par should be(expectedResult) result.freeMap should be(inputs.freeMap) - result.par.locallyFree.get should be(BitSet(0)) - // format: on } "PSimpleType" should "result in a connective of the correct type" in { @@ -1347,24 +1209,12 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val resultUri = ProcNormalizeMatcher.normalizeMatch[Eval](procUri, inputs).value val resultByteArray = ProcNormalizeMatcher.normalizeMatch[Eval](procByteArray, inputs).value - resultBool.par should be( - Par(connectives = Seq(Connective(ConnBool(true))), connectiveUsed = true) - ) - resultInt.par should be( - Par(connectives = Seq(Connective(ConnInt(true))), connectiveUsed = true) - ) - resultBigInt.par should be( - Par(connectives = Seq(Connective(ConnBigInt(true))), connectiveUsed = true) - ) - 
resultString.par should be( - Par(connectives = Seq(Connective(ConnString(true))), connectiveUsed = true) - ) - resultUri.par should be( - Par(connectives = Seq(Connective(ConnUri(true))), connectiveUsed = true) - ) - resultByteArray.par should be( - Par(connectives = Seq(Connective(ConnByteArray(true))), connectiveUsed = true) - ) + resultBool.par should be(ConnBoolN) + resultInt.par should be(ConnIntN) + resultBigInt.par should be(ConnBigIntN) + resultString.par should be(ConnStringN) + resultUri.par should be(ConnUriN) + resultByteArray.par should be(ConnByteArrayN) } "1 matches _" should "normalize correctly" in { @@ -1372,7 +1222,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMatches, inputs).value - val expectedPar = inputs.par.prepend(EMatches(GInt(1), EVar(Wildcard(WildcardMsg()))), 0) + val expectedPar = EMatchesN(GIntN(1), WildcardN) result.par shouldBe expectedPar result.par.connectiveUsed should be(false) @@ -1383,7 +1233,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMatches, inputs).value - val expectedPar = inputs.par.prepend(EMatches(GInt(1), GInt(2)), 0) + val expectedPar = EMatchesN(GIntN(1), GIntN(2)) result.par shouldBe expectedPar result.par.connectiveUsed should be(false) @@ -1394,7 +1244,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { new PMatches(new PGround(new GroundInt("1")), new PNegation(new PGround(new GroundInt("1")))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMatches, inputs).value - val expectedPar = inputs.par.prepend(EMatches(GInt(1), Connective(ConnNotBody(GInt(1)))), 0) + val expectedPar = EMatchesN(GIntN(1), ConnNotN(GIntN(1))) result.par shouldBe expectedPar result.par.connectiveUsed should be(false) @@ -1405,7 +1255,7 @@ class ProcMatcherSpec extends AnyFlatSpec with Matchers { new PMatches(new PNegation(new PGround(new GroundInt("1"))), new PGround(new 
GroundInt("1"))) val result = ProcNormalizeMatcher.normalizeMatch[Eval](pMatches, inputs).value - val expectedPar = inputs.par.prepend(EMatches(Connective(ConnNotBody(GInt(1))), GInt(1)), 0) + val expectedPar = EMatchesN(ConnNotN(GIntN(1)), GIntN(1)) result.par shouldBe expectedPar result.par.connectiveUsed should be(true) diff --git a/rspace-bench/src/test/scala/coop/rchain/models/rholang/ParBench.scala b/rspace-bench/src/test/scala/coop/rchain/models/rholang/ParBench.scala new file mode 100644 index 00000000000..19f712bd5ce --- /dev/null +++ b/rspace-bench/src/test/scala/coop/rchain/models/rholang/ParBench.scala @@ -0,0 +1,167 @@ +package coop.rchain.models.rholang + +import cats.Eval +import coop.rchain.catscontrib.effect.implicits.sEval +import org.openjdk.jmh.annotations._ +import scodec.bits.ByteVector + +import java.util.concurrent.TimeUnit +import scala.annotation.tailrec +import coop.rchain.models.Expr.ExprInstance._ +import coop.rchain.models._ +import coop.rchain.models.serialization.implicits._ +import coop.rchain.shared.Serialize +import coop.rchain.models.rholang.implicits._ + +@Fork(value = 1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OperationsPerInvocation(value = 100) +@State(Scope.Benchmark) +class ParBench { + + @tailrec + final def createNestedPar(n: Int, par: Par = Par(exprs = Seq(GInt(0)))): Par = + if (n == 0) par + else createNestedPar(n - 1, Par(exprs = Seq(EList(Seq(par))))) + + final def createParProc(n: Int): Par = { + val elSize = 33 + def el(i: Int) = EListBody(EList(Seq.fill(elSize)(GInt(i.toLong)))) + Par(exprs = Seq.tabulate(n)(el)) + } + + final def appendTest(n: Int): Par = { + val elSize = 33 + def el(i: Int) = EListBody(EList(Seq.fill(elSize)(GInt(i.toLong)))) + val seq = Seq.tabulate(n)(el) + seq.foldLeft(Par()) { (acc, p) => + acc.addExprs(p) + } + } + + val nestedSize: Int = 500 + var nestedPar: Par = _ + var nestedAnotherPar: Par = _ + var nestedParSData: ByteVector = _ + + val parProcSize: Int = 500 + var 
parProc: Par = _ + var parProcAnother: Par = _ + var parProcSData: ByteVector = _ + + @Setup(Level.Iteration) + def setup(): Unit = { + nestedPar = createNestedPar(nestedSize) + nestedAnotherPar = createNestedPar(nestedSize) + nestedParSData = Serialize[Par].encode(nestedPar) + + parProc = createParProc(parProcSize); parProcAnother = createParProc(parProcSize) + parProcSData = Serialize[Par].encode(parProc) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedCreation(): Unit = { + val _ = createNestedPar(nestedSize) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedSerialization(): Unit = { + val _ = Serialize[Par].encode(nestedPar) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedDeserialization(): Unit = { + val _ = Serialize[Par].decode(nestedParSData) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedSerializedSize(): Unit = { + val _ = ProtoM.serializedSize(nestedPar).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedHash(): Unit = { + val _ = HashM[Par].hash[Eval](nestedPar).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedEqual(): Unit = { + val _ = EqualM[Par].equal[Eval](nestedPar, nestedAnotherPar).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedAdd(): Unit = { + val _ = nestedPar.addExprs(GInt(0)) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcCreation(): Unit = { + val _ = createParProc(parProcSize) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcSerialization(): Unit = { + val _ = Serialize[Par].encode(parProc) + 
} + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcDeserialization(): Unit = { + val _ = Serialize[Par].decode(parProcSData) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcSerializedSize(): Unit = { + val _ = ProtoM.serializedSize(parProc).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcHash(): Unit = { + val _ = HashM[Par].hash[Eval](parProc).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcEqual(): Unit = { + val _ = EqualM[Par].equal[Eval](parProc, parProcAnother).value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcAdd(): Unit = { + val _ = parProc.addExprs(GInt(0)) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def manyAppends(): Unit = { + val _ = appendTest(parProcSize) + } +} diff --git a/rspace-bench/src/test/scala/coop/rchain/models/rholangn/ParBench.scala b/rspace-bench/src/test/scala/coop/rchain/models/rholangn/ParBench.scala new file mode 100644 index 00000000000..c5ed6af9725 --- /dev/null +++ b/rspace-bench/src/test/scala/coop/rchain/models/rholangn/ParBench.scala @@ -0,0 +1,163 @@ +package coop.rchain.models.rholangn + +import coop.rchain.models.rholangn.parmanager.{Manager, Serialization} +import org.openjdk.jmh.annotations._ + +import java.util.concurrent.TimeUnit +import scala.annotation.tailrec + +@Fork(value = 1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OperationsPerInvocation(value = 100) +@State(Scope.Benchmark) +class ParBench { + + @tailrec + final def createNestedPar(n: Int, par: ParN = GIntN(0)): ParN = + if (n == 0) par + else createNestedPar(n - 1, EListN(par)) + + final def createParProc(n: Int): ParN = { + val elSize = 33 + def el(i: 
Int) = EListN(Seq.fill(elSize)(GIntN(i.toLong))) + val seq = Seq.tabulate(n)(el) + ParN.makeParProc(seq) + } + + final def appendTest(n: Int): ParN = { + val elSize = 33 + def el(i: Int) = EListN(Seq.fill(elSize)(GIntN(i.toLong))) + + val seq = Seq.tabulate(n)(el) + seq.foldLeft(NilN: ParN) { (acc, p) => + ParN.combine(acc, p) + } + } + val nestedSize: Int = 500 + var nestedPar: ParN = _ + var nestedAnotherPar: ParN = _ + var nestedParSData: Array[Byte] = _ + + val parProcSize: Int = 500 + var parProc: ParN = _ + var parProcAnother: ParN = _ + var parProcSData: Array[Byte] = _ + + @Setup(Level.Iteration) + def setup(): Unit = { + nestedPar = createNestedPar(nestedSize) + nestedAnotherPar = createNestedPar(nestedSize) + nestedParSData = nestedPar.serialized.value + + parProc = createParProc(parProcSize) + parProcAnother = createParProc(parProcSize) + parProcSData = parProc.serialized.value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedCreation(): Unit = { + val _ = createNestedPar(nestedSize) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedSerialization(): Unit = { + val _ = nestedPar.serialized.value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedDeserialization(): Unit = { + val _ = Manager.protoDeserialize(nestedParSData) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedSerializedSize(): Unit = { + val _ = nestedPar.serializedSize.value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedHash(): Unit = { + val _ = nestedPar.rhoHash + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedEqual(): Unit = { + val _ = nestedPar.equals(nestedAnotherPar) + } + + @Benchmark + 
@BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def nestedAdd(): Unit = + ParProcN(Seq(nestedPar, GIntN(0))) + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcCreation(): Unit = { + val _ = createParProc(parProcSize) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcSerialization(): Unit = { + val _ = parProc.serialized.value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcDeserialization(): Unit = { + val _ = Manager.protoDeserialize(parProcSData) + } + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcSerializedSize(): Unit = { + val _ = parProc.serializedSize.value + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcHash(): Unit = { + val _ = parProc.rhoHash + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcEqual(): Unit = { + val _ = parProc.equals(parProcAnother) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def parProcAdd(): Unit = { + val _ = parProc match { + case proc: ParProcN => ParN.combine(proc, GIntN(0)) + case _ => assert(false) + } + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def manyAppends(): Unit = { + val _ = appendTest(parProcSize) + } +} diff --git a/rspace-bench/src/test/scala/coop/rchain/models/rholangn/SetBench.scala b/rspace-bench/src/test/scala/coop/rchain/models/rholangn/SetBench.scala new file mode 100644 index 00000000000..c093816fc5c --- /dev/null +++ b/rspace-bench/src/test/scala/coop/rchain/models/rholangn/SetBench.scala @@ -0,0 +1,26 @@ +package coop.rchain.models.rholangn + +import org.openjdk.jmh.annotations._ + 
+import java.util.concurrent.TimeUnit +import scala.util.Random + +@Fork(value = 1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OperationsPerInvocation(value = 100) +@State(Scope.Benchmark) +class SetBench { + + final def setCreation(n: Int): ESetN = + (1 to n).foldLeft(ESetN()) { (acc, _) => + acc + GIntN(Random.nextLong()) + } + + @Benchmark + @BenchmarkMode(Array(Mode.AverageTime)) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + def manyAppends(): Unit = { + val _ = setCreation(5000) + } +}