package io.rhonix.rholang

import coop.rchain.models.*
import io.rhonix.rholang.types.*

/**
  * Public facade for converting between the protobuf-generated Rholang model
  * (`coop.rchain.models`) and the new in-memory AST (`io.rhonix.rholang.types`).
  *
  * Every method is a thin delegation to [[BindingsToProto]] / [[BindingsFromProto]];
  * no logic lives here.
  *
  * NOTE(review): `toProtoVarOpt` / `fromProtoVarOpt` take and return plain (non-Option)
  * values and delegate to the exact same conversions as `toProtoVar` / `fromProtoVar` —
  * the "Opt" suffix looks misleading. Confirm intent before relying on the name.
  */
object Bindings {
  // ParN -> protobuf Par (single value and element-wise for sequences).
  def toProto(p: ParN): Par                             = BindingsToProto.toProto(p)
  def toProto(ps: Seq[ParN]): Seq[Par]                  = ps.map(toProto)
  // Duplicate of toProtoVar (see object scaladoc).
  def toProtoVarOpt(p: VarN): Var                       = BindingsToProto.toVar(p)
  def toProtoExpr(e: ExprN): Expr                       = BindingsToProto.toExpr(e)
  def toProtoVar(v: VarN): Var                          = BindingsToProto.toVar(v)
  def toProtoUnforgeable(u: UnforgeableN): GUnforgeable = BindingsToProto.toUnforgeable(u)
  def toProtoConnective(c: ConnectiveN): Connective     = BindingsToProto.toConnective(c)
  def toProtoSend(x: SendN): Send                       = BindingsToProto.toSend(x)
  def toProtoReceive(x: ReceiveN): Receive              = BindingsToProto.toReceive(x)
  def toProtoMatch(x: MatchN): Match                    = BindingsToProto.toMatch(x)
  def toProtoNew(x: NewN): New                          = BindingsToProto.toNew(x)

  // Protobuf Par -> ParN (single value and element-wise for sequences).
  def fromProto(p: Par): ParN                                = BindingsFromProto.fromProto(p)
  def fromProto(ps: Seq[Par]): Seq[ParN]                     = ps.map(fromProto)
  // Duplicate of fromProtoVar (see object scaladoc).
  def fromProtoVarOpt(p: Var): VarN                          = BindingsFromProto.fromVar(p)
  def fromProtoExpr(e: Expr): ExprN                          = BindingsFromProto.fromExpr(e)
  def fromProtoVar(v: Var): VarN                             = BindingsFromProto.fromVar(v)
  def fromProtoUnforgeable(u: GUnforgeable): UnforgeableN    = BindingsFromProto.fromUnforgeable(u)
  def fromProtoConnective(c: Connective): ConnectiveN        = BindingsFromProto.fromConnective(c)
  def fromProtoSend(x: Send): SendN                          = BindingsFromProto.fromSend(x)
  def fromProtoReceive(x: Receive): ReceiveN                 = BindingsFromProto.fromReceive(x)
  def fromProtoMatch(x: Match): MatchN                       = BindingsFromProto.fromMatch(x)
  def fromProtoNew(x: New): NewN                             = BindingsFromProto.fromNew(x)

}
package io.rhonix.rholang

import cats.syntax.all.*
import coop.rchain.models.Connective.ConnectiveInstance.*
import coop.rchain.models.Expr.ExprInstance.*
import coop.rchain.models.GUnforgeable.UnfInstance.*
import coop.rchain.models.Var.VarInstance.*
import coop.rchain.models.*
import io.rhonix.rholang.types.*
import scalapb.GeneratedMessage

import scala.annotation.unused

/**
  * Converts protobuf-generated Rholang model values (`coop.rchain.models`)
  * into the new in-memory AST (`io.rhonix.rholang.types`, `*N` types).
  *
  * Mirror of [[BindingsToProto]]; the two are expected to round-trip.
  */
object BindingsFromProto {

  /**
    * Converts a protobuf [[Par]] to a [[ParN]].
    *
    * Collects all sub-process collections of the Par, converts each element and
    * re-packs the result: empty -> [[NilN]], one element -> that element,
    * otherwise -> [[ParProcN]].
    *
    * NOTE(review): `.filter(_.nonEmpty)` is redundant before `.flatten` (empty
    * sub-sequences contribute nothing either way). The `p :: Nil` pattern relies
    * on the sequence being a `List` (which `Seq(...)` produces by default).
    */
  def fromProto(p: Par): ParN = {
    val ps =
      Seq(p.sends, p.receives, p.news, p.exprs, p.matches, p.unforgeables, p.bundles, p.connectives)
        .filter(_.nonEmpty)
        .flatten
        .map(fromProtoMessage)
    ps match {
      case Nil      => NilN
      case p :: Nil => p
      case _        => ParProcN(ps)
    }
  }

  /**
    * Dispatches a single protobuf sub-message to its dedicated converter.
    *
    * NOTE(review): this match has no default case — an unexpected message type
    * throws a bare `MatchError`, unlike the sibling converters below which throw
    * a descriptive `Exception`. Confirm whether that asymmetry is intended.
    */
  private def fromProtoMessage(m: GeneratedMessage): ParN = m match {

    /** Basic types */
    case x: Send    => fromSend(x)
    case x: Receive => fromReceive(x)
    case x: Match   => fromMatch(x)
    case x: New     => fromNew(x)

    /** Expressions */
    case e: Expr => fromExpr(e)

    /** Unforgeable names */
    case u: GUnforgeable => fromUnforgeable(u)

    /** Connective */
    case c: Connective => fromConnective(c)

    /** Other types */
    case x: Bundle => fromBundle(x)
  }

  /** Converts a protobuf [[Expr]] to an [[ExprN]] by dispatching on its instance type. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def fromExpr(e: Expr): ExprN = e.exprInstance match {

    /** Ground types */
    case x: GBool      => fromGBool(x)
    case x: GInt       => fromGInt(x)
    case x: GBigInt    => fromGBigInt(x)
    case x: GString    => fromGString(x)
    case x: GByteArray => fromGByteArray(x)
    case x: GUri       => fromGUri(x)

    /** Collections */
    case x: EListBody  => fromEList(x.value)
    case x: ETupleBody => fromETuple(x.value)
    case x: ESetBody   => fromParSet(x.value)
    case x: EMapBody   => fromParMap(x.value)

    /** Vars */
    // A var expression is represented directly by a VarN (VarN is an ExprN here).
    case x: EVarBody => fromVar(x.value.v)

    /** Operations */
    case x: ENegBody            => fromENeg(x.value)
    case x: ENotBody            => fromENot(x.value)
    case x: EPlusBody           => fromEPlus(x.value)
    case x: EMinusBody          => fromEMinus(x.value)
    case x: EMultBody           => fromEMult(x.value)
    case x: EDivBody            => fromEDiv(x.value)
    case x: EModBody            => fromEMod(x.value)
    case x: ELtBody             => fromELt(x.value)
    case x: ELteBody            => fromELte(x.value)
    case x: EGtBody             => fromEGt(x.value)
    case x: EGteBody            => fromEGte(x.value)
    case x: EEqBody             => fromEEq(x.value)
    case x: ENeqBody            => fromENeq(x.value)
    case x: EAndBody            => fromEAnd(x.value)
    case x: EShortAndBody       => fromEShortAnd(x.value)
    case x: EOrBody             => fromEOr(x.value)
    case x: EShortOrBody        => fromEShortOr(x.value)
    case x: EPlusPlusBody       => fromEPlusPlus(x.value)
    case x: EMinusMinusBody     => fromEMinusMinus(x.value)
    case x: EPercentPercentBody => fromEPercentPercent(x.value)
    case x: EMethodBody         => fromEMethod(x.value)
    case x: EMatchesBody        => fromEMatches(x.value)

    case _ => throw new Exception("Unknown type for Expr conversion")
  }

  /** Converts a protobuf [[Var]] (bound / free / wildcard) to a [[VarN]]. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def fromVar(x: Var): VarN = x.varInstance match {
    case n: BoundVar => fromBoundVar(n)
    case n: FreeVar  => fromFreeVar(n)
    case _: Wildcard => WildcardN
    case _           => throw new Exception("Unknown type for Var conversion")
  }

  /** Converts a protobuf [[GUnforgeable]] to an [[UnforgeableN]]. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def fromUnforgeable(u: GUnforgeable): UnforgeableN =
    u.unfInstance match {
      case x: GPrivateBody      => fromPrivate(x.value)
      case x: GDeployIdBody     => fromDeployId(x.value)
      case x: GDeployerIdBody   => fromDeployerId(x.value)
      case x: GSysAuthTokenBody => fromGSysAuthToken(x.value)
      case _                    => throw new Exception("Unknown type for GUnforgeable conversion")
    }

  /** Converts a protobuf [[Connective]] to a [[ConnectiveN]]. Simple type connectives map to singletons. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def fromConnective(c: Connective): ConnectiveN = c.connectiveInstance match {
    case _: ConnBool      => ConnBoolN
    case _: ConnInt       => ConnIntN
    case _: ConnBigInt    => ConnBigIntN
    case _: ConnString    => ConnStringN
    case _: ConnUri       => ConnUriN
    case _: ConnByteArray => ConnByteArrayN
    case x: ConnNotBody   => fromConnNotBody(x)
    case x: ConnAndBody   => fromConnAndBody(x)
    case x: ConnOrBody    => fromConnOrBody(x)
    case x: VarRefBody    => fromVarRefBody(x)
    case _                => throw new Exception("Unknown type for Connective conversion")
  }

  // Element-wise helpers over sequences, options, key-value pairs and injections.
  private def fromProto(ps: Seq[Par]): Seq[ParN]            = ps.map(fromProto)
  private def fromProto(varOpt: Option[Var]): Option[VarN]  = varOpt.map(fromVar)
  private def fromProtoKVPairs(ps: Seq[(Par, Par)]): Seq[(ParN, ParN)] =
    ps.map { case (k, v) => (fromProto(k), fromProto(v)) }
  private def fromProtoInjections(ps: Seq[(String, Par)]): Seq[(String, ParN)] =
    ps.map { case (str, p) => (str, fromProto(p)) }

  /** Basic types */
  /** Converts a [[Send]]: channel, data list and persistence flag. */
  def fromSend(x: Send): SendN = {
    val chan       = fromProto(x.chan)
    val data       = fromProto(x.data)
    val persistent = x.persistent
    SendN(chan, data, persistent)
  }

  /** Converts a [[Receive]]: binds, body, persistent/peek flags and bind count. */
  def fromReceive(x: Receive): ReceiveN = {
    val binds      = x.binds.map(fromReceiveBind)
    val body       = fromProto(x.body)
    val persistent = x.persistent
    val peek       = x.peek
    val bindCount  = x.bindCount
    ReceiveN(binds, body, persistent, peek, bindCount)
  }

  private def fromReceiveBind(x: ReceiveBind): ReceiveBindN = {
    val patterns  = fromProto(x.patterns)
    val source    = fromProto(x.source)
    val remainder = fromProto(x.remainder)
    val freeCount = x.freeCount
    ReceiveBindN(patterns, source, remainder, freeCount)
  }

  /** Converts a [[Match]]: target and its case list. */
  def fromMatch(x: Match): MatchN = {
    val target = fromProto(x.target)
    val cases  = x.cases.map(fromMatchCase)
    MatchN(target, cases)
  }

  private def fromMatchCase(x: MatchCase): MatchCaseN = {
    val pattern   = fromProto(x.pattern)
    val source    = fromProto(x.source)
    val freeCount = x.freeCount
    MatchCaseN(pattern, source, freeCount)
  }

  /**
    * Converts a [[New]]. Proto-side uris/injection keys are plain strings; the
    * AST wraps them in [[GStringN]] (see the `bimap` on injections).
    */
  def fromNew(x: New): NewN = {
    val bindCount                        = x.bindCount
    val p                                = fromProto(x.p)
    val uri                              = x.uri
    val injections: Seq[(String, ParN)]  = fromProtoInjections(x.injections.toSeq)
    NewN(bindCount, p, uri.map(GStringN(_)), injections.map(_.bimap(GStringN(_), identity)))
  }

  /** Ground types */
  private def fromGBool(x: GBool): GBoolN = {
    val v = x.value
    GBoolN(v)
  }

  private def fromGInt(x: GInt): GIntN = {
    val v = x.value
    GIntN(v)
  }

  private def fromGBigInt(x: GBigInt): GBigIntN = {
    val v = x.value
    GBigIntN(v)
  }

  private def fromGString(x: GString): GStringN = {
    val v = x.value
    GStringN(v)
  }

  private def fromGByteArray(x: GByteArray): GByteArrayN = {
    // ByteString -> raw byte array.
    val v = x.value.toByteArray
    GByteArrayN(v)
  }

  private def fromGUri(x: GUri): GUriN = {
    val v = x.value
    GUriN(v)
  }

  /** Collections */
  private def fromEList(x: EList): EListN = {
    val ps        = fromProto(x.ps)
    val remainder = fromProto(x.remainder)
    EListN(ps, remainder)
  }

  private def fromETuple(x: ETuple): ETupleN = {
    val ps = fromProto(x.ps)
    ETupleN(ps)
  }

  private def fromParSet(x: ParSet): ESetN = {
    // Proto sets keep elements pre-sorted; conversion preserves that order.
    val ps        = fromProto(x.ps.sortedPars)
    val remainder = fromProto(x.remainder)
    ESetN(ps, remainder)
  }

  private def fromParMap(x: ParMap): EMapN = {
    val ps        = fromProtoKVPairs(x.ps.sortedList)
    val remainder = fromProto(x.remainder)
    EMapN(ps, remainder)
  }

  /** Vars */
  private def fromBoundVar(x: BoundVar): BoundVarN = {
    // De Bruijn index of the bound variable.
    val idx = x.value
    BoundVarN(idx)
  }

  private def fromFreeVar(x: FreeVar): FreeVarN = {
    val idx = x.value
    FreeVarN(idx)
  }

  /** Unforgeable names */
  private def fromPrivate(x: GPrivate): UPrivateN = {
    val v = x.id.toByteArray
    UPrivateN(v)
  }

  private def fromDeployId(x: GDeployId): UDeployIdN = {
    val v = x.sig.toByteArray
    UDeployIdN(v)
  }

  private def fromDeployerId(x: GDeployerId): UDeployerIdN = {
    val v = x.publicKey.toByteArray
    UDeployerIdN(v)
  }
  // GSysAuthToken carries no payload; the parameter is intentionally unused.
  private def fromGSysAuthToken(@unused x: GSysAuthToken): USysAuthTokenN =
    USysAuthTokenN()

  /** Operations */
  // Unary and binary operators: each converter just recurses on the operand(s).
  private def fromENeg(x: ENeg): ENegN = {
    val p = fromProto(x.p)
    ENegN(p)
  }

  private def fromENot(x: ENot): ENotN = {
    val p = fromProto(x.p)
    ENotN(p)
  }

  private def fromEPlus(x: EPlus): EPlusN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EPlusN(p1, p2)
  }

  private def fromEMinus(x: EMinus): EMinusN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EMinusN(p1, p2)
  }

  private def fromEMult(x: EMult): EMultN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EMultN(p1, p2)
  }

  private def fromEDiv(x: EDiv): EDivN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EDivN(p1, p2)
  }

  private def fromEMod(x: EMod): EModN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EModN(p1, p2)
  }

  private def fromELt(x: ELt): ELtN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    ELtN(p1, p2)
  }

  private def fromELte(x: ELte): ELteN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    ELteN(p1, p2)
  }

  private def fromEGt(x: EGt): EGtN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EGtN(p1, p2)
  }

  private def fromEGte(x: EGte): EGteN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EGteN(p1, p2)
  }

  private def fromEEq(x: EEq): EEqN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EEqN(p1, p2)
  }

  private def fromENeq(x: ENeq): ENeqN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    ENeqN(p1, p2)
  }

  private def fromEAnd(x: EAnd): EAndN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EAndN(p1, p2)
  }

  private def fromEShortAnd(x: EShortAnd): EShortAndN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EShortAndN(p1, p2)
  }

  private def fromEOr(x: EOr): EOrN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EOrN(p1, p2)
  }

  private def fromEShortOr(x: EShortOr): EShortOrN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EShortOrN(p1, p2)
  }

  private def fromEPlusPlus(x: EPlusPlus): EPlusPlusN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EPlusPlusN(p1, p2)
  }

  private def fromEMinusMinus(x: EMinusMinus): EMinusMinusN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EMinusMinusN(p1, p2)
  }

  private def fromEPercentPercent(x: EPercentPercent): EPercentPercentN = {
    val p1 = fromProto(x.p1)
    val p2 = fromProto(x.p2)
    EPercentPercentN(p1, p2)
  }

  private def fromEMethod(x: EMethod): EMethodN = {
    val target     = fromProto(x.target)
    val methodName = x.methodName
    val arguments  = fromProto(x.arguments)
    EMethodN(target, methodName, arguments)
  }

  private def fromEMatches(x: EMatches): EMatchesN = {
    val target  = fromProto(x.target)
    val pattern = fromProto(x.pattern)
    EMatchesN(target, pattern)
  }

  /** Connective */
  private def fromConnNotBody(x: ConnNotBody): ConnNotN = {
    val p = fromProto(x.value)
    ConnNotN(p)
  }

  private def fromConnAndBody(x: ConnAndBody): ConnAndN = {
    val ps = fromProto(x.value.ps)
    ConnAndN(ps)
  }

  private def fromConnOrBody(x: ConnOrBody): ConnOrN = {
    val ps = fromProto(x.value.ps)
    ConnOrN(ps)
  }

  private def fromVarRefBody(x: VarRefBody): ConnVarRefN = {
    val index = x.value.index
    val depth = x.value.depth
    ConnVarRefN(index, depth)
  }

  /** Other types */
  private def fromBundle(x: Bundle): BundleN = {
    val body      = fromProto(x.body)
    val writeFlag = x.writeFlag
    val readFlag  = x.readFlag
    BundleN(body, writeFlag, readFlag)
  }
}
package io.rhonix.rholang

import cats.Eval
import cats.effect.Sync
import com.google.protobuf.ByteString
import coop.rchain.catscontrib.effect.implicits.sEval
import coop.rchain.models.Connective.ConnectiveInstance.*
import coop.rchain.models.Expr.ExprInstance.*
import coop.rchain.models.Var.VarInstance.*
import coop.rchain.models.Var.WildcardMsg
import coop.rchain.models.*
import coop.rchain.models.rholang.implicits.*
import io.rhonix.rholang.types.*

import scala.annotation.unused
import scala.collection.immutable.BitSet

/**
  * Converts the new in-memory AST (`io.rhonix.rholang.types`, `*N` types) into
  * the protobuf-generated Rholang model (`coop.rchain.models`).
  *
  * Mirror of [[BindingsFromProto]]; the two are expected to round-trip.
  *
  * NOTE(review): every converter here passes an empty `BitSet()` as `locallyFree` —
  * locally-free information is not computed by this translation. Confirm callers
  * do not rely on it.
  */
object BindingsToProto {
  /** Converts a [[ParN]] to a protobuf [[Par]] by dispatching on the AST node type. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def toProto(p: ParN): Par = p match {

    /** Basic types */
    case x: NilN.type => Par() // Nil maps to the empty Par; `x` is unused here.
    case x: ParProcN  => toParProc(x)
    case x: SendN     => toSend(x)
    case x: ReceiveN  => toReceive(x)
    case x: MatchN    => toMatch(x)
    case x: NewN      => toNew(x)

    /** Expressions */
    case e: ExprN => toExpr(e)

    /** Unforgeable names */
    case u: UnforgeableN => toUnforgeable(u)

    /** Connective */
    case c: ConnectiveN => toConnective(c)

    /** Other types */
    case x: BundleN => toBundle(x)

    case _ => throw new Exception("Unknown type for toProto conversation")
  }

  /** Converts an [[ExprN]] to a protobuf [[Expr]]. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def toExpr(e: ExprN): Expr = e match {

    /** Ground types */
    case x: GBoolN      => toGBool(x)
    case x: GIntN       => toGInt(x)
    case x: GBigIntN    => toGBigInt(x)
    case x: GStringN    => toGString(x)
    case x: GByteArrayN => toGByteArray(x)
    case x: GUriN       => toGUri(x)

    /** Collections */
    case x: EListN  => toEList(x)
    case x: ETupleN => toETuple(x)
    case x: ESetN   => toParSet(x)
    case x: EMapN   => toParMap(x)

    /** Vars */
    // A VarN used as an expression becomes an EVar wrapper.
    case v: VarN => EVar(toVar(v))

    /** Operations */
    case x: ENegN            => toENeg(x)
    case x: ENotN            => toENot(x)
    case x: EPlusN           => toEPlus(x)
    case x: EMinusN          => toEMinus(x)
    case x: EMultN           => toEMult(x)
    case x: EDivN            => toEDiv(x)
    case x: EModN            => toEMod(x)
    case x: ELtN             => toELt(x)
    case x: ELteN            => toELte(x)
    case x: EGtN             => toEGt(x)
    case x: EGteN            => toEGte(x)
    case x: EEqN             => toEEq(x)
    case x: ENeqN            => toENeq(x)
    case x: EAndN            => toEAnd(x)
    case x: EShortAndN       => toEShortAnd(x)
    case x: EOrN             => toEOr(x)
    case x: EShortOrN        => toEShortOr(x)
    case x: EPlusPlusN       => toEPlusPlus(x)
    case x: EMinusMinusN     => toEMinusMinus(x)
    case x: EPercentPercentN => toEPercentPercent(x)
    case x: EMethodN         => toEMethod(x)
    case x: EMatchesN        => toEMatches(x)

    case _ => throw new Exception("Unknown type for Expression conversation")
  }

  /** Converts a [[VarN]] (bound / free / wildcard) to a protobuf [[Var]]. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def toVar(x: VarN): Var = x match {
    case n: BoundVarN      => toBoundVar(n)
    case n: FreeVarN       => toFreeVar(n)
    case _: WildcardN.type => Wildcard(WildcardMsg())
    case _                 => throw new Exception("Unknown type for Var conversation")
  }

  /** Converts an [[UnforgeableN]] to a protobuf [[GUnforgeable]]. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def toUnforgeable(u: UnforgeableN): GUnforgeable = u match {
    case x: UPrivateN      => toPrivate(x)
    case x: UDeployIdN     => toDeployId(x)
    case x: UDeployerIdN   => toDeployerId(x)
    case x: USysAuthTokenN => toGSysAuthToken(x)
    case _                 => throw new Exception("Unknown type for Unforgeable conversation")
  }

  /** Converts a [[ConnectiveN]] to a protobuf [[Connective]]. Simple-type connectives carry a `true` flag. */
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def toConnective(c: ConnectiveN): Connective = c match {
    case _: ConnBoolN.type      => Connective(ConnBool(true))
    case _: ConnIntN.type       => Connective(ConnInt(true))
    case _: ConnBigIntN.type    => Connective(ConnBigInt(true))
    case _: ConnStringN.type    => Connective(ConnString(true))
    case _: ConnUriN.type       => Connective(ConnUri(true))
    case _: ConnByteArrayN.type => Connective(ConnByteArray(true))
    case x: ConnNotN            => Connective(toConnNotBody(x))
    case x: ConnAndN            => Connective(toConnAndBody(x))
    case x: ConnOrN             => Connective(toConnOrBody(x))
    case x: ConnVarRefN         => Connective(toVarRefBody(x))
    case _                      => throw new Exception("Unknown type for Connective conversation")
  }

  // Element-wise helpers over sequences, options, key-value pairs and injections.
  private def toProto(ps: Seq[ParN]): Seq[Par]            = ps.map(toProto)
  private def toProto(varOpt: Option[VarN]): Option[Var]  = varOpt.map(toVar)
  private def toProtoKVPairs(ps: Seq[(ParN, ParN)]): Seq[(Par, Par)] =
    ps.map { case (k, v) => (toProto(k), toProto(v)) }
  private def toProtoInjections(injections: Seq[(String, ParN)]): Seq[(String, Par)] =
    injections.map { case (str, p) => (str, toProto(p)) }

  /** Basic types */
  // Folds all parallel sub-processes into a single Par via `++`, then restores the flag.
  private def toParProc(x: ParProcN): Par = {
    val p = x.ps.foldLeft(Par())((acc, pN) => acc ++ toProto(pN))
    p.withConnectiveUsed(x.connectiveUsed.value)
  }

  /** Converts a [[SendN]]; `locallyFree` is left empty (see object scaladoc). */
  def toSend(x: SendN): Send = {
    val chan           = toProto(x.chan)
    val data           = toProto(x.args)
    val persistent     = x.persistent
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    Send(chan, data, persistent, locallyFree, connectiveUsed)
  }

  /** Converts a [[ReceiveN]]: binds, body, persistent/peek flags and bind count. */
  def toReceive(x: ReceiveN): Receive = {
    val binds          = x.binds.map(toReceiveBind)
    val body           = toProto(x.body)
    val persistent     = x.persistent
    val peek           = x.peek
    val bindCount      = x.bindCount
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    Receive(binds, body, persistent, peek, bindCount, locallyFree, connectiveUsed)
  }

  private def toReceiveBind(x: ReceiveBindN): ReceiveBind = {
    val patterns  = toProto(x.patterns)
    val source    = toProto(x.source)
    val remainder = toProto(x.remainder)
    val freeCount = x.freeCount
    ReceiveBind(patterns, source, remainder, freeCount)
  }

  /** Converts a [[MatchN]]: target and its case list. */
  def toMatch(x: MatchN): Match = {
    val target         = toProto(x.target)
    val cases          = x.cases.map(toMatchCase)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    Match(target, cases, locallyFree, connectiveUsed)
  }

  private def toMatchCase(x: MatchCaseN): MatchCase = {
    val pattern   = toProto(x.pattern)
    val source    = toProto(x.source)
    val freeCount = x.freeCount
    MatchCase(pattern, source, freeCount)
  }

  /**
    * Converts a [[NewN]]. AST-side uris/injection keys are [[GStringN]]s;
    * the proto side unwraps them to plain strings (`_.v`, `injectionsStrKeys`).
    */
  def toNew(x: NewN): New = {
    val bindCount               = x.bindCount
    val p                       = toProto(x.p)
    val uri                     = x.uri
    val injections: Map[String, Par] = Map.from(toProtoInjections(x.injectionsStrKeys.toSeq))
    val locallyFree             = BitSet()
    New(bindCount, p, uri.map(_.v), injections, locallyFree)
  }

  /** Ground types */
  private def toGBool(x: GBoolN): GBool = {
    val v = x.v
    GBool(v)
  }

  private def toGInt(x: GIntN): GInt = {
    val v = x.v
    GInt(v)
  }

  private def toGBigInt(x: GBigIntN): GBigInt = {
    val v = x.v
    GBigInt(v)
  }

  private def toGString(x: GStringN): GString = {
    val v = x.v
    GString(v)
  }

  private def toGByteArray(x: GByteArrayN): GByteArray = {
    // Raw byte array -> protobuf ByteString.
    val v = ByteString.copyFrom(x.v)
    GByteArray(v)
  }

  private def toGUri(x: GUriN): GUri = {
    val v = x.v
    GUri(v)
  }

  /** Collections */
  private def toEList(x: EListN): EList = {
    val ps             = toProto(x.ps)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    val remainder      = toProto(x.remainder)
    EList(ps, locallyFree, connectiveUsed, remainder)
  }

  private def toETuple(x: ETupleN): ETuple = {
    val ps             = toProto(x.ps)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    ETuple(ps, locallyFree, connectiveUsed)
  }

  private def toParSet(x: ESetN): ParSet = {
    // Uses pre-sorted elements; locallyFree is wrapped in Eval via the sEval Sync instance.
    val ps             = toProto(x.psSorted.value)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    val remainder      = toProto(x.remainder)
    ParSet(ps, connectiveUsed, Sync[Eval].delay(locallyFree), remainder)
  }

  private def toParMap(x: EMapN): ParMap = {
    val ps             = toProtoKVPairs(x.psSorted.value)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    val remainder      = toProto(x.remainder)
    ParMap(ps, connectiveUsed, Sync[Eval].delay(locallyFree), remainder)
  }

  /** Vars */
  private def toBoundVar(x: BoundVarN): BoundVar = {
    // De Bruijn index of the bound variable.
    val idx = x.idx
    BoundVar(idx)
  }

  private def toFreeVar(x: FreeVarN): FreeVar = {
    val idx = x.idx
    FreeVar(idx)
  }

  /** Unforgeable names */
  private def toPrivate(x: UPrivateN): GPrivate = {
    val v = ByteString.copyFrom(x.v)
    GPrivate(v)
  }

  private def toDeployId(x: UDeployIdN): GDeployId = {
    val v = ByteString.copyFrom(x.v)
    GDeployId(v)
  }

  private def toDeployerId(x: UDeployerIdN): GDeployerId = {
    val v = ByteString.copyFrom(x.v)
    GDeployerId(v)
  }

  // USysAuthTokenN carries no payload; the parameter is intentionally unused.
  private def toGSysAuthToken(@unused x: USysAuthTokenN): GSysAuthToken =
    GSysAuthToken()

  /** Operations */
  // Unary and binary operators: each converter just recurses on the operand(s).
  private def toENeg(x: ENegN): ENeg = {
    val p = toProto(x.p)
    ENeg(p)
  }

  private def toENot(x: ENotN): ENot = {
    val p = toProto(x.p)
    ENot(p)
  }

  private def toEPlus(x: EPlusN): EPlus = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EPlus(p1, p2)
  }

  private def toEMinus(x: EMinusN): EMinus = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EMinus(p1, p2)
  }

  private def toEMult(x: EMultN): EMult = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EMult(p1, p2)
  }

  private def toEDiv(x: EDivN): EDiv = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EDiv(p1, p2)
  }

  private def toEMod(x: EModN): EMod = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EMod(p1, p2)
  }

  private def toELt(x: ELtN): ELt = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    ELt(p1, p2)
  }

  private def toELte(x: ELteN): ELte = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    ELte(p1, p2)
  }

  private def toEGt(x: EGtN): EGt = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EGt(p1, p2)
  }

  private def toEGte(x: EGteN): EGte = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EGte(p1, p2)
  }

  private def toEEq(x: EEqN): EEq = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EEq(p1, p2)
  }

  private def toENeq(x: ENeqN): ENeq = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    ENeq(p1, p2)
  }

  private def toEAnd(x: EAndN): EAnd = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EAnd(p1, p2)
  }

  private def toEShortAnd(x: EShortAndN): EShortAnd = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EShortAnd(p1, p2)
  }

  private def toEOr(x: EOrN): EOr = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EOr(p1, p2)
  }

  private def toEShortOr(x: EShortOrN): EShortOr = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EShortOr(p1, p2)
  }

  private def toEPlusPlus(x: EPlusPlusN): EPlusPlus = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EPlusPlus(p1, p2)
  }

  private def toEMinusMinus(x: EMinusMinusN): EMinusMinus = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EMinusMinus(p1, p2)
  }

  private def toEPercentPercent(x: EPercentPercentN): EPercentPercent = {
    val p1 = toProto(x.p1)
    val p2 = toProto(x.p2)
    EPercentPercent(p1, p2)
  }

  private def toEMethod(x: EMethodN): EMethod = {
    // Note: proto EMethod takes methodName first, unlike EMethodN(target, name, args).
    val methodName     = x.methodName
    val target         = toProto(x.target)
    val arguments      = toProto(x.args)
    val locallyFree    = BitSet()
    val connectiveUsed = x.connectiveUsed.value
    EMethod(methodName, target, arguments, locallyFree, connectiveUsed)
  }

  private def toEMatches(x: EMatchesN): EMatches = {
    val target  = toProto(x.target)
    val pattern = toProto(x.pattern)
    EMatches(target, pattern)
  }

  /** Connective */
  private def toConnNotBody(x: ConnNotN): ConnNotBody = {
    val p = toProto(x.p)
    ConnNotBody(p)
  }

  private def toConnAndBody(x: ConnAndN): ConnAndBody = {
    val ps = ConnectiveBody(toProto(x.ps))
    ConnAndBody(ps)
  }

  private def toConnOrBody(x: ConnOrN): ConnOrBody = {
    val ps = ConnectiveBody(toProto(x.ps))
    ConnOrBody(ps)
  }

  private def toVarRefBody(x: ConnVarRefN): VarRefBody = {
    val index = x.index
    val depth = x.depth
    VarRefBody(VarRef(index, depth))
  }

  /** Other types */
  private def toBundle(x: BundleN): Bundle = {
    val body      = toProto(x.body)
    val writeFlag = x.writeFlag
    val readFlag  = x.readFlag
    Bundle(body, writeFlag, readFlag)
  }
}
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import coop.rchain.rholang.interpreter.compiler.SourcePosition
import coop.rchain.rholang.interpreter.errors.{InterpreterError, UnexpectedBundleContent}
import io.rhonix.rholang.normalizer.syntax.all.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.NestingInfoWriter
import io.rhonix.rholang.types.{BundleN, ConnectiveN, ParN, ParProcN}

/** Normalizes `bundle` AST nodes ([[PBundle]]) into [[BundleN]]. */
object BundleNormalizer {
  /**
    * Normalizes a bundle process.
    *
    * Fails with [[UnexpectedBundleContent]] if the normalized target has a
    * connective at its top level (directly, or inside a top-level ParProc).
    * Nested bundles are collapsed via `merge` so flags combine.
    */
  def normalizeBundle[F[_]: Sync: NormalizerRec: NestingInfoWriter](p: PBundle): F[BundleN] = {
    // True if the target itself is a connective, or any top-level branch of a ParProc is.
    def connectivesExistOnTop(p: ParN): Boolean =
      p match {
        case _: ConnectiveN  => true
        case pProc: ParProcN => pProc.ps.exists(connectivesExistOnTop)
        case _               => false
      }

    // Error carrying the bundle's source position; raised lazily via whenA below.
    def returnError: F[InterpreterError] =
      UnexpectedBundleContent(
        s"Illegal top level connective in bundle at: ${SourcePosition(p.line_num, p.col_num)}."
      ).raiseError

    for {
      // Inside bundle target is prohibited to have free variables and wildcards.
      target <- NormalizerRec[F].normalize(p.proc_).withinBundle()
      // Inside bundle target is prohibited to have connectives on top level.
      _ <- returnError.whenA(connectivesExistOnTop(target))

      // Map the AST bundle kind to read/write permission flags.
      outermostBundle = p.bundle_ match {
        case _: BundleReadWrite => BundleN(target, writeFlag = true, readFlag = true)
        case _: BundleRead      => BundleN(target, writeFlag = false, readFlag = true)
        case _: BundleWrite     => BundleN(target, writeFlag = true, readFlag = false)
        case _: BundleEquiv     => BundleN(target, writeFlag = false, readFlag = false)
      }

    } yield target match {
      // A bundle of a bundle merges into one node.
      case b: BundleN => outermostBundle.merge(b)
      case _          => outermostBundle
    }
  }
}

// ----- File: CollectNormalizer.scala -----
package io.rhonix.rholang.normalizer

import cats.Applicative
import cats.syntax.all.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.types.{CollectionN, EListN, EMapN, ESetN, ETupleN}

import scala.jdk.CollectionConverters.*

/** Normalizes collection literals ([[PCollect]]): lists, tuples, sets and maps. */
object CollectNormalizer {
  /**
    * Normalizes a collection literal into the matching [[CollectionN]] subtype.
    * Lists, sets and maps may carry a remainder pattern; tuples may not.
    */
  def normalizeCollect[F[_]: Applicative: NormalizerRec](p: PCollect): F[CollectionN] =
    // NOTE: Order of processing remainder or processes doesn't matter because they are independent.
    p.collection_ match {
      case cl: CollectList =>
        (
          NormalizerRec[F].normalize(cl.procremainder_),
          cl.listproc_.asScala.toList.traverse(NormalizerRec[F].normalize)
        ).mapN((remainder, ps) => EListN(ps, remainder))

      case ct: CollectTuple =>
        // The grammar splits tuples into single-element and multi-element forms.
        val ps = ct.tuple_ match {
          case ts: TupleSingle   => List(ts.proc_)
          case tm: TupleMultiple => tm.proc_ +: tm.listproc_.asScala.toList
        }
        ps.traverse(NormalizerRec[F].normalize).map(ETupleN.apply)

      case cs: CollectSet =>
        (
          NormalizerRec[F].normalize(cs.procremainder_),
          cs.listproc_.asScala.toList.traverse(NormalizerRec[F].normalize)
        ).mapN((remainder, ps) => ESetN(ps, remainder))

      case cm: CollectMap =>
        (
          NormalizerRec[F].normalize(cm.procremainder_),
          cm.listkeyvaluepair_.asScala.toList.traverse {
            case kv: KeyValuePairImpl =>
              // Normalize key and value of each pair independently.
              (NormalizerRec[F].normalize(kv.proc_1), NormalizerRec[F].normalize(kv.proc_2))
                .mapN((_, _))
          }
        ).mapN((remainder, ps) => EMapN(ps, remainder))
    }
}
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import coop.rchain.rholang.interpreter.compiler.SourcePosition
import coop.rchain.rholang.interpreter.errors.TopLevelLogicalConnectivesNotAllowedError
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.NestingInfoReader
import io.rhonix.rholang.types.ConnAndN
import sdk.syntax.all.*

/** Normalizes conjunction connectives (`/\`, [[PConjunction]]) into [[ConnAndN]]. */
object ConjunctionNormalizer {
  /**
    * Normalizes `p1 /\ p2`. Conjunctions are only legal inside a pattern;
    * at top level this raises [[TopLevelLogicalConnectivesNotAllowedError]].
    */
  def normalizeConjunction[F[_]: Sync: NormalizerRec](
    p: PConjunction
  )(implicit nestingInfo: NestingInfoReader): F[ConnAndN] =
    if (nestingInfo.insidePattern)
      // Normalize both operands and combine into a two-element conjunction.
      (p.proc_1, p.proc_2)
        .nmap(NormalizerRec[F].normalize)
        .mapN((left, right) => ConnAndN(Seq(left, right)))
    else {
      def pos = SourcePosition(p.line_num, p.col_num)
      TopLevelLogicalConnectivesNotAllowedError(s"/\\ (conjunction) at $pos").raiseError
    }
}

// ----- File: ContractNormalizer.scala -----
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.*
import io.rhonix.rholang.normalizer.syntax.all.*
import io.rhonix.rholang.types.{ReceiveBindN, ReceiveN}

import scala.jdk.CollectionConverters.*

/** Normalizes `contract` definitions ([[PContr]]) into persistent receives. */
object ContractNormalizer {
  /**
    * Normalizes a contract. A contract is encoded as a [[ReceiveN]] with
    * `persistent = true` and `peek = false`, a single bind over the contract's
    * name/patterns, and the body normalized with the pattern's free variables
    * absorbed as bound variables.
    */
  def normalizeContract[
    F[_]: Sync: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter,
    T: BoundVarWriter: FreeVarReader
  ](p: PContr): F[ReceiveN] =
    for {
      source <- NormalizerRec[F].normalize(p.name_)

      // Pattern normalization runs in a fresh free-variable scope; free-var
      // count is read after patterns and remainder are processed.
      normalizePattern = for {
        patterns <- p.listname_.asScala.toList.traverse(NormalizerRec[F].normalize)
        // NOTE(review): local name `reminder` looks like a typo for `remainder`.
        reminder  <- NormalizerRec[F].normalize(p.nameremainder_)
        freeCount = FreeVarReader[T].getFreeVars.size
      } yield ReceiveBindN(patterns, source, reminder, freeCount)
      patternTuple <- normalizePattern.withinPatternGetFreeVars(withinReceive = true)

      (bind, freeVars) = patternTuple

      // Normalize body in the copy of bound scope with added free variables as bounded
      continuation <- NormalizerRec[F].normalize(p.proc_).withAbsorbedFreeVars(freeVars)

    } yield ReceiveN(bind, continuation, persistent = true, peek = false, freeVars.size)
}
00000000000..4d155152287 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/DisjunctionNormalizer.scala @@ -0,0 +1,40 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.SourcePosition +import coop.rchain.rholang.interpreter.errors.{ + PatternReceiveError, + TopLevelLogicalConnectivesNotAllowedError +} +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.NestingInfoReader +import io.rhonix.rholang.types.ConnOrN +import sdk.syntax.all.* + +object DisjunctionNormalizer { + def normalizeDisjunction[F[_]: Sync: NormalizerRec]( + p: PDisjunction + )(implicit nestingInfo: NestingInfoReader): F[ConnOrN] = { + def pos = SourcePosition(p.line_num, p.col_num) + + if (nestingInfo.insidePattern) + if (!nestingInfo.insideTopLevelReceivePattern) + (p.proc_1, p.proc_2) + .nmap(NormalizerRec[F].normalize) + .mapN((left, right) => ConnOrN(Seq(left, right))) + else + // TODO: According to Rholang documentation: + // https://github.com/rchain/rchain/blob/25e523580a339db9ce2e8abdc9dcab44618d4c5c/docs/rholang/rholangtut.md?plain=1#L244-L252 + // Since we cannot rely on a specific pattern matching order, + // we cannot use patterns separated by \/ to bind any variables in top level receive. + // But, if part of the connectives does not contain free variables, disjunction and negation can be used. + // For example, this code: for(@{ @"grade"!(10) \/ @"grade"!(11) } <- ... ){ ... } is acceptable. + // Therefore, this condition contradicts the documentation, and it is preserved for compatibility with the legacy normalizer. + // In the future, it will be necessary to analyze whether the left and right parts of the connective contain free variables + // and only in such cases return a PatternReceiveError. 
+ PatternReceiveError(s"\\/ (disjunction) at $pos").raiseError + else + TopLevelLogicalConnectivesNotAllowedError(s"\\/ (disjunction) at $pos").raiseError + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/GroundNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/GroundNormalizer.scala new file mode 100644 index 00000000000..b8434bd55d0 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/GroundNormalizer.scala @@ -0,0 +1,35 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.normalizer.GroundNormalizeMatcher.{ + stripString, + stripUri +} +import coop.rchain.rholang.interpreter.errors.NormalizerError +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.types.{ExprN, GBigIntN, GBoolN, GIntN, GStringN, GUriN} + +object GroundNormalizer { + def normalizeGround[F[_]: Sync](p: PGround): F[ExprN] = Sync[F].defer { + p.ground_ match { + case gb: GroundBool => + gb.boolliteral_ match { + case _: BoolFalse => Sync[F].pure(GBoolN(false)) + case _: BoolTrue => Sync[F].pure(GBoolN(true)) + } + case gi: GroundInt => + Sync[F] + .delay(gi.longliteral_.toLong) + .adaptError { case e: NumberFormatException => NormalizerError(e.getMessage) } + .map(GIntN.apply) + case gbi: GroundBigInt => + Sync[F] + .delay(BigInt(gbi.longliteral_)) + .adaptError { case e: NumberFormatException => NormalizerError(e.getMessage) } + .map(GBigIntN.apply) + case gs: GroundString => Sync[F].delay(GStringN(stripString(gs.stringliteral_))) + case gu: GroundUri => Sync[F].delay(GUriN(stripUri(gu.uriliteral_))) + } + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/IfNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/IfNormalizer.scala new file mode 100644 index 00000000000..c346284b8dd --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/IfNormalizer.scala @@ -0,0 +1,31 @@ 
+package io.rhonix.rholang.normalizer + +import cats.Apply +import cats.syntax.all.* +import io.rhonix.rholang.ast.rholang.Absyn.{PIf, PIfElse} +import io.rhonix.rholang.types.{GBoolN, MatchCaseN, MatchN, NilN} +import sdk.syntax.all.* + +object IfNormalizer { + def normalizeIf[F[_]: Apply: NormalizerRec](p: PIf): F[MatchN] = + (p.proc_1, p.proc_2) + .nmap(NormalizerRec[F].normalize) + .mapN( + (target, trueCaseBody) => + MatchN( + target, + Seq(MatchCaseN(GBoolN(true), trueCaseBody), MatchCaseN(GBoolN(false), NilN)) + ) + ) + + def normalizeIfElse[F[_]: Apply: NormalizerRec](p: PIfElse): F[MatchN] = + (p.proc_1, p.proc_2, p.proc_3) + .nmap(NormalizerRec[F].normalize) + .mapN( + (target, trueCaseBody, falseCaseBody) => + MatchN( + target, + Seq(MatchCaseN(GBoolN(true), trueCaseBody), MatchCaseN(GBoolN(false), falseCaseBody)) + ) + ) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/InputNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/InputNormalizer.scala new file mode 100644 index 00000000000..3d65a93f953 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/InputNormalizer.scala @@ -0,0 +1,236 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.models.ReceiveBind +import coop.rchain.rholang.interpreter.errors.ReceiveOnSameChannelsError +import io.rhonix.rholang.{*, types} +import io.rhonix.rholang.Bindings.* +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.normalizer.syntax.all.* +import io.rhonix.rholang.types.{ParN, ReceiveBindN, ReceiveN} + +import java.util.UUID +import scala.jdk.CollectionConverters.* + +object InputNormalizer { + @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) + def normalizeInput[ + F[_]: Sync: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter, + T: BoundVarWriter: FreeVarReader + ](p: PInput): F[ParN] = { + if 
(p.listreceipt_.size() > 1) { + NormalizerRec[F].normalize( + p.listreceipt_.asScala.reverse.foldLeft(p.proc_) { (proc, receipt) => + val listReceipt = new ListReceipt() + listReceipt.add(receipt) + new PInput(listReceipt, proc) + } + ) + } else { + + val receiptContainsComplexSource: Boolean = + p.listreceipt_.asScala.head match { + case rl: ReceiptLinear => + rl.receiptlinearimpl_ match { + case ls: LinearSimple => + ls.listlinearbind_.asScala.exists { + case lbi: LinearBindImpl => + lbi.namesource_ match { + case _: SimpleSource => false + case _ => true + } + + } + case _ => false + } + case _ => false + } + + if (receiptContainsComplexSource) { + p.listreceipt_.asScala.head match { + case rl: ReceiptLinear => + rl.receiptlinearimpl_ match { + case ls: LinearSimple => + val listReceipt = new ListReceipt() + val listLinearBind = new ListLinearBind() + val listNameDecl = new ListNameDecl() + listReceipt.add(new ReceiptLinear(new LinearSimple(listLinearBind))) + val (sends, continuation) = + ls.listlinearbind_.asScala.foldLeft((new PNil: Proc, p.proc_)) { + case ((sends, continuation), lb) => + lb match { + case lbi: LinearBindImpl => + lbi.namesource_ match { + case _: SimpleSource => + listLinearBind.add(lbi) + (sends, continuation) + case _ => + val identifier = UUID.randomUUID().toString + val r = new NameVar(identifier) + lbi.namesource_ match { + case rss: ReceiveSendSource => + lbi.listname_.asScala.prepend(r) + listLinearBind.add( + new LinearBindImpl( + lbi.listname_, + lbi.nameremainder_, + new SimpleSource(rss.name_) + ) + ) + ( + sends, + new PPar( + new PSend(r, new SendSingle, new ListProc()), + continuation + ) + ) + case srs: SendReceiveSource => + listNameDecl.add(new NameDeclSimpl(identifier)) + listLinearBind.add( + new LinearBindImpl( + lbi.listname_, + lbi.nameremainder_, + new SimpleSource(r) + ) + ) + srs.listproc_.asScala.prepend(new PEval(r)) + ( + new PPar( + new PSend(srs.name_, new SendSingle, srs.listproc_), + sends + ): Proc, + 
continuation + ) + } + } + } + } + val pInput = new PInput(listReceipt, continuation) + NormalizerRec[F].normalize( + if (listNameDecl.isEmpty) pInput + else new PNew(listNameDecl, new PPar(sends, pInput)) + ) + } + + } + } else { + + // To handle the most common case where we can sort the binds because + // they're from different sources, Each channel's list of patterns starts its free variables at 0. + // We check for overlap at the end after sorting. We could check before, but it'd be an extra step. + // We split this into parts. First we process all the sources, then we process all the bindings. + + // If we get to this point, we know p.listreceipt.size() == 1 + val (consumes, persistent, peek) = + p.listreceipt_.asScala.head match { + case rl: ReceiptLinear => + rl.receiptlinearimpl_ match { + case ls: LinearSimple => + ( + ls.listlinearbind_.asScala.toVector.map { + case lbi: LinearBindImpl => + ( + (lbi.listname_.asScala.toVector, lbi.nameremainder_), + lbi.namesource_ match { + // all sources should be simple sources by this point + case ss: SimpleSource => ss.name_ + } + ) + }, + false, + false + ) + } + case rr: ReceiptRepeated => + rr.receiptrepeatedimpl_ match { + case rs: RepeatedSimple => + ( + rs.listrepeatedbind_.asScala.toVector.map { + case rbi: RepeatedBindImpl => + ((rbi.listname_.asScala.toVector, rbi.nameremainder_), rbi.name_) + }, + true, + false + ) + + } + case rp: ReceiptPeek => + rp.receiptpeekimpl_ match { + case ps: PeekSimple => + ( + ps.listpeekbind_.asScala.toVector.map { + case pbi: PeekBindImpl => + ((pbi.listname_.asScala.toVector, pbi.nameremainder_), pbi.name_) + }, + false, + true + ) + + } + + } + + val (patterns, names) = consumes.unzip + + def createBinds( + patterns: Vector[(Vector[Name], NameRemainder)], + normalizedSources: Vector[ParN] + ): F[Vector[ReceiveBindN]] = + (patterns zip normalizedSources) + .traverse { + case ((names, remainder), source) => + for { + initFreeCount <- 
Sync[F].delay(FreeVarReader[T].getFreeVars.size) + rbNames <- names.traverse(NormalizerRec[F].normalize) + rbRemainder <- NormalizerRec[F].normalize(remainder) + freeCount = FreeVarReader[T].getFreeVars.size - initFreeCount + } yield ReceiveBindN(rbNames, source, rbRemainder, freeCount) + } + + def sortBinds(binds: Seq[ReceiveBindN]): F[Seq[ReceiveBindN]] = { + import coop.rchain.models.rholang.sorter.ReceiveSortMatcher.sortBind + def from(x: ReceiveBind): ReceiveBindN = + ReceiveBindN( + fromProto(x.patterns), + fromProto(x.source), + x.remainder.map(fromProtoVar), + x.freeCount + ) + def to(x: ReceiveBindN): ReceiveBind = + ReceiveBind( + toProto(x.patterns), + toProto(x.source), + x.remainder.map(toProtoVar), + x.freeCount + ) + binds.traverse(b => sortBind(to(b))).map(_.sorted.map(x => from(x.term))) + } + + for { + processedSources <- names.traverse(NormalizerRec[F].normalize) + + patternTuple <- createBinds(patterns, processedSources).withinPatternGetFreeVars( + withinReceive = true + ) + + (unsortedBinds, freeVars) = patternTuple + + // TODO: The sorting will be removed after the old Rholang types are removed. + // With the new types, sorting is unnecessary as they are always sorted by hash. 
+ binds <- sortBinds(unsortedBinds) + + thereAreDuplicatesInSources = processedSources.distinct.size != processedSources.size + _ <- ReceiveOnSameChannelsError(p.line_num, p.col_num) + .raiseError[F, Unit] + .whenA(thereAreDuplicatesInSources) + + // Normalize body in the copy of bound scope with added free variables as bounded + continuation <- NormalizerRec[F].normalize(p.proc_).withAbsorbedFreeVars(freeVars) + + } yield ReceiveN(binds, continuation, persistent, peek, freeVars.size) + } + } + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/LetNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/LetNormalizer.scala new file mode 100644 index 00000000000..28018ea5623 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/LetNormalizer.scala @@ -0,0 +1,140 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import io.rhonix.rholang.normalizer.syntax.all.* +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.types.{EListN, MatchCaseN, MatchN, ParN} +import sdk.syntax.all.* + +import java.util.UUID +import scala.jdk.CollectionConverters.* + +object LetNormalizer { + def normalizeLet[ + F[_]: Sync: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter, + T: BoundVarWriter: FreeVarReader + ](p: PLet): F[ParN] = + p.decls_ match { + + case concDeclsImpl: ConcDeclsImpl => + def extractNamesAndProcs(decl: Decl): (ListName, NameRemainder, ListProc) = + decl match { + case declImpl: DeclImpl => + (declImpl.listname_, declImpl.nameremainder_, declImpl.listproc_) + + } + + val (listNames, listNameRemainders, listProcs) = + (extractNamesAndProcs(p.decl_) :: concDeclsImpl.listconcdecl_.asScala.toList.map { + case concDeclImpl: ConcDeclImpl => extractNamesAndProcs(concDeclImpl.decl_) + }).unzip3 + + /* + It is not necessary to use UUIDs to achieve concurrent let declarations. 
+ While there is the possibility for collisions with either variables declared by the user + or variables declared within this translation, the chances for collision are astronomically + small (see analysis here: https://towardsdatascience.com/are-uuids-really-unique-57eb80fc2a87). + A strictly correct approach would be one that performs an ADT rather than an AST translation, which + was not done here due to time constraints. + */ + val variableNames = List.fill(listNames.size)(UUID.randomUUID().toString) + + val psends = variableNames.zip(listProcs).map { + case (variableName, listProc) => + new PSend(new NameVar(variableName), new SendSingle(), listProc) + } + + val pinput = { + val listLinearBind = new ListLinearBind() + variableNames + .zip(listNames) + .zip(listNameRemainders) + .map { + case ((variableName, listName), nameRemainder) => + new LinearBindImpl( + listName, + nameRemainder, + new SimpleSource(new NameVar(variableName)) + ) + } + .foreach(listLinearBind.add) + val listReceipt = new ListReceipt() + listReceipt.add(new ReceiptLinear(new LinearSimple(listLinearBind))).void() + new PInput(listReceipt, p.proc_) + } + + val ppar = { + val procs = psends :+ pinput + procs.drop(2).foldLeft(new PPar(procs.head, procs(1))) { + case (ppar, proc) => + new PPar(ppar, proc) + } + } + + val pnew = { + val listNameDecl = new ListNameDecl() + variableNames.map(new NameDeclSimpl(_)).foreach(listNameDecl.add) + new PNew(listNameDecl, ppar) + } + NormalizerRec[F].normalize(pnew) + + case _ => + /* + Let processes with a single bind or with sequential binds ";" are converted into match processes rather + than input processes, so that each sequential bind doesn't add a new unforgeable name to the tuplespace. + The Rholang 1.1 spec defines them as the latter.
Because the Rholang 1.1 spec defines let processes in terms + of an output process in concurrent composition with an input process, the let process appears to quote the + process on the RHS of "<-" and bind it to the pattern on LHS. For example, in + let x <- 1 in { Nil } + the process (value) "1" is quoted and bound to "x" as a name. There is no way to perform an AST transformation + of sequential let into a match process and still preserve these semantics, so we have to do an ADT transformation. + */ + def convertDecls(d: Decls): Proc = d match { + case _: EmptyDeclImpl => p.proc_ + case linearDeclsImpl: LinearDeclsImpl => + val newDecl = + linearDeclsImpl.listlineardecl_.asScala.head match { + case impl: LinearDeclImpl => impl.decl_ + } + val newDecls = + if (linearDeclsImpl.listlineardecl_.size == 1) + new EmptyDeclImpl() + else { + val newListLinearDecls = new ListLinearDecl() + linearDeclsImpl.listlineardecl_.asScala.tail.foreach(newListLinearDecls.add) + new LinearDeclsImpl(newListLinearDecls) + } + new PLet(newDecl, newDecls, p.proc_) + } + + p.decl_ match { + case declImpl: DeclImpl => + for { + values <- declImpl.listproc_.asScala.toList.traverse(NormalizerRec[F].normalize) + normalizePattern = for { + rem <- NormalizerRec[F].normalize(declImpl.nameremainder_) + ps <- declImpl.listname_.asScala.toList.traverse(NormalizerRec[F].normalize) + } yield EListN(ps, rem) + patternTuple <- normalizePattern.withinPatternGetFreeVars() + + (patterns, patternFreeVars) = patternTuple + + // Normalize body in the copy of bound scope with added free variables as bounded + continuation <- NormalizerRec[F] + .normalize(convertDecls(p.decls_)) + .withAbsorbedFreeVars(patternFreeVars) + } yield MatchN( + target = EListN(values), + cases = Seq( + MatchCaseN( + patterns, + continuation, + patternFreeVars.size + ) + ) + ) + } + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchNormalizer.scala
b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchNormalizer.scala new file mode 100644 index 00000000000..ab7c671c5d7 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchNormalizer.scala @@ -0,0 +1,40 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.errors.UnrecognizedNormalizerError +import io.rhonix.rholang.normalizer.syntax.all.* +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.types.{MatchCaseN, MatchN} + +import scala.jdk.CollectionConverters.* + +object MatchNormalizer { + def normalizeMatch[ + F[_]: Sync: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter, + T: BoundVarWriter: FreeVarReader + ](p: PMatch): F[MatchN] = { + def normalizeCase(c: Case): F[MatchCaseN] = c match { + case ci: CaseImpl => + val (pattern, caseBody) = (ci.proc_1, ci.proc_2) + + for { + // Normalize pattern in a fresh bound and free variables scope + patternTuple <- NormalizerRec[F].normalize(pattern).withinPatternGetFreeVars() + + (patternResult, freeVars) = patternTuple + + // Normalize body in the copy of bound scope with added free variables as bounded + caseBodyResult <- NormalizerRec[F].normalize(caseBody).withAbsorbedFreeVars(freeVars) + + } yield MatchCaseN(patternResult, caseBodyResult, freeVars.length) + + case c => + UnrecognizedNormalizerError(s"Unexpected Case implementation `${c.getClass}`.").raiseError + } + + (NormalizerRec[F].normalize(p.proc_), p.listcase_.asScala.toVector.traverse(normalizeCase)) + .mapN(MatchN.apply) + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchesNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchesNormalizer.scala new file mode 100644 index 00000000000..c0a71f17065 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MatchesNormalizer.scala @@ -0,0 +1,20 @@ +package 
io.rhonix.rholang.normalizer + +import cats.Apply +import cats.syntax.all.* +import io.rhonix.rholang.ast.rholang.Absyn.{PMatches, Proc} +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.normalizer.syntax.all.* +import io.rhonix.rholang.types.EMatchesN + +object MatchesNormalizer { + def normalizeMatches[F[_]: Apply: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter]( + p: PMatches + ): F[EMatchesN] = { + // The expression "target matches pattern" should have the same semantics as "match target { pattern => true ; _ => false}". + // Therefore, there is no need to bind free variables in the pattern because the case body will always be true. + def normalizePattern(proc: Proc) = NormalizerRec[F].normalize(proc).withinPattern() + + (NormalizerRec[F].normalize(p.proc_1), normalizePattern(p.proc_2)).mapN(EMatchesN.apply) + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/MethodNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MethodNormalizer.scala new file mode 100644 index 00000000000..35e5213c272 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/MethodNormalizer.scala @@ -0,0 +1,16 @@ +package io.rhonix.rholang.normalizer + +import cats.Applicative +import cats.syntax.all.* +import io.rhonix.rholang.ast.rholang.Absyn.PMethod +import io.rhonix.rholang.types.EMethodN + +import scala.jdk.CollectionConverters.CollectionHasAsScala + +object MethodNormalizer { + def normalizeMethod[F[_]: Applicative: NormalizerRec](p: PMethod): F[EMethodN] = + ( + NormalizerRec[F].normalize(p.proc_), + p.listproc_.asScala.toList.traverse(NormalizerRec[F].normalize) + ).mapN((target, args) => EMethodN(target, p.var_, args)) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/NegationNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NegationNormalizer.scala new file mode 100644 index 00000000000..730320b7046 --- /dev/null +++ 
b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NegationNormalizer.scala @@ -0,0 +1,34 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.SourcePosition +import coop.rchain.rholang.interpreter.errors.* +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.NestingInfoReader +import io.rhonix.rholang.types.ConnNotN + +object NegationNormalizer { + def normalizeNegation[F[_]: Sync: NormalizerRec]( + p: PNegation + )(implicit nestingInfo: NestingInfoReader): F[ConnNotN] = { + val pos = SourcePosition(p.line_num, p.col_num) + + if (nestingInfo.insidePattern) + if (!nestingInfo.insideTopLevelReceivePattern) + NormalizerRec[F].normalize(p.proc_).map(ConnNotN(_)) + else + // TODO: According to Rholang documentation: + // https://github.com/rchain/rchain/blob/25e523580a339db9ce2e8abdc9dcab44618d4c5c/docs/rholang/rholangtut.md?plain=1#L244-L252 + // Since we cannot rely on a specific pattern matching order, + // we cannot use patterns separated by \/ to bind any variables in top level receive. + // But, if part of the connectives does not contain free variables, disjunction and negation can be used. + // For example, this code: for(@{ @"grade"!(10) \/ @"grade"!(11) } <- ... ){ ... } is acceptable. + // Therefore, this condition contradicts the documentation, and it is preserved for compatibility with the legacy normalizer. + // In the future, it will be necessary to analyze whether the left and right parts of the connective contain free variables + // and only in such cases return a PatternReceiveError. 
+ PatternReceiveError(s"~ (negation) at $pos").raiseError + else + TopLevelLogicalConnectivesNotAllowedError(s"~ (negation) at $pos").raiseError + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/NewNormalizer.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NewNormalizer.scala new file mode 100644 index 00000000000..78f3d391cc4 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NewNormalizer.scala @@ -0,0 +1,51 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.normalizer.GroundNormalizeMatcher +import coop.rchain.rholang.interpreter.compiler.{NameSort, SourcePosition, VarSort} +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.normalizer.syntax.all.* +import io.rhonix.rholang.types.{NewN, ParN} + +import scala.jdk.CollectionConverters.* + +object NewNormalizer { + def normalizeNew[F[_]: Sync: NormalizerRec: BoundVarScope, T >: VarSort: BoundVarWriter]( + p: PNew + ): F[NewN] = + Sync[F].defer { + val simpleBindings = p.listnamedecl_.asScala.toSeq.collect { + case n: NameDeclSimpl => + (n.var_, NameSort, SourcePosition(n.line_num, n.col_num)) + } // Unsorted simple bindings + + val sortedUrnData = p.listnamedecl_.asScala.toSeq + .collect { + case n: NameDeclUrn => + ( + GroundNormalizeMatcher.stripUri(n.uriliteral_), + (n.var_, NameSort, SourcePosition(n.line_num, n.col_num)) + ) + } + .sortBy(_._1) // Sort by uris in lexicographical order + + val (uris, urnBindings) = sortedUrnData.unzip + + val boundVars = simpleBindings ++ urnBindings + + NormalizerRec[F] + .normalize(p.proc_) + .withAddedBoundVars[T](boundVars) + .map { + case (normalizedPar, indices) => + NewN( + bindCount = indices.size, + p = normalizedPar, + uri = uris, + injections = Map[String, ParN]() + ) + } + } +} diff --git 
a/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRec.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRec.scala new file mode 100644 index 00000000000..95c98716226 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRec.scala @@ -0,0 +1,20 @@ +package io.rhonix.rholang.normalizer + +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.types.{ParN, VarN} + +trait NormalizerRec[F[_]] { + def normalize(proc: Proc): F[ParN] + + def normalize(proc: Name): F[ParN] + + // TODO: Remove when reminder will be replaced with more general spread operator. + + def normalize(proc: ProcRemainder): F[Option[VarN]] + + def normalize(proc: NameRemainder): F[Option[VarN]] +} + +object NormalizerRec { + def apply[F[_]](implicit instance: NormalizerRec[F]): instance.type = instance +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRecImpl.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRecImpl.scala new file mode 100644 index 00000000000..2a49a573a2c --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/NormalizerRecImpl.scala @@ -0,0 +1,126 @@ +package io.rhonix.rholang.normalizer + +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.* +import coop.rchain.rholang.interpreter.errors.* +import io.rhonix.rholang.ast.rholang.Absyn.* +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.types.* +import sdk.syntax.all.* + +final case class NormalizerRecImpl[ + F[+_]: Sync: BoundVarScope: FreeVarScope: NestingInfoWriter, + T >: VarSort: BoundVarWriter: BoundVarReader: FreeVarWriter: FreeVarReader +]()(implicit nestingInfo: NestingInfoReader) + extends NormalizerRec[F] { + + implicit val nRec: NormalizerRec[F] = this + + override def normalize(proc: Proc): F[ParN] = NormalizerRecImpl.normalize[F, T](proc) + + override def normalize(name: Name): F[ParN] = name match { + case nv: 
NameVar => + VarNormalizer + .normalizeBoundVar[F, T](nv.var_, SourcePosition(nv.line_num, nv.col_num), NameSort) + case nq: NameQuote => NormalizerRec[F].normalize(nq.proc_) + case wc: NameWildcard => + VarNormalizer.normalizeWildcard[F](SourcePosition(wc.line_num, wc.col_num)) + } + + override def normalize(remainder: ProcRemainder): F[Option[VarN]] = remainder match { + case _: ProcRemainderEmpty => none.pure + case pr: ProcRemainderVar => VarNormalizer.normalizeRemainder[F, T](pr.procvar_).map(_.some) + } + + override def normalize(remainder: NameRemainder): F[Option[VarN]] = remainder match { + case _: NameRemainderEmpty => none.pure + case nr: NameRemainderVar => VarNormalizer.normalizeRemainder[F, T](nr.procvar_).map(_.some) + } +} + +object NormalizerRecImpl { + + /** Normalizes parser AST types to core Rholang AST types + * + * @param proc input parser AST object + * @return core Rholang AST object [[ParN]] + */ + def normalize[ + F[+_]: Sync: NormalizerRec: BoundVarScope: FreeVarScope: NestingInfoWriter, + T >: VarSort: BoundVarWriter: BoundVarReader: FreeVarWriter: FreeVarReader + ](proc: Proc)(implicit nestingInfo: NestingInfoReader): F[ParN] = { + + def unaryExp(subProc: Proc, constructor: ParN => ExprN): F[ParN] = + NormalizerRec[F].normalize(subProc).map(constructor) + + def binaryExp( + subProcLeft: Proc, + subProcRight: Proc, + constructor: (ParN, ParN) => ExprN + ): F[ParN] = + (subProcLeft, subProcRight).nmap(NormalizerRec[F].normalize).mapN(constructor) + + // Dispatch to normalizer methods depending on parser AST type + proc match { + /* Terminal expressions (0-arity constructors) */ + /* =========================================== */ + case _: PNil => (NilN: ParN).pure + case p: PGround => GroundNormalizer.normalizeGround[F](p) + case p: PVar => VarNormalizer.normalizeVar[F, T](p) + case p: PVarRef => VarRefNormalizer.normalizeVarRef[F, T](p) + case p: PSimpleType => Sync[F].delay(SimpleTypeNormalizer.normalizeSimpleType(p)) + + /* Unary 
expressions (1-arity constructors) */ + /* ======================================== */ + case p: PBundle => BundleNormalizer.normalizeBundle[F](p) + case p: PNegation => NegationNormalizer.normalizeNegation[F](p) + case p: PEval => NormalizerRec[F].normalize(p.name_) + case p: PExprs => NormalizerRec[F].normalize(p.proc_) + case p: PNot => unaryExp(p.proc_, ENotN.apply) + case p: PNeg => unaryExp(p.proc_, ENegN.apply) + + /* Binary expressions (2-arity constructors) */ + /* ========================================= */ + case p: PPar => ParNormalizer.normalizePar[F](p) + case p: PMatches => MatchesNormalizer.normalizeMatches[F](p) + case p: PConjunction => ConjunctionNormalizer.normalizeConjunction[F](p) + case p: PDisjunction => DisjunctionNormalizer.normalizeDisjunction[F](p) + case p: PMult => binaryExp(p.proc_1, p.proc_2, EMultN.apply) + case p: PDiv => binaryExp(p.proc_1, p.proc_2, EDivN.apply) + case p: PMod => binaryExp(p.proc_1, p.proc_2, EModN.apply) + case p: PPercentPercent => binaryExp(p.proc_1, p.proc_2, EPercentPercentN.apply) + case p: PAdd => binaryExp(p.proc_1, p.proc_2, EPlusN.apply) + case p: PMinus => binaryExp(p.proc_1, p.proc_2, EMinusN.apply) + case p: PPlusPlus => binaryExp(p.proc_1, p.proc_2, EPlusPlusN.apply) + case p: PMinusMinus => binaryExp(p.proc_1, p.proc_2, EMinusMinusN.apply) + case p: PLt => binaryExp(p.proc_1, p.proc_2, ELtN.apply) + case p: PLte => binaryExp(p.proc_1, p.proc_2, ELteN.apply) + case p: PGt => binaryExp(p.proc_1, p.proc_2, EGtN.apply) + case p: PGte => binaryExp(p.proc_1, p.proc_2, EGteN.apply) + case p: PEq => binaryExp(p.proc_1, p.proc_2, EEqN.apply) + case p: PNeq => binaryExp(p.proc_1, p.proc_2, ENeqN.apply) + case p: PAnd => binaryExp(p.proc_1, p.proc_2, EAndN.apply) + case p: POr => binaryExp(p.proc_1, p.proc_2, EOrN.apply) + case p: PShortAnd => binaryExp(p.proc_1, p.proc_2, EShortAndN.apply) + case p: PShortOr => binaryExp(p.proc_1, p.proc_2, EShortOrN.apply) + + /* N-ary parameter expressions (N-arity 
constructors) */
      /* ================================================== */
      // Each composite construct is delegated to its dedicated normalizer object.
      case p: PCollect   => CollectNormalizer.normalizeCollect[F](p)
      case p: PSend      => SendNormalizer.normalizeSend[F](p)
      case p: PSendSynch => SendSynchNormalizer.normalizeSendSynch[F](p)
      case p: PContr     => ContractNormalizer.normalizeContract[F, T](p)
      case p: PInput     => InputNormalizer.normalizeInput(p)
      case p: PNew       => NewNormalizer.normalizeNew[F, T](p)
      case p: PLet       => LetNormalizer.normalizeLet[F, T](p)
      case p: PMatch     => MatchNormalizer.normalizeMatch[F, T](p)
      case p: PIf        => IfNormalizer.normalizeIf[F](p)
      case p: PIfElse    => IfNormalizer.normalizeIfElse[F](p)
      case p: PMethod    => MethodNormalizer.normalizeMethod[F](p)

      // Any parser AST node not covered above is a compiler bug or grammar drift.
      case p =>
        UnrecognizedNormalizerError(s"Unrecognized parser AST type `${p.getClass}`.").raiseError
    }
  }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/ParNormalizer.scala ───
package io.rhonix.rholang.normalizer

import cats.Apply
import cats.syntax.all.*
import io.rhonix.rholang.ast.rholang.Absyn.PPar
import io.rhonix.rholang.types.ParN
import sdk.syntax.all.*

object ParNormalizer {

  /** Normalizes a parallel composition `P | Q` by normalizing both sides
    * and merging the results with [[ParN.combine]].
    */
  def normalizePar[F[_]: Apply: NormalizerRec](p: PPar): F[ParN] =
    (p.proc_1, p.proc_2).nmap(NormalizerRec[F].normalize).mapN(ParN.combine)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/SendNormalizer.scala ───
package io.rhonix.rholang.normalizer

import cats.Applicative
import cats.syntax.all.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.types.SendN

import scala.jdk.CollectionConverters.*

object SendNormalizer {

  /** Normalizes a send `chan!(args...)` / `chan!!(args...)`.
    *
    * The channel and each argument are normalized recursively; the send kind
    * (`!` single vs `!!` persistent) is mapped onto the `persistent` flag.
    */
  def normalizeSend[F[_]: Applicative: NormalizerRec](p: PSend): F[SendN] =
    (
      NormalizerRec[F].normalize(p.name_),
      p.listproc_.asScala.toVector.traverse(NormalizerRec[F].normalize)
    ).mapN { (chan, args) =>
      val persistent = p.send_ match {
        case _: SendSingle   => false
        case _: SendMultiple => true
      }
      SendN(chan, args, persistent)
    }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/SendSynchNormalizer.scala ───
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import coop.rchain.rholang.interpreter.compiler.{NameSort, SourcePosition, VarSort}
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.*
import io.rhonix.rholang.types.{
  BoundVarN,
  NewN,
  NilN,
  ParN,
  ReceiveBindN,
  ReceiveN,
  SendN,
  WildcardN
}
import sdk.syntax.all.*

import java.util.UUID
import scala.jdk.CollectionConverters.*

object SendSynchNormalizer {

  /** Normalizes a synchronous send `chan!?(args); cont` by rewriting it, at the
    * parser-AST level, into `new x { chan!(x, args) | for(_ <- x) { cont } }`
    * with a random UUID as the generated channel name, then normalizing the
    * rewritten term.
    */
  def normalizeSendSynch[F[_]: Sync: NormalizerRec](p: PSendSynch): F[ParN] = Sync[F].defer {
    val identifier = UUID.randomUUID().toString
    val nameVar    = new NameVar(identifier)

    val send: PSend = {
      // NOTE(review): this prepends into `p.listproc_` IN PLACE, mutating the input
      // AST node. Normalizing the same PSendSynch node twice would prepend the
      // generated name twice — confirm nodes are normalized at most once, or copy
      // the list before prepending.
      p.listproc_.asScala.prepend(new PEval(nameVar)).void()
      new PSend(p.name_, new SendSingle(), p.listproc_)
    }

    val receive: PInput = {
      // Build `for(_ <- nameVar) { cont }` out of raw parser AST pieces.
      val listName = new ListName()
      listName.add(new NameWildcard).void()

      val listLinearBind = new ListLinearBind()
      listLinearBind
        .add(new LinearBindImpl(listName, new NameRemainderEmpty, new SimpleSource(nameVar)))
        .void()

      val listReceipt = new ListReceipt()
      listReceipt.add(new ReceiptLinear(new LinearSimple(listLinearBind))).void()

      new PInput(
        listReceipt,
        // Continuation body: `Nil` when the sync send ends with `.`, otherwise the proc after `;`.
        p.synchsendcont_ match {
          case _: EmptyCont               => new PNil()
          case nonEmptyCont: NonEmptyCont => nonEmptyCont.proc_
        }
      )
    }

    // Wrap the send/receive pair in `new identifier { ... }` and normalize it.
    val listName = new ListNameDecl()
    listName.add(new NameDeclSimpl(identifier)).void()
    NormalizerRec[F].normalize(new PNew(listName, new PPar(send, receive)))
  }

  /** Normalizes synchronous send by transformation to send/receive pair with generated sync channel.
    *
    * NOTE: This is a new version of sync send AST transformation. It uses AST types and not parser types which enables
    * avoiding using random string as a name of generated variable to ensure uniqueness.
    * By using bound variable scope directly we can generate new bounded variable without syntax level name.
    *
    * TODO: To avoid using random var name, create variant of `putBoundVars` to add bound var without String name.
    *
    * {{{
    * // Input: Two sync send expressions.
    * @1!?(2) ; @3!?(4).
    *
    * // Output: Generated AST with two send/receive pairs.
    * new varGen1 {
    *   @1!(varGen1, 2) |
    *   for(_ <- varGen1) {
    *
    *     new varGen2 {
    *       @3!(varGen2, 4) |
    *       for(_ <- varGen2) {
    *         Nil
    *       }
    *     }
    *
    *   }
    * }
    * }}}
    *
    * @param p sync send parser AST object
    * @return transformed [[ParN]] AST object
    */
  def normalizeSendSynchNew[F[_]: Sync: NormalizerRec: BoundVarScope, T >: VarSort: BoundVarWriter](
    p: PSendSynch
  ): F[ParN] =
    BoundVarScope[F].withCopyBoundVarScope {
      for {
        identifier <- Sync[F].delay(UUID.randomUUID().toString)
        // Source position of generated channel is the whole input expression
        varPos = SourcePosition(p.line_num, p.col_num)
        // NOTE(review): refutable pattern — throws MatchError if putBoundVars does not
        // return exactly one index; holds as long as a single fresh name is never shadowed.
        Seq(varIndex) = BoundVarWriter[T].putBoundVars(Seq((identifier, NameSort, varPos)))
        // TODO: To avoid using random var name, create variant of `putBoundVars` to add bound vars without String name.
        // Seq(varIndex) = BoundVarWriter[T].createBoundVars(count = 1)
        varGen = BoundVarN(varIndex)

        // Send on the same channel, but prepend generated name to send data
        chan <- NormalizerRec[F].normalize(p.name_)
        data <- p.listproc_.asScala.toVector.traverse(NormalizerRec[F].normalize)
        send  = SendN(chan, varGen +: data)

        // Receive body is Nil when sync send ends with `.` or normalizes proc after `;`.
        body <- p.synchsendcont_ match {
                  case _: EmptyCont               => Sync[F].pure(NilN)
                  case nonEmptyCont: NonEmptyCont => NormalizerRec[F].normalize(nonEmptyCont.proc_)
                }
        bind    = ReceiveBindN(WildcardN, varGen)
        receive = ReceiveN(bind, body, 1)

        // Return send/receive pair wrapped with a new binding
      } yield NewN(
        bindCount = 1,
        p = ParN.combine(send, receive),
        uri = Seq(),
        injections = Map[String, ParN]()
      )
    }

}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/SimpleTypeNormalizer.scala ───
package io.rhonix.rholang.normalizer

import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.types.*

object SimpleTypeNormalizer {

  /** Maps a parser simple-type token (`Bool`, `Int`, …) to its type-level connective. */
  def normalizeSimpleType(p: PSimpleType): ConnectiveSTypeN = p.simpletype_ match {
    case _: SimpleTypeBool      => ConnBoolN
    case _: SimpleTypeInt       => ConnIntN
    case _: SimpleTypeBigInt    => ConnBigIntN
    case _: SimpleTypeString    => ConnStringN
    case _: SimpleTypeUri       => ConnUriN
    case _: SimpleTypeByteArray => ConnByteArrayN
  }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/VarNormalizer.scala (follows) ───
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import coop.rchain.rholang.interpreter.compiler.*
import coop.rchain.rholang.interpreter.errors.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.*
import io.rhonix.rholang.types.{BoundVarN, FreeVarN, VarN, WildcardN}

object VarNormalizer {

  /** Normalizes a process variable occurrence: a named var resolves as a bound
    * (or, inside a pattern, free) variable of Proc sort; `_` resolves as a wildcard.
    */
  def normalizeVar[F[_]: Sync, T >: VarSort: BoundVarReader: FreeVarReader: FreeVarWriter](
    p: PVar
  )(implicit nestingInfo: NestingInfoReader): F[VarN] = {
    def pos = SourcePosition(p.line_num, p.col_num)
    p.procvar_ match {
      case pvv: ProcVarVar    => normalizeBoundVar[F, T](pvv.var_, pos, ProcSort)
      case _: ProcVarWildcard => normalizeWildcard[F](pos)
    }
  }

  /** Normalizes a remainder pattern (`...rest`): always a fresh free variable or wildcard. */
  def normalizeRemainder[F[_]: Sync, T >: VarSort: FreeVarReader: FreeVarWriter](
    pv: ProcVar
  )(implicit nestingInfo: NestingInfoReader): F[VarN] =
    pv match {
      case pvv: ProcVarVar      =>
        normalizeFreeVar[F, T](pvv.var_, SourcePosition(pvv.line_num, pvv.col_num), ProcSort)
      case pvw: ProcVarWildcard => normalizeWildcard[F](SourcePosition(pvw.line_num, pvw.col_num))
    }

  /** Resolves `varName` against the bound-variable scope.
    *
    * Success requires the bound variable's sort to equal `expectedSort`; a sort
    * mismatch is a context error. An unbound name falls through to free-variable
    * resolution (legal only inside patterns).
    */
  def normalizeBoundVar[F[_]: Sync, T: BoundVarReader: FreeVarReader: FreeVarWriter](
    varName: String,
    pos: SourcePosition,
    expectedSort: T
  )(implicit nestingInfo: NestingInfoReader): F[VarN] = Sync[F].defer {
    BoundVarReader[T].getBoundVar(varName) match {
      // Sort matches (backquoted pattern checks equality with expectedSort): de Bruijn index found.
      case Some(BoundContext(level, `expectedSort`, _)) => Sync[F].pure(BoundVarN(level))
      case Some(BoundContext(_, _, sourcePosition))     =>
        expectedSort match {
          case ProcSort => UnexpectedProcContext(varName, sourcePosition, pos).raiseError
          case NameSort => UnexpectedNameContext(varName, sourcePosition, pos).raiseError
        }

      case None => normalizeFreeVar[F, T](varName, pos, expectedSort)
    }
  }

  /** Introduces `varName` as a free variable.
    *
    * Free variables are only legal inside a pattern and outside any bundle,
    * and each name may be introduced at most once per pattern scope.
    */
  private def normalizeFreeVar[F[_]: Sync, T: FreeVarReader: FreeVarWriter](
    varName: String,
    pos: SourcePosition,
    expectedSort: T
  )(implicit nestingInfo: NestingInfoReader): F[VarN] =
    Sync[F].defer {
      if (nestingInfo.insidePattern)
        if (nestingInfo.insideBundle)
          UnexpectedBundleContent(s"Illegal free variable in bundle at $pos").raiseError
        else
          FreeVarReader[T].getFreeVar(varName) match {
            case None =>
              val index = FreeVarWriter[T].putFreeVar(varName, expectedSort, pos)
              Sync[F].pure(FreeVarN(index))

            // Reuse of a free variable within one pattern is an error.
            case Some(FreeContext(_, _, firstSourcePosition)) =>
              expectedSort match {
                case ProcSort =>
                  UnexpectedReuseOfProcContextFree(varName, firstSourcePosition, pos).raiseError
                case NameSort =>
                  UnexpectedReuseOfNameContextFree(varName, firstSourcePosition, pos).raiseError
              }
          }
      else TopLevelFreeVariablesNotAllowedError(s"$varName at $pos").raiseError
    }

  /** Normalizes `_`: allowed only inside a pattern and never inside a bundle. */
  def normalizeWildcard[F[_]: Sync](
    pos: SourcePosition
  )(implicit nestingInfo: NestingInfoReader): F[VarN] =
    if (nestingInfo.insidePattern)
      if (!nestingInfo.insideBundle) Sync[F].pure(WildcardN)
      else UnexpectedBundleContent(s"Illegal wildcard in bundle at $pos").raiseError
    else TopLevelWildcardsNotAllowedError(s"_ (wildcard) at $pos").raiseError

}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/VarRefNormalizer.scala ───
package io.rhonix.rholang.normalizer

import cats.effect.Sync
import cats.syntax.all.*
import coop.rchain.rholang.interpreter.compiler.*
import coop.rchain.rholang.interpreter.errors.*
import io.rhonix.rholang.ast.rholang.Absyn.*
import io.rhonix.rholang.normalizer.env.*
import io.rhonix.rholang.types.ConnVarRefN

object VarRefNormalizer {

  /** Normalizes a variable reference `=x` / `=*x` in a pattern.
    *
    * The referenced variable must already be bound (searching parent scopes),
    * and the reference kind must agree with the variable's sort
    * (`VarRefKindProc` for Proc sort, `VarRefKindName` for Name sort).
    */
  def normalizeVarRef[F[_]: Sync, T >: VarSort: BoundVarReader](p: PVarRef): F[ConnVarRefN] =
    Sync[F].delay(BoundVarReader[T].findBoundVar(p.var_)).flatMap {
      // Bound variable found
      case Some((BoundContext(idx, kind, sourcePosition), depth)) =>
        kind match {
          case ProcSort =>
            p.varrefkind_ match {
              case _: VarRefKindProc => ConnVarRefN(idx, depth).pure
              case _                 =>
                UnexpectedProcContext(p.var_, sourcePosition, SourcePosition(p.line_num, p.col_num)).raiseError
            }
          case NameSort =>
            p.varrefkind_ match {
              case _: VarRefKindName => ConnVarRefN(idx, depth).pure
              case _                 =>
                UnexpectedNameContext(p.var_, sourcePosition, SourcePosition(p.line_num, p.col_num)).raiseError
            }
        }

      // Bound variable not found
      case None => UnboundVariableRef(p.var_, p.line_num, p.col_num).raiseError
    }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/BoundVarReader.scala ───
package io.rhonix.rholang.normalizer.env

import coop.rchain.rholang.interpreter.compiler.BoundContext

/** Read access to the bound-variable environment. */
trait BoundVarReader[T] {

  /** Gets bound variable by name, current level */
  def getBoundVar(name: String): Option[BoundContext[T]]

  /** Finds bound variable, searching parent levels */
  def findBoundVar(name: String): Option[(BoundContext[T], Int)]
}

object BoundVarReader {
  def apply[T](implicit instance: BoundVarReader[T]): BoundVarReader[T] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/BoundVarScope.scala ───
package io.rhonix.rholang.normalizer.env

/** Scope management for bound variables. */
trait BoundVarScope[F[_]] {

  /** Runs functions in an empty bound variables scope (preserving history) */
  def withNewBoundVarScope[R](scopeFn: F[R]): F[R]

  /** Runs functions in a copy of this bound variables scope (preserving history) */
  def withCopyBoundVarScope[R](scopeFn: F[R]): F[R]
}

object BoundVarScope {
  def apply[F[_]](implicit instance: BoundVarScope[F]): BoundVarScope[F] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/BoundVarWriter.scala ───
package io.rhonix.rholang.normalizer.env

import coop.rchain.rholang.interpreter.compiler.IdContext

/** Write access to the bound-variable environment. */
trait BoundVarWriter[T] {

  /**
    * Inserts all bindings into the bound map and returns a sequence of indices.
    * The returned indices are those that haven't been shadowed by the new bindings.
    *
    * @param bindings a sequence of tuples, where each tuple contains a variable name and its context.
    * @return a sequence of indices of the inserted bindings that haven't been shadowed.
    */
  def putBoundVars(bindings: Seq[IdContext[T]]): Seq[Int]
}

object BoundVarWriter {
  def apply[T](implicit instance: BoundVarWriter[T]): BoundVarWriter[T] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/FreeVarReader.scala ───
package io.rhonix.rholang.normalizer.env

import coop.rchain.rholang.interpreter.compiler.FreeContext

/** Read access to the free-variable environment. */
trait FreeVarReader[T] {

  /** Gets all free variables */
  def getFreeVars: Seq[(String, FreeContext[T])]

  /** Gets free variable */
  def getFreeVar(name: String): Option[FreeContext[T]]
}

object FreeVarReader {
  def apply[T](implicit instance: FreeVarReader[T]): FreeVarReader[T] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/FreeVarScope.scala ───
package io.rhonix.rholang.normalizer.env

/** Scope management for free variables. */
trait FreeVarScope[F[_]] {

  /** Run function in an empty free variables scope (preserving history) */
  def withNewFreeVarScope[R](scopeFn: F[R]): F[R]
}

object FreeVarScope {
  def apply[F[_]](implicit instance: FreeVarScope[F]): FreeVarScope[F] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/FreeVarWriter.scala ───
package io.rhonix.rholang.normalizer.env

import coop.rchain.rholang.interpreter.compiler.IdContext

/** Write access to the free-variable environment. */
trait FreeVarWriter[T] {

  /** Puts free variables to the context
    * @return de Bruijn index of the added variable */
  def putFreeVar(binding: IdContext[T]): Int
}

object FreeVarWriter {
  def apply[T](implicit instance: FreeVarWriter[T]): FreeVarWriter[T] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/NestingInfoReader.scala ───
package io.rhonix.rholang.normalizer.env

/** Retrieve information about nesting structure during normalization. */
trait NestingInfoReader {

  /** Current processing is being executed within a pattern. */
  def insidePattern: Boolean

  /** Current processing is being executed within a receive pattern
    * and this receive is not inside any other pattern */
  def insideTopLevelReceivePattern: Boolean

  /** Current processing is being executed within a bundle */
  def insideBundle: Boolean
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/NestingInfoWriter.scala ───
package io.rhonix.rholang.normalizer.env

/** Preserve information about nesting structure during normalization. */
trait NestingInfoWriter[F[_]] {

  /** Run scopeFn with a note that this is a pattern
    * @param withinReceive Flag is necessary for normalizing the connectives.
    *                      Since we cannot rely on a specific pattern matching order, we cannot use patterns
    *                      separated by \/ to bind any variables in the top-level receive.
    */
  def withinPattern[R](withinReceive: Boolean)(scopeFn: F[R]): F[R]

  /** Run scopeFn with a note that this is a bundle */
  def withinBundle[R](scopeFn: F[R]): F[R]
}

object NestingInfoWriter {
  // Return the trait type for consistency with every other companion `apply`
  // in this package (was the path-dependent singleton `instance.type`).
  def apply[F[_]](implicit instance: NestingInfoWriter[F]): NestingInfoWriter[F] = instance
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/env/VarContext.scala ───
package io.rhonix.rholang.normalizer.env

import coop.rchain.rholang.interpreter.compiler.SourcePosition

/** Context of a variable: de Bruijn index, sort, and position in the source. */
final case class VarContext[T](index: Int, typ: T, sourcePosition: SourcePosition)

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/BoundVarReaderImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import coop.rchain.rholang.interpreter.compiler.BoundContext
import io.rhonix.rholang.normalizer.env.*

/** [[BoundVarReader]] backed by lookup functions, converting [[VarContext]] to [[BoundContext]]. */
final case class BoundVarReaderImpl[T](
  private val getFn: String => Option[VarContext[T]],
  private val findFn: String => Option[(VarContext[T], Int)]
) extends BoundVarReader[T] {

  override def getBoundVar(name: String): Option[BoundContext[T]] = getFn(name).map {
    case VarContext(index, typ, sourcePosition) => BoundContext(index, typ, sourcePosition)
  }

  override def findBoundVar(name: String): Option[(BoundContext[T], Int)] = findFn(name).map {
    case (VarContext(index, typ, sourcePosition), depth) =>
      (BoundContext(index, typ, sourcePosition), depth)
  }
}
// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/BoundVarScopeImpl.scala (follows) ───
// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/BoundVarScopeImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import io.rhonix.rholang.normalizer.env.BoundVarScope

/** [[BoundVarScope]] delegating scope management to a [[VarMapChain]]. */
final case class BoundVarScopeImpl[F[_], T](private val chain: VarMapChain[F, T])
    extends BoundVarScope[F] {
  override def withNewBoundVarScope[R](scopeFn: F[R]): F[R]  = chain.withNewScope(scopeFn)
  override def withCopyBoundVarScope[R](scopeFn: F[R]): F[R] = chain.withCopyScope(scopeFn)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/BoundVarWriterImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import coop.rchain.rholang.interpreter.compiler.{IdContext, SourcePosition}
import io.rhonix.rholang.normalizer.env.BoundVarWriter

/** [[BoundVarWriter]] backed by an insertion function. */
final case class BoundVarWriterImpl[T](private val putFn: (String, T, SourcePosition) => Int)
    extends BoundVarWriter[T] {

  /** Inserts all bindings; returns the indices of bindings not shadowed by a
    * later binding of the same name within this same batch.
    */
  override def putBoundVars(bindings: Seq[IdContext[T]]): Seq[Int] = {
    // Insert all bindings into the bound map (in order, so later ones shadow earlier ones)
    val indices = bindings.map(putFn.tupled(_))

    val names        = bindings.map(_._1)
    val indexedNames = names.zip(indices)
    // Keep only the LAST occurrence of each name, preserving original order:
    // reversing makes `distinctBy` (which keeps first occurrences) keep the last ones.
    indexedNames.reverse.distinctBy(_._1).reverse.map(_._2)
  }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/BundleInfoChain.scala ───
package io.rhonix.rholang.normalizer.envimpl

import cats.effect.Sync
import io.rhonix.rholang.normalizer.syntax.all.*

/**
  * Represents a chain of bundle information.
  *
  * @param chain a history chain of booleans, where each boolean represents a status of a bundle.
  */
final class BundleInfoChain[F[_]: Sync](private val chain: HistoryChain[Boolean]) {

  /**
    * Runs a scope function with a new status (`true` = inside a bundle).
    *
    * @param scopeFn the scope function to run.
    * @return the result of the scope function, wrapped in the effect type F.
    */
  def runWithNewStatus[R](scopeFn: F[R]): F[R] = chain.runWithNewDataInChain(scopeFn, true)

  /**
    * Gets the current status of the bundle information chain.
    *
    * @return a boolean representing the current status of the bundle information chain.
    */
  def getStatus: Boolean = chain.current()
}

object BundleInfoChain {
  // Initial status: not inside a bundle.
  def apply[F[_]: Sync](): BundleInfoChain[F] = new BundleInfoChain(HistoryChain(Seq(false)))
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/FreeVarReaderImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import coop.rchain.rholang.interpreter.compiler.FreeContext
import io.rhonix.rholang.normalizer.env.*

/** [[FreeVarReader]] backed by lookup functions, converting [[VarContext]] to [[FreeContext]]. */
final case class FreeVarReaderImpl[T](
  private val getFn: String => Option[VarContext[T]],
  private val getAllFn: () => Seq[(String, VarContext[T])]
) extends FreeVarReader[T] {

  override def getFreeVars: Seq[(String, FreeContext[T])] =
    getAllFn().map {
      case (name, VarContext(index, typ, sourcePosition)) =>
        (name, FreeContext(index, typ, sourcePosition))
    }

  override def getFreeVar(name: String): Option[FreeContext[T]] = getFn(name).map {
    case VarContext(index, typ, sourcePosition) => FreeContext(index, typ, sourcePosition)
  }
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/FreeVarScopeImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import io.rhonix.rholang.normalizer.env.FreeVarScope

/** [[FreeVarScope]] delegating scope management to a [[VarMapChain]]. */
final case class FreeVarScopeImpl[F[_], T](private val chain: VarMapChain[F, T])
    extends FreeVarScope[F] {
  override def withNewFreeVarScope[R](scopeFn: F[R]): F[R] = chain.withNewScope(scopeFn)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/FreeVarWriterImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import coop.rchain.rholang.interpreter.compiler.{IdContext, SourcePosition}
import io.rhonix.rholang.normalizer.env.FreeVarWriter

/** [[FreeVarWriter]] backed by an insertion function. */
final case class FreeVarWriterImpl[T](private val putFn: (String, T, SourcePosition) => Int)
    extends FreeVarWriter[T] {

  override def putFreeVar(binding: IdContext[T]): Int = putFn.tupled(binding)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/HistoryChain.scala ───
package io.rhonix.rholang.normalizer.envimpl

import sdk.syntax.all.*

/**
  * A class representing a history chain of generic type T.
  * Note: T should be immutable.
  */
final class HistoryChain[T](private val chain: collection.mutable.ListBuffer[T]) {

  /**
    * Retrieve the current (last) element of the chain.
    * @return the last element of the chain
    */
  def current(): T = chain.last

  /**
    * Retrieve the depth (length) of the chain.
    * @return the length of the chain
    */
  def depth: Int = chain.length

  /**
    * Retrieve an iterator for the chain.
    * @return an iterator for the chain (oldest element first)
    */
  def iter: Iterator[T] = chain.iterator

  /**
    * Append a new element to the end of the chain.
    * @param t the element to be appended
    */
  def push(t: T): Unit = chain.append(t).void()

  /**
    * Append a copy of the last element to the end of the chain.
    */
  def pushCopy(): Unit = this.push(chain.last)

  /**
    * Remove and return the last element of the chain.
    * NOTE: throws on an empty chain (delegates to ListBuffer.remove).
    * @return the removed last element of the chain
    */
  def pop(): T = chain.remove(chain.length - 1)
}

object HistoryChain {

  /**
    * Create an empty HistoryChain.
    * @return a new HistoryChain with an empty ListBuffer
    */
  def empty[T]: HistoryChain[T] = new HistoryChain(collection.mutable.ListBuffer[T]())

  /**
    * Create a HistoryChain with predefined Seq.
    * @param seq the Seq of elements to be added to the HistoryChain
    * @return a new HistoryChain with a ListBuffer containing the elements of the Seq
    */
  def apply[T](seq: Seq[T]): HistoryChain[T] =
    new HistoryChain(collection.mutable.ListBuffer[T]() ++= seq)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/NestingInfoReaderImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import io.rhonix.rholang.normalizer.env.NestingInfoReader

/** [[NestingInfoReader]] backed by status functions. */
final class NestingInfoReaderImpl(
  private val insidePatternStatusFn: () => Boolean,
  private val insideTopLevelReceivePatternStatusFn: () => Boolean,
  private val insideBundleStatusFn: () => Boolean
) extends NestingInfoReader {

  override def insidePattern: Boolean = insidePatternStatusFn()

  override def insideTopLevelReceivePattern: Boolean = insideTopLevelReceivePatternStatusFn()

  override def insideBundle: Boolean = insideBundleStatusFn()
}

object NestingInfoReaderImpl {
  def apply(
    insidePatternFn: () => Boolean,
    insideTopLevelReceivePatternFn: () => Boolean,
    insideBundleFn: () => Boolean
  ): NestingInfoReaderImpl =
    new NestingInfoReaderImpl(insidePatternFn, insideTopLevelReceivePatternFn, insideBundleFn)
}
// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/NestingInfoWriterImpl.scala (follows) ───
// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/NestingInfoWriterImpl.scala ───
package io.rhonix.rholang.normalizer.envimpl

import io.rhonix.rholang.normalizer.env.NestingInfoWriter

/** [[NestingInfoWriter]] delegating to pattern/bundle status chains. */
final case class NestingInfoWriterImpl[F[_]](
  private val patternInfo: PatternInfoChain[F],
  private val bundleInfo: BundleInfoChain[F]
) extends NestingInfoWriter[F] {

  // Parameter renamed to `withinReceive` to match the trait declaration, so
  // named-argument call sites behave identically against trait and impl.
  override def withinPattern[R](withinReceive: Boolean)(scopeFn: F[R]): F[R] =
    patternInfo.runWithNewStatus(withinReceive)(scopeFn)

  override def withinBundle[R](scopeFn: F[R]): F[R] = bundleInfo.runWithNewStatus(scopeFn)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/PatternInfoChain.scala ───
package io.rhonix.rholang.normalizer.envimpl

import cats.effect.Sync
import io.rhonix.rholang.normalizer.syntax.all.*

/**
  * Represents a chain of pattern information.
  * @param chain a history chain of tuples, where each tuple represents a status of a pattern.
  */
final class PatternInfoChain[F[_]: Sync](
  private val chain: HistoryChain[(Boolean, Boolean)]
) {

  /**
    * Runs a scope function with a new status.
    *
    * @param inReceive a boolean flag indicating whether the scope function is in receive term.
    * @param scopeFn the scope function to run.
    * @tparam R the type of the result of the scope function.
    * @return the result of the scope function, wrapped in the effect type F.
    */
  def runWithNewStatus[R](inReceive: Boolean)(scopeFn: F[R]): F[R] =
    chain.runWithNewDataInChain(scopeFn, (true, inReceive))

  /**
    * Gets the current status of the pattern information chain.
    *
    * @return a tuple representing the current status of the pattern information chain.
    *         The first Boolean indicates whether we are within a pattern.
    *         The second Boolean indicates whether we are within a top level receive.
    */
  def getStatus: (Boolean, Boolean) = chain.current()
}

object PatternInfoChain {
  // Initial status: not inside a pattern, not inside a receive.
  def apply[F[_]: Sync](): PatternInfoChain[F] =
    new PatternInfoChain(HistoryChain(Seq((false, false))))
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/VarMap.scala ───
package io.rhonix.rholang.normalizer.envimpl

import coop.rchain.rholang.interpreter.compiler.SourcePosition
import io.rhonix.rholang.normalizer.env.VarContext

/** Map that associates variable names with their context, including de Bruijn index, type, and source position. */
final case class VarMap[T](
  private val data: Map[String, VarContext[T]],
  private val nextIndex: Int
) {

  /**
    * Retrieve the variable context by its name.
    * @return Some(varContext) if the variable is found, None otherwise.
    */
  def get(name: String): Option[VarContext[T]] = data.get(name)

  /**
    * Retrieve all variables and their contexts.
    * @return a sequence of tuples, each containing a variable name and its context
    */
  def getAll: Seq[(String, VarContext[T])] = data.toSeq

  /**
    * Add a new variable to the map or update an existing one.
    * Note: the index counter advances even when an existing name is overwritten.
    * @param name the name of the variable
    * @param sort the type of the variable
    * @param sourcePosition the source position of the variable
    * @return a new VarMap with the updated data and next index
    */
  def put(name: String, sort: T, sourcePosition: SourcePosition): VarMap[T] = {
    val newData  = data.updated(name, VarContext(nextIndex, sort, sourcePosition))
    val newIndex = nextIndex + 1
    new VarMap(newData, newIndex)
  }
}

object VarMap {

  /**
    * Create a VarMap with the given data.
    * @param initData the data to initialize the VarMap with
    * @return a new VarMap with the given data inserted in order
    */
  def apply[T](initData: Seq[(String, T, SourcePosition)]): VarMap[T] =
    initData.foldLeft(empty[T]) {
      case (varMap, (name, sort, pos)) => varMap.put(name, sort, pos)
    }

  /**
    * Create an empty VarMap.
    * @return a new VarMap with an empty data map and a next index of 0
    */
  def empty[T]: VarMap[T] = new VarMap(Map[String, VarContext[T]](), 0)
}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/envimpl/VarMapChain.scala ───
package io.rhonix.rholang.normalizer.envimpl

import cats.effect.Sync
import cats.implicits.toFoldableOps
import coop.rchain.rholang.interpreter.compiler.SourcePosition
import io.rhonix.rholang.normalizer.env.VarContext
import io.rhonix.rholang.normalizer.syntax.all.*

/**
  * Represents a chain of variable maps.
  *
  * @param chain a history chain of variable maps.
  * @tparam F the type of the effect.
  * @tparam T the type of the variable sort.
  */
final class VarMapChain[F[_]: Sync, T](private val chain: HistoryChain[VarMap[T]]) {

  /**
    * Runs a scope function with a new, empty variable map.
    *
    * @param scopeFn the scope function to run.
    * @tparam R the type of the result of the scope function.
    * @return the result of the scope function, wrapped in the effect type F.
    */
  def withNewScope[R](scopeFn: F[R]): F[R] = chain.runWithNewDataInChain(scopeFn, VarMap.empty[T])

  /**
    * Runs a scope function with a copy of the current variable map.
    *
    * @param scopeFn the scope function to run.
    * @tparam R the type of the result of the scope function.
    * @return the result of the scope function, wrapped in the effect type F.
    */
  def withCopyScope[R](scopeFn: F[R]): F[R] = chain.runWithNewDataInChain(scopeFn, chain.current())

  /**
    * Adds a new variable to the current variable map and returns its index.
    *
    * @param name the name of the variable.
    * @param sort the sort of the variable.
    * @param sourcePosition the source position of the variable.
    * @return the index of the added variable.
    */
  def putVar(name: String, sort: T, sourcePosition: SourcePosition): Int = {
    chain.updateCurrent(_.put(name, sort, sourcePosition))
    // `.get` is safe here: `name` was inserted by the line above.
    chain.current().get(name).get.index
  }

  /**
    * Retrieves a variable from the current variable map.
    *
    * @param name the name of the variable.
    * @return an option containing the variable context if the variable exists, None otherwise.
    */
  def getVar(name: String): Option[VarContext[T]] = chain.current().get(name)

  /**
    * Retrieves all variables in the current scope.
    *
    * @return a sequence of tuples, where each tuple contains the name of a variable and its context.
    */
  def getAllInScope: Seq[(String, VarContext[T])] = chain.current().getAll

  /**
    * Searches for a variable in the chain of variable maps and returns the first match along with its depth.
    *
    * NOTE(review): `chain.iter` yields the OLDEST scope first, so `depth` counts
    * from the bottom of the chain and the first match is in the outermost scope.
    * Confirm this is the intended direction for var-ref depth resolution
    * (de Bruijn-style depth usually counts from the current scope outward).
    *
    * @param name the name of the variable.
    * @return an option containing a tuple with the variable context and its depth if the variable exists, None otherwise.
    */
  def getFirstVarInChain(name: String): Option[(VarContext[T], Int)] =
    chain.iter.zipWithIndex.toSeq.collectFirstSome {
      case (boundMap, depth) => boundMap.get(name).map((_, depth))
    }

  /**
    * Returns an iterator over the variable maps in the chain.
    */
  def iter: Iterator[VarMap[T]] = chain.iter
}

object VarMapChain {

  /**
    * Creates a new variable map chain with one initial variable map.
    *
    * @param initVarMap the variable map to use.
    */
  def apply[F[_]: Sync, T](initVarMap: VarMap[T]): VarMapChain[F, T] = apply(Seq(initVarMap))

  /**
    * Creates a new variable map chain with the given variable maps.
    *
    * @param initVarMaps the variable maps to use.
    */
  def apply[F[_]: Sync, T](initVarMaps: Seq[VarMap[T]]): VarMapChain[F, T] =
    new VarMapChain(HistoryChain(initVarMaps))

  /**
    * Creates a new variable map chain with an empty variable map.
    */
  def empty[F[_]: Sync, T]: VarMapChain[F, T] = apply(VarMap.empty[T])

}

// ─── file: rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/HistoryChainSyntax.scala ───
package io.rhonix.rholang.normalizer.syntax

import cats.effect.Sync
import cats.syntax.all.*
import io.rhonix.rholang.normalizer.envimpl.HistoryChain

/** Syntax enrichment bringing [[HistoryChainOps]] onto [[HistoryChain]]. */
trait HistoryChainSyntax {
  implicit def normalizerSyntaxHistoryChain[T](x: HistoryChain[T]): HistoryChainOps[T] =
    HistoryChainOps(x)
}

final case class HistoryChainOps[T](private val x: HistoryChain[T]) extends AnyVal {

  /**
   * Updates the current element in the HistoryChain.
+ * @param f a transformation function that takes an element of type `T` and returns a transformed element of the same type + */ + def updateCurrent(f: T => T): Unit = x.push(f(x.pop())) + + /** Run scopeFn with new data in the HistoryChain. */ + def runWithNewDataInChain[F[_]: Sync, R](scopeFn: F[R], newData: T): F[R] = + for { + _ <- Sync[F].delay(x.push(newData)) + res <- scopeFn + _ = x.pop() + } yield res +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/NormalizerSyntax.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/NormalizerSyntax.scala new file mode 100644 index 00000000000..14f354b9265 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/NormalizerSyntax.scala @@ -0,0 +1,77 @@ +package io.rhonix.rholang.normalizer.syntax + +import cats.Functor +import cats.effect.Sync +import cats.syntax.all.* +import coop.rchain.rholang.interpreter.compiler.{FreeContext, IdContext} +import io.rhonix.rholang.normalizer.env.* +import io.rhonix.rholang.normalizer.syntax.all.* + +trait NormalizerSyntax { + implicit def rholangNormalizerSyntax[F[_], A](f: F[A]): NormalizerOps[F, A] = + new NormalizerOps[F, A](f) +} + +class NormalizerOps[F[_], A](val f: F[A]) extends AnyVal { + + /** Run a function within a new scope, label it as a pattern + * @param withinReceive Flag should be true for pattern in receive (input) or contract. */ + def withinPattern( + withinReceive: Boolean = false + )(implicit bvs: BoundVarScope[F], fvs: FreeVarScope[F], nes: NestingInfoWriter[F]): F[A] = + bvs.withNewBoundVarScope(fvs.withNewFreeVarScope(nes.withinPattern(withinReceive)(f))) + + /** Run a function within a new scope, label it as a pattern, + * and subsequently extract all free variables from the normalized result of this function. + * @param withinReceive Flag should be true for pattern in receive (input) or contract. 
*/ + def withinPatternGetFreeVars[T](withinReceive: Boolean = false)( + implicit + fun: Functor[F], + bvs: BoundVarScope[F], + fvs: FreeVarScope[F], + nes: NestingInfoWriter[F], + fvr: FreeVarReader[T] + ): F[(A, Seq[(String, FreeContext[T])])] = + f.withinPattern(withinReceive).map((_, FreeVarReader[T].getFreeVars)) + + /** Run function with restricted conditions with restrictions as for the bundle */ + def withinBundle()(implicit nes: NestingInfoWriter[F]): F[A] = + NestingInfoWriter[F].withinBundle(f) + + /** Bound free variables in a copy of the current scope. + * + * Free variables are sorted by levels and then added with indexes: + * {i0, i1, ..., iN} = {fl0 + last + 1, fl1 + last + 1, ..., flN + last + 1}. + * Here, i0, ..., iN represent the de Bruijn indices of the new bound vars, + * fl0, ..., flN are the de Bruijn levels of the inserted free vars, + * last is the last index among all bound vars at the moment. + */ + def withAbsorbedFreeVars[T]( + freeVars: Seq[(String, FreeContext[T])] + )(implicit sync: Sync[F], bvs: BoundVarScope[F], bvw: BoundVarWriter[T]): F[A] = { + + def absorbFree(freeVars: Seq[(String, FreeContext[T])]): Seq[IdContext[T]] = { + val sortedByLevel = freeVars.sortBy(_._2.level) + val (levels, data) = + sortedByLevel.unzip(fv => (fv._2.level, (fv._1, fv._2.typ, fv._2.sourcePosition))) + assert( + levels == levels.indices, + "Error when absorbing free variables during normalization: incorrect de Bruijn levels." + + s"Should be ${levels.indices}, but was $levels." + ) + data + } + f.withAddedBoundVars(absorbFree(freeVars)).map(_._1) + } + + /** Put new bound variables in a copy of the current scope. 
+ * @return result of the effect and the number of inserted non-duplicate variables + */ + def withAddedBoundVars[T]( + boundVars: Seq[IdContext[T]] + )(implicit sync: Sync[F], bvs: BoundVarScope[F], bvw: BoundVarWriter[T]): F[(A, Seq[Int])] = + BoundVarScope[F].withCopyBoundVarScope(for { + indices <- Sync[F].delay(BoundVarWriter[T].putBoundVars(boundVars)) + fRes <- f + } yield (fRes, indices)) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/package.scala b/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/package.scala new file mode 100644 index 00000000000..2e2c0478e3f --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/normalizer/syntax/package.scala @@ -0,0 +1,7 @@ +package io.rhonix.rholang.normalizer + +import io.rhonix.rholang.normalizer.syntax.* + +package object syntax { object all extends AllSyntaxNormalizer } + +trait AllSyntaxNormalizer extends NormalizerSyntax with HistoryChainSyntax diff --git a/rholang/src/main/scala/io/rhonix/rholang/parmanager/ConnectiveUsed.scala b/rholang/src/main/scala/io/rhonix/rholang/parmanager/ConnectiveUsed.scala new file mode 100644 index 00000000000..ec573770486 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/parmanager/ConnectiveUsed.scala @@ -0,0 +1,59 @@ +package io.rhonix.rholang.parmanager + +import cats.Eval +import cats.syntax.all.* +import io.rhonix.rholang.types.* + +object ConnectiveUsed { + def cUsed(p: RhoTypeN): Eval[Boolean] = p.connectiveUsed + def cUsed(kv: (RhoTypeN, RhoTypeN)): Eval[Boolean] = (cUsed(kv._1), cUsed(kv._2)).mapN(_ || _) + def cUsed(ps: Seq[RhoTypeN]): Eval[Boolean] = ps.existsM(cUsed) + def cUsedKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Eval[Boolean] = kVPairs.existsM(cUsed) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def connectiveUsedFn(input: RhoTypeN): Eval[Boolean] = Eval.defer { + input match { + + /** Basic types */ + case _: NilN.type => Eval.False + case p: ParProcN => cUsed(p.ps) + case p: SendN => 
(cUsed(p.chan), cUsed(p.args)).mapN(_ || _) + case p: ReceiveN => (cUsed(p.binds.map(_.source)), cUsed(p.body)).mapN(_ || _) + case p: MatchN => (cUsed(p.target), cUsed(p.cases.map(_.source))).mapN(_ || _) + case _: NewN => Eval.False // There are no situations when New gets into the matcher + + /** Ground types */ + case _: GroundN => Eval.False + + /** Collections */ + case p: EListN => (cUsed(p.ps), p.remainder.existsM(cUsed)).mapN(_ || _) + case p: ETupleN => cUsed(p.ps) + case p: ESetN => (cUsed(p.ps.toSeq), p.remainder.existsM(cUsed)).mapN(_ || _) + case p: EMapN => (cUsedKVPairs(p.ps.toSeq), p.remainder.existsM(cUsed)).mapN(_ || _) + + /** Vars */ + case _: BoundVarN => Eval.False + case _: FreeVarN => Eval.True + case _: WildcardN.type => Eval.True + + /** Operations */ + case p: Operation1ParN => cUsed(p.p) + case p: Operation2ParN => (cUsed(p.p1), cUsed(p.p2)).mapN(_ || _) + case p: EMethodN => (cUsed(p.target), cUsed(p.args)).mapN(_ || _) + case p: EMatchesN => cUsed(p.target) + + /** Unforgeable names */ + case _: UnforgeableN => Eval.False + + /** Connective */ + case _: ConnectiveSTypeN => Eval.True + case _: ConnectiveFuncN => Eval.True + case _: ConnectiveVarN => Eval.False + + /** Other types */ + case _: BundleN => Eval.False // There are no situations when New gets into the matcher + + case p => throw new Exception(s"Undefined type $p") + } + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/parmanager/Constants.scala b/rholang/src/main/scala/io/rhonix/rholang/parmanager/Constants.scala new file mode 100644 index 00000000000..a14e9b111bd --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/parmanager/Constants.scala @@ -0,0 +1,88 @@ +package io.rhonix.rholang.parmanager + +private[parmanager] object Constants { + final val intSize = 4 + final val longSize = 8 + final val booleanSize = 1 + + final val tagSize = 1 + + /** Tags for serialization */ + /** Basic types */ + final val NIL = 0x01.toByte + final val PARPROC = 0x02.toByte 
  final val SEND    = 0x03.toByte
  final val RECEIVE = 0x04.toByte
  final val MATCH   = 0x05.toByte
  final val NEW     = 0x06.toByte

  /** Ground types */
  final val GBOOL       = 0x10.toByte
  final val GINT        = 0x11.toByte
  final val GBIG_INT    = 0x12.toByte
  final val GSTRING     = 0x13.toByte
  final val GBYTE_ARRAY = 0x14.toByte
  final val GURI        = 0x15.toByte

  /** Collections */
  final val ELIST  = 0x20.toByte
  final val ETUPLE = 0x21.toByte
  final val ESET   = 0x22.toByte
  final val EMAP   = 0x23.toByte

  /** Vars */
  final val BOUND_VAR = 0x2a.toByte
  final val FREE_VAR  = 0x2b.toByte
  final val WILDCARD  = 0x2c.toByte

  /** Operations */
  final val ENEG = 0x30.toByte
  final val ENOT = 0x31.toByte

  final val EPLUS       = 0x32.toByte
  final val EMINUS      = 0x33.toByte
  final val EMULT       = 0x34.toByte
  final val EDIV        = 0x35.toByte
  final val EMOD        = 0x36.toByte
  final val ELT         = 0x37.toByte
  final val ELTE        = 0x38.toByte
  final val EGT         = 0x39.toByte
  final val EGTE        = 0x3a.toByte
  final val EEQ         = 0x3b.toByte
  final val ENEQ        = 0x3c.toByte
  final val EAND        = 0x3d.toByte
  final val ESHORTAND   = 0x3e.toByte
  final val EOR         = 0x3f.toByte
  final val ESHORTOR    = 0x40.toByte
  final val EPLUSPLUS   = 0x41.toByte
  final val EMINUSMINUS = 0x42.toByte
  final val EPERCENT    = 0x43.toByte

  final val EMETHOD  = 0x4a.toByte
  final val EMATCHES = 0x4b.toByte

  /** Unforgeable names */
  final val UPRIVATE     = 0x50.toByte
  final val UDEPLOY_ID   = 0x51.toByte
  final val UDEPLOYER_ID = 0x52.toByte

  /** Connective */
  final val CONNECTIVE_BOOL      = 0x70.toByte
  final val CONNECTIVE_INT       = 0x71.toByte
  final val CONNECTIVE_STRING    = 0x72.toByte
  final val CONNECTIVE_URI       = 0x73.toByte
  final val CONNECTIVE_BYTEARRAY = 0x74.toByte
  final val CONNECTIVE_BIG_INT   = 0x75.toByte
  final val CONNECTIVE_NOT       = 0x76.toByte
  final val CONNECTIVE_AND       = 0x77.toByte
  final val CONNECTIVE_OR        = 0x78.toByte
  final val CONNECTIVE_VARREF    = 0x79.toByte

  /** Auxiliary types */
  final val RECEIVE_BIND = 0x80.toByte
  final val MATCH_CASE   = 0x81.toByte

  /** Other types */
  final val BUNDLE         = 0x90.toByte
  final val SYS_AUTH_TOKEN = 0x91.toByte
}

// --- new file: rholang/src/main/scala/io/rhonix/rholang/parmanager/EvalRequired.scala ---
package io.rhonix.rholang.parmanager

import io.rhonix.rholang.types.*

object EvalRequired {
  // Helpers delegating to the node's cached `evalRequired` field, combined
  // with logical OR over children / pairs.
  def eReq(p: RhoTypeN): Boolean                        = p.evalRequired
  def eReq(kv: (RhoTypeN, RhoTypeN)): Boolean           = eReq(kv._1) || eReq(kv._2)
  def eReq(ps: Seq[RhoTypeN]): Boolean                  = ps.exists(eReq)
  def eReqKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Boolean = kVPairs.exists(eReq)

  // Computes whether the AST rooted at `input` requires evaluation (reduction)
  // before it can be treated as a value.
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def evalRequiredFn(input: RhoTypeN): Boolean = input match {

    /** Basic types */
    case p: BasicN =>
      p match {
        case _: NilN.type    => false
        case pProc: ParProcN => eReq(pProc.ps)
        // Send / Receive / Match / New always require evaluation.
        case _               => true
      }

    /** Ground types */
    case _: GroundN => false

    /** Collections */
    case p: EListN  => eReq(p.ps)
    case p: ETupleN => eReq(p.ps)
    case p: ESetN   => eReq(p.ps.toSeq)
    case p: EMapN   => eReqKVPairs(p.ps.toSeq)

    /** Vars */
    case _: VarN => true

    /** Operations */
    case _: OperationN => true

    /** Unforgeable names */
    case _: UnforgeableN => false

    /** Connective */
    case _: ConnectiveN => false

    /** Other types */
    case p: BundleN => eReq(p.body)

    case p => throw new Exception(s"Undefined type $p")
  }
}

// --- new file: rholang/src/main/scala/io/rhonix/rholang/parmanager/Manager.scala ---
package io.rhonix.rholang.parmanager
import cats.Eval
import com.google.protobuf.CodedOutputStream
import io.rhonix.rholang.types.{NilN, ParN, ParProcN, RhoTypeN}
import sdk.codecs.protobuf
import sdk.codecs.protobuf.{ProtoCodec, ProtoPrimitiveReader}

import java.io.InputStream

object Manager {
  // Recursively flattens a par sequence: drops Nils and inlines nested ParProcs.
  private def flatPs(ps: Seq[ParN]): Seq[ParN] =
    ps.flatMap {
      case _: NilN.type => Seq()
      case x: ParProcN  => flatPs(x.ps)
      case p            => Seq(p)
    }

  // Collapses a (already flattened) sequence to the minimal representation:
  // empty -> Nil, singleton -> the element itself, otherwise ParProc.
  private def makePProc(ps: Seq[ParN]): ParN = ps match {
    case Nil      => NilN
    case p :: Nil => p
    case _        => ParProcN(ps)
  }

  /**
   * Create a flatten parallel Par (ParProc) from par sequence
   * Flatting is the process of transforming ParProc(P, Q, ...):
   * - empty data: ParProc() -> Nil
   * - single data: ParProc(P) -> P
   * - nil data: ParProc(P, Q, Nil) -> ParProc(P, Q)
   * - nested data ParProc(ParProc(P,Q), ParProc(L,K)) -> ParProc(P, Q, L, K)
   * @param ps initial par sequence to be executed in parallel
   * @return the flattened, minimal Par representation
   */
  def flattedPProc(ps: Seq[ParN]): ParN = makePProc(flatPs(ps))

  /**
   * Create a flatten parallel Par (ParProc) from two Pars.
   * See [[flattedPProc]] for more information.
   */
  def combinePars(p1: ParN, p2: ParN): ParN = flattedPProc(Seq(p1, p2))

  /** MetaData */
  // Lazy (Eval-based) metadata computations delegated to the dedicated objects.
  def rhoHashFn(p: RhoTypeN): Eval[Array[Byte]] = RhoHash.calcHash(p)
  def serializedSizeFn(p: RhoTypeN): Eval[Int]  = SerializedSize.calcSerSize(p)
  // Serializes via protobuf; when memoizeChildren is set, children reuse their
  // cached `serialized` values instead of being re-serialized.
  def serializedFn(p: RhoTypeN, memoizeChildren: Boolean): Eval[Array[Byte]] = {
    val write = (out: CodedOutputStream) =>
      Serialization.serialize(p, protobuf.ProtoPrimitiveWriter(out), memoizeChildren)
    p.serializedSize.flatMap(size => ProtoCodec.encode(size, write))
  }
  def connectiveUsedFn(p: RhoTypeN): Eval[Boolean] = ConnectiveUsed.connectiveUsedFn(p)
  def evalRequiredFn(p: RhoTypeN): Boolean         = EvalRequired.evalRequiredFn(p)
  def substituteRequiredFn(p: RhoTypeN): Boolean   = SubstituteRequired.substituteRequiredFn(p)

  // Deserialize with protobuf
  def protoDeserialize(bytes: Array[Byte]): ParN = {
    val decode = (in: InputStream) => Serialization.deserialize(ProtoPrimitiveReader(in))
    ProtoCodec.decode(bytes, decode).value
  }
}

// --- new file: rholang/src/main/scala/io/rhonix/rholang/parmanager/RhoHash.scala ---
package io.rhonix.rholang.parmanager

import cats.Eval
import cats.syntax.all.*
import io.rhonix.rholang.parmanager.Constants.*
import io.rhonix.rholang.types.*
import sdk.hashing.ProtoBlakeHashing.*

object RhoHash {

  /** Creates singleton byte array with supplied byte. */
  def arE(b: Byte): Eval[Array[Byte]] = Eval.now(Array[Byte](b))

  /** Hashes Rholang AST node wrapped in Option (presence flag hash ++ value hash). */
  def hashOpt(opt: Option[ParN]): Eval[Array[Byte]] =
    opt.map(x => (HASH_TRUE, x.rhoHash).mapN(_ ++ _)).getOrElse(HASH_FALSE)

  /** Hashes [[ReceiveBindN]] object part of the [[ReceiveN]] Rholang constructor.
   */
  def hashReceiveBind(p: ReceiveBindN): Eval[Array[Byte]] =
    (arE(RECEIVE_BIND)
      +++ p.patterns.traverse(_.rhoHash)
      ++ hash(p.freeCount)
      ++ p.source.rhoHash
      ++ hashOpt(p.remainder))
      .map(hash)

  /** Hashes [[MatchCaseN]] object part of the [[MatchN]] Rholang constructor. */
  def hashMatchCase(p: MatchCaseN): Eval[Array[Byte]] =
    (arE(MATCH_CASE) ++ p.pattern.rhoHash ++ p.source.rhoHash ++ hash(p.freeCount)).map(hash)

  /** ==Computes the hash of the Rholang AST types.==
    *
    * This function represents the specification of the hashing algorithm for the Rholang core types (AST).
    *
    * ===How to read the code?===
    *
    * To make the specification more succinct three combinator functions are defined `++`, `+++` and `+|+`.
    * These functions are lifted to work on the `Eval` type level which means combining types are wrapped in `Eval`.
    *
    * They are used to combine bytes representing hashes of the object tree being hashed.
    *
    * ===Concatenates two byte arrays===
    *
    * {{{ ++ : Eval[Array[Byte]] => Eval[Array[Byte]] => Eval[Array[Byte]] }}}
    * Concatenates two byte arrays. It works the same as Scala concat (`++`) function, but wrapped in `Eval`.
    *
    * ===Prepends a byte array to the sequence of byte arrays===
    *
    * {{{ +++ : Eval[Array[Byte]] => Eval[Seq[Array[Byte]] => Eval[Array[Byte]] }}}
    * Prepends the byte array to the concatenated and hashed sequence of byte arrays.
    *
    * ===Prepends a byte array to the sequence of byte arrays with sorting===
    *
    * {{{ +|+ : Eval[Array[Byte]] => Eval[Seq[Array[Byte]] => Eval[Array[Byte]] }}}
    * The same as `+++`, but the sequence is first sorted before concatenation.
    * (Used for unordered children — ParProc, Set, Map — so the hash is
    * independent of element order.)
    *
    * @param input Rholang AST root object
    */
  // TODO: Properly handle errors with return type (remove throw)
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def calcHash(input: RhoTypeN): Eval[Array[Byte]] = Eval.defer {
    input match {

      /* Terminal expressions (0-arity constructors) */
      /* =========================================== */

      case _: NilN.type | _: GBoolN | _: GIntN | _: GBigIntN | _: GStringN | _: GByteArrayN |
          _: GUriN | _: WildcardN.type
          /* Unforgeable names */
          | _: UnforgeableN
          /* Vars */
          | _: BoundVarN | _: FreeVarN | _: ConnVarRefN
          /* Simple types */
          | _: ConnBoolN.type | _: ConnIntN.type | _: ConnBigIntN.type | _: ConnStringN.type |
          _: ConnUriN.type | _: ConnByteArrayN.type =>
        // Terminals use serialized value as a source for hashing (AST base case)
        input.serialized.map(hash)

      /* Unary expressions (1-arity constructors) */
      /* ======================================== */

      case p: Operation1ParN =>
        val (tag, op) = p match {
          case e: ENegN => (ENEG, e.p)
          case e: ENotN => (ENOT, e.p)
        }
        (arE(tag) ++ op.rhoHash).map(hash)

      case p: BundleN => (arE(BUNDLE) ++ p.body.rhoHash).map(hash)

      /* Connective */
      case p: ConnNotN => (arE(CONNECTIVE_NOT) ++ p.p.rhoHash).map(hash)

      /* Binary expressions (2-arity constructors) */
      /* ========================================= */

      case p: Operation2ParN =>
        val tag = p match {
          case _: EPlusN           => EPLUS
          case _: EMinusN          => EMINUS
          case _: EMultN           => EMULT
          case _: EDivN            => EDIV
          case _: EModN            => EMOD
          case _: ELtN             => ELT
          case _: ELteN            => ELTE
          case _: EGtN             => EGT
          case _: EGteN            => EGTE
          case _: EEqN             => EEQ
          case _: ENeqN            => ENEQ
          case _: EAndN            => EAND
          case _: EShortAndN       => ESHORTAND
          case _: EOrN             => EOR
          case _: EShortOrN        => ESHORTOR
          case _: EPlusPlusN       => EPLUSPLUS
          case _: EMinusMinusN     => EMINUSMINUS
          case _: EPercentPercentN => EPERCENT
        }
        (arE(tag) ++ p.p1.rhoHash ++ p.p2.rhoHash).map(hash)

      case p: EMatchesN =>
        (arE(EMATCHES) ++ p.target.rhoHash ++ p.pattern.rhoHash).map(hash)

      /* N-ary parameter expressions (N-arity constructors) */
      /* ================================================== */

      case p: ParProcN => (arE(PARPROC) +|+ p.ps.traverse(_.rhoHash)).map(hash)

      case p: SendN =>
        (arE(SEND) ++ p.chan.rhoHash ++ hash(p.persistent) +|+ p.args.traverse(_.rhoHash)).map(hash)

      case p: ReceiveN =>
        (arE(RECEIVE)
          ++ hash(p.persistent)
          ++ hash(p.peek)
          ++ hash(p.bindCount)
          +|+ p.binds.traverse(_.rhoHash)
          ++ p.body.rhoHash)
          .map(hash)

      case p: MatchN => (arE(MATCH) ++ p.target.rhoHash +++ p.cases.traverse(_.rhoHash)).map(hash)

      case p: NewN =>
        (arE(NEW)
          ++ hash(p.bindCount)
          +|+ p.uri.traverse(_.rhoHash)
          +|+ p.injections.toSeq.traverse(_.bimap(_.rhoHash, _.rhoHash).mapN(_ ++ _))
          ++ p.p.rhoHash)
          .map(hash)

      /* Collections */
      case p: ETupleN => (arE(ETUPLE) +++ p.ps.traverse(_.rhoHash)).map(hash)
      case p: EListN  => (arE(ELIST) +++ p.ps.traverse(_.rhoHash) ++ hashOpt(p.remainder)).map(hash)
      case p: ESetN   =>
        (arE(ESET) +|+ p.ps.toSeq.traverse(_.rhoHash) ++ hashOpt(p.remainder)).map(hash)
      case p: EMapN   =>
        (arE(EMAP)
          +|+ p.ps.toSeq.traverse(_.bimap(_.rhoHash, _.rhoHash).mapN(_ ++ _))
          ++ hashOpt(p.remainder))
          .map(hash)

      /* Connective */
      case p: ConnAndN => (arE(CONNECTIVE_AND) +++ p.ps.traverse(_.rhoHash)).map(hash)
      case p: ConnOrN  => (arE(CONNECTIVE_OR) +++ p.ps.traverse(_.rhoHash)).map(hash)

      case p: EMethodN =>
        (arE(EMETHOD) ++ hash(p.methodName) +++ p.args.traverse(_.rhoHash) ++ p.target.rhoHash)
          .map(hash)

      case p => throw new Exception(s"Unknown type `$p`")
    }
  }
}

// --- new file: rholang/src/main/scala/io/rhonix/rholang/parmanager/RhoRecWriter.scala ---
package
io.rhonix.rholang.parmanager + +import cats.Applicative +import cats.syntax.all.* +import io.rhonix.rholang.types.RhoTypeN +import sdk.codecs.PrimitiveWriter + +object RhoRecWriter { + def apply[F[_]: Applicative]( + writer: PrimitiveWriter[F], + rec: RhoTypeN => F[Unit] + ): RhoRecWriter[F] = new RhoRecWriter(writer, rec) +} + +/** Wrapper for protobuf serialization with recursive function. */ +class RhoRecWriter[F[_]: Applicative] private ( + writer: PrimitiveWriter[F], + rec: RhoTypeN => F[Unit] +) { + def writeBigInt(x: BigInt): F[Unit] = writer.write(x.toByteArray) + + // Recursive traversal + def writePar(x: RhoTypeN): F[Unit] = rec(x) + + // Recursive traversal of a sequence + def writeSeq(seq: Seq[RhoTypeN]): F[Unit] = writeSeq[RhoTypeN](seq, writePar) + + def writeOpt(pOpt: Option[RhoTypeN]): F[Unit] = + pOpt.map(writer.write(true) *> writePar(_)).getOrElse(writer.write(false)) + + def writeTuplePar(kv: (RhoTypeN, RhoTypeN)): F[Unit] = + writePar(kv._1) *> writePar(kv._2) + + def writeTupleStringPar(kv: (String, RhoTypeN)): F[Unit] = + writer.write(kv._1) *> writePar(kv._2) + + // Writes serialized value of a sequence + def writeSeq[T](seq: Seq[T], f: T => F[Unit]): F[Unit] = + writer.write(seq.size) *> seq.traverse_(f) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/parmanager/Serialization.scala b/rholang/src/main/scala/io/rhonix/rholang/parmanager/Serialization.scala new file mode 100644 index 00000000000..a3d79240ff8 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/parmanager/Serialization.scala @@ -0,0 +1,321 @@ +package io.rhonix.rholang.parmanager + +import cats.Eval +import cats.syntax.all.* +import io.rhonix.rholang.parmanager.Constants.* +import io.rhonix.rholang.types.* +import sdk.codecs.{PrimitiveReader, PrimitiveWriter} + +object Serialization { + + /** + * Serialization of the Rholang AST types. 
   *
   * @param inp Rholang AST root object
   * @param wrt Writer of primitive types
   * @param memo Use memoization for all children fields recursively
   */
  // TODO: Properly handle errors with return type (remove throw)
  // NOTE(review): the field order written here is the wire format and must
  // stay in lockstep with `deserialize` below.
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def serialize(inp: RhoTypeN, wrt: PrimitiveWriter[Eval], memo: Boolean): Eval[Unit] = Eval.defer {
    // Recursive traversal with or without memoization of all children objects
    val writePar: RhoTypeN => Eval[Unit] =
      if (memo)
        // Recursive traversal using memoized values
        _.serialized.flatMap(wrt.writeRaw)
      else
        // Recursive traversal of the whole object without memoization of intermediaries
        serialize(_, wrt, memo)

    val rhoWriter: RhoRecWriter[Eval] = RhoRecWriter(wrt, writePar)

    import rhoWriter._
    import wrt._

    // Wire order: patterns, source, remainder, freeCount (matches readReceiveBind).
    def writeReceiveBind(p: ReceiveBindN): Eval[Unit] =
      write(RECEIVE_BIND) *>
        writeSeq(p.patterns) *>
        writePar(p.source) *>
        writeOpt(p.remainder) *>
        write(p.freeCount)

    // Wire order: pattern, source, freeCount (matches readMatchMCase).
    def writeMatchCase(p: MatchCaseN): Eval[Unit] =
      write(MATCH_CASE) *> writePar(p.pattern) *> writePar(p.source) *> write(p.freeCount)

    inp match {

      /* Terminal expressions (0-arity constructors) */
      /* =========================================== */

      case _: NilN.type      => write(NIL)
      case p: GBoolN         => write(GBOOL) *> write(p.v)
      case p: GIntN          => write(GINT) *> write(p.v)
      case p: GBigIntN       => write(GBIG_INT) *> writeBigInt(p.v)
      case p: GStringN       => write(GSTRING) *> write(p.v)
      case p: GByteArrayN    => write(GBYTE_ARRAY) *> write(p.v)
      case p: GUriN          => write(GURI) *> write(p.v)
      case _: WildcardN.type => write(WILDCARD)

      /* Unforgeable names */
      case p: UnforgeableN =>
        val unfKind = p match {
          case _: UPrivateN      => UPRIVATE
          case _: UDeployIdN     => UDEPLOY_ID
          case _: UDeployerIdN   => UDEPLOYER_ID
          case _: USysAuthTokenN => SYS_AUTH_TOKEN
        }
        write(unfKind) *> write(p.v)

      /* Vars */
      case p: BoundVarN   => write(BOUND_VAR) *> write(p.idx)
      case p: FreeVarN    => write(FREE_VAR) *> write(p.idx)
      case p: ConnVarRefN => write(CONNECTIVE_VARREF) *> write(p.index) *> write(p.depth)

      /* Simple types */
      case _: ConnBoolN.type      => write(CONNECTIVE_BOOL)
      case _: ConnIntN.type       => write(CONNECTIVE_INT)
      case _: ConnBigIntN.type    => write(CONNECTIVE_BIG_INT)
      case _: ConnStringN.type    => write(CONNECTIVE_STRING)
      case _: ConnUriN.type       => write(CONNECTIVE_URI)
      case _: ConnByteArrayN.type => write(CONNECTIVE_BYTEARRAY)

      /* Unary expressions (1-arity constructors) */
      /* ======================================== */

      case p: Operation1ParN =>
        val tag = p match {
          case _: ENegN => ENEG
          case _: ENotN => ENOT
        }
        write(tag) *> writePar(p.p)

      case p: BundleN =>
        write(BUNDLE) *> writePar(p.body) *> write(p.writeFlag) *> write(p.readFlag)

      /* Connective */
      case p: ConnNotN => write(CONNECTIVE_NOT) *> writePar(p.p)

      /* Binary expressions (2-arity constructors) */
      /* ========================================= */

      case p: Operation2ParN =>
        val tag = p match {
          case _: EPlusN           => EPLUS
          case _: EMinusN          => EMINUS
          case _: EMultN           => EMULT
          case _: EDivN            => EDIV
          case _: EModN            => EMOD
          case _: ELtN             => ELT
          case _: ELteN            => ELTE
          case _: EGtN             => EGT
          case _: EGteN            => EGTE
          case _: EEqN             => EEQ
          case _: ENeqN            => ENEQ
          case _: EAndN            => EAND
          case _: EShortAndN       => ESHORTAND
          case _: EOrN             => EOR
          case _: EShortOrN        => ESHORTOR
          case _: EPlusPlusN       => EPLUSPLUS
          case _: EMinusMinusN     => EMINUSMINUS
          case _: EPercentPercentN => EPERCENT
        }
        write(tag) *> writePar(p.p1) *> writePar(p.p2)

      case p: EMatchesN => write(EMATCHES) *> writePar(p.target) *> writePar(p.pattern)

      /* N-ary parameter expressions (N-arity constructors) */
      /* ================================================== */

      // Unordered children are written in sorted order so serialization is canonical.
      case p: ParProcN => write(PARPROC) *> p.psSorted.flatMap(writeSeq)

      case p: SendN => write(SEND) *> writePar(p.chan) *> writeSeq(p.args) *> write(p.persistent)

      case p: ReceiveN =>
        write(RECEIVE) *>
          p.bindsSorted.flatMap(writeSeq(_, writeReceiveBind)) *>
          writePar(p.body) *>
          write(p.persistent) *>
          write(p.peek) *>
          write(p.bindCount)

      case p: MatchN => write(MATCH) *> writePar(p.target) *> writeSeq(p.cases, writeMatchCase)

      case p: NewN =>
        write(NEW) *>
          write(p.bindCount) *>
          writePar(p.p) *>
          p.urisSorted.flatMap(writeSeq) *>
          p.injectionsSorted.flatMap(writeSeq(_, writeTuplePar))

      /* Collections */
      case p: EListN  => write(ELIST) *> writeSeq(p.ps) *> writeOpt(p.remainder)
      case p: ETupleN => write(ETUPLE) *> writeSeq(p.ps)
      case p: ESetN   => write(ESET) *> p.psSorted.flatMap(writeSeq) *> writeOpt(p.remainder)
      case p: EMapN   =>
        write(EMAP) *> p.psSorted.flatMap(writeSeq(_, writeTuplePar)) *> writeOpt(p.remainder)

      /* Connective */
      case p: ConnAndN => write(CONNECTIVE_AND) *> writeSeq(p.ps)
      case p: ConnOrN  => write(CONNECTIVE_OR) *> writeSeq(p.ps)

      case eMethod: EMethodN =>
        write(EMETHOD) *>
          writePar(eMethod.target) *>
          write(eMethod.methodName) *>
          writeSeq(eMethod.args)

      case p => throw new Exception(s"Unknown type `$p`")
    }
  }

  // TODO: Properly handle errors with return type (remove throw)
  // NOTE(review): this definition continues beyond the visible chunk; the
  // remaining cases of `matchPar` are not shown here.
  def deserialize(primitiveReader: PrimitiveReader[Eval]): Eval[ParN] = {
    import primitiveReader._

    def readBigInt: Eval[BigInt] = readBytes.map(BigInt(_))

    // Reads a length-prefixed sequence; `v` is re-evaluated once per element.
    def readSeq[T](v: Eval[T]): Eval[Seq[T]] = readInt.flatMap(Seq.range(0, _).as(v).sequence)

    // Reads par object with all nested objects
    def readPar: Eval[ParN] = readByte >>= matchPar

    // Reads sequence of pars
    def readPars: Eval[Seq[ParN]] = readSeq(readPar)

    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    def readVar: Eval[VarN] =
      readPar.map {
        case v: VarN => v
        case p       => throw new Exception(s"Expected VarN, found `$p`")
      }

    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    def readGString: Eval[GStringN] =
      readPar.map {
        case v: GStringN => v
        case p           => throw new Exception(s"Expected GStringN, found `$p`")
      }

    // Presence flag followed by the value, mirroring writeOpt.
    def readVarOpt: Eval[Option[VarN]] =
      readBool.flatMap(x => if (x) readVar.map(Some(_)) else Eval.now(none))

    def readTuplePar: Eval[(ParN, ParN)]              = (readPar, readPar).mapN((_, _))
    def readTupleStringPar: Eval[(GStringN, ParN)]    = (readGString, readPar).mapN((_, _))

    // Field order mirrors writeReceiveBind: patterns, source, remainder, freeCount.
    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    def readReceiveBind(tag: Byte): Eval[ReceiveBindN] = tag match {
      case RECEIVE_BIND => (readPars, readPar, readVarOpt, readInt).mapN(ReceiveBindN(_, _, _, _))
      case _            => throw new Exception(s"Invalid tag `$tag` for ReceiveBindN deserialization")
    }

    // Field order mirrors writeMatchCase: pattern, source, freeCount.
    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    def readMatchMCase(tag: Byte): Eval[MatchCaseN] = tag match {
      case MATCH_CASE => (readPar, readPar, readInt).mapN(MatchCaseN(_, _, _))
      case _          => throw new Exception(s"Invalid tag `$tag` for matchMCase deserialization")
    }

    @SuppressWarnings(Array("org.wartremover.warts.Throw"))
    def matchPar(tag: Byte): Eval[ParN] = tag match {

      /* Terminal expressions (0-arity constructors) */
      /* =========================================== */

      case NIL         => Eval.now(NilN)
      case GBOOL       => readBool.map(GBoolN(_))
      case GINT        => readLong.map(GIntN(_))
      case GBIG_INT    => readBigInt.map(GBigIntN(_))
      case GSTRING     => readString.map(GStringN(_))
      case GBYTE_ARRAY => readBytes.map(GByteArrayN(_))
      case GURI        => readString.map(GUriN(_))
      case WILDCARD    => Eval.now(WildcardN)

      /* Unforgeable names */
      case UPRIVATE     => readBytes.map(UPrivateN(_))
      case UDEPLOY_ID   => readBytes.map(UDeployIdN(_))
      case UDEPLOYER_ID => readBytes.map(UDeployerIdN(_))
      // TODO: Temporary solution for easier conversion from old types - change type in the future
      case SYS_AUTH_TOKEN => readBytes.as(USysAuthTokenN())

      /* Vars */
      case BOUND_VAR         => readInt.map(BoundVarN(_))
      case FREE_VAR          => readInt.map(FreeVarN(_))
      case CONNECTIVE_VARREF => (readInt, readInt).mapN(ConnVarRefN(_, _))

      /* Simple types */
      case CONNECTIVE_BOOL      => Eval.now(ConnBoolN)
      case CONNECTIVE_INT       => Eval.now(ConnIntN)
      case CONNECTIVE_BIG_INT   => Eval.now(ConnBigIntN)
      case CONNECTIVE_STRING    => Eval.now(ConnStringN)
      case CONNECTIVE_URI       => Eval.now(ConnUriN)
      case CONNECTIVE_BYTEARRAY => Eval.now(ConnByteArrayN)

      /* Unary expressions (1-arity constructors) */
      /* ======================================== */

      case ENEG => readPar.map(ENegN(_))
      case ENOT => readPar.map(ENotN(_))

      case BUNDLE => (readPar, readBool, readBool).mapN(BundleN(_, _, _))

      /* Connective */
      case CONNECTIVE_NOT => readPar.map(ConnNotN(_))

      /* Binary expressions (2-arity constructors) */
      /* ========================================= */

      case EPLUS       => (readPar, readPar).mapN(EPlusN(_, _))
      case EMINUS      => (readPar, readPar).mapN(EMinusN(_, _))
      case EMULT       => (readPar, readPar).mapN(EMultN(_, _))
      case EDIV        => (readPar, readPar).mapN(EDivN(_, _))
      case EMOD        => (readPar, readPar).mapN(EModN(_, _))
      case ELT         => (readPar, readPar).mapN(ELtN(_, _))
      case ELTE        => (readPar, readPar).mapN(ELteN(_, _))
      case EGT         => (readPar, readPar).mapN(EGtN(_, _))
      case EGTE        => (readPar, readPar).mapN(EGteN(_, _))
      case EEQ         => (readPar, readPar).mapN(EEqN(_, _))
      case ENEQ        => (readPar, readPar).mapN(ENeqN(_, _))
      case EAND        => (readPar, readPar).mapN(EAndN(_, _))
      case ESHORTAND   => (readPar, readPar).mapN(EShortAndN(_, _))
      case EOR         => (readPar, readPar).mapN(EOrN(_, _))
      case ESHORTOR    => (readPar, readPar).mapN(EShortOrN(_, _))
      case EPLUSPLUS   => (readPar, readPar).mapN(EPlusPlusN(_, _))
      case EMINUSMINUS => (readPar, readPar).mapN(EMinusMinusN(_, _))
      case EPERCENT    => (readPar, readPar).mapN(EPercentPercentN(_, _))

      case EMATCHES => (readPar, readPar).mapN(EMatchesN(_, _))

      /* N-ary parameter expressions (N-arity constructors) */
      /* ================================================== */

      case PARPROC => readPars.map(ParProcN(_))

      case SEND => (readPar, readPars, readBool).mapN(SendN(_, _, _))
case RECEIVE => + (readSeq(readByte >>= readReceiveBind), readPar, readBool, readBool, readInt) + .mapN(ReceiveN(_, _, _, _, _)) + + case MATCH => (readPar, readSeq(readByte >>= readMatchMCase)).mapN(MatchN(_, _)) + + case NEW => + (readInt, readPar, readSeq(readGString), readSeq(readTupleStringPar)).mapN(NewN(_, _, _, _)) + + /* Collections */ + case ELIST => (readPars, readVarOpt).mapN(EListN(_, _)) + case ETUPLE => readPars.map(ETupleN(_)) + case ESET => (readPars, readVarOpt).mapN(ESetN(_, _)) + case EMAP => (readSeq(readTuplePar), readVarOpt).mapN(EMapN(_, _)) + + /* Connective */ + case CONNECTIVE_AND => readPars.map(ConnAndN(_)) + case CONNECTIVE_OR => readPars.map(ConnOrN(_)) + + case EMETHOD => (readPar, readString, readPars).mapN(EMethodN(_, _, _)) + + case _ => throw new Exception(s"Invalid tag `$tag` for ParN deserialization") + } + + readPar + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/parmanager/SerializedSize.scala b/rholang/src/main/scala/io/rhonix/rholang/parmanager/SerializedSize.scala new file mode 100644 index 00000000000..0adb228f6b6 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/parmanager/SerializedSize.scala @@ -0,0 +1,136 @@ +package io.rhonix.rholang.parmanager + +import cats.Eval +import cats.syntax.all.* +import com.google.protobuf.CodedOutputStream +import io.rhonix.rholang.types.* + +import scala.annotation.unused + +private object ProtobufSerializedSize { + import Constants.* + + // Terminal expressions + def sSize(bytes: Array[Byte]): Eval[Int] = + Eval.later(CodedOutputStream.computeByteArraySizeNoTag(bytes)) + def sSize(@unused v: Boolean): Eval[Int] = Eval.now(booleanSize) + def sSize(v: Int): Eval[Int] = Eval.later(CodedOutputStream.computeInt32SizeNoTag(v)) + def sSize(v: Long): Eval[Int] = Eval.later(CodedOutputStream.computeInt64SizeNoTag(v)) + def sSize(v: String): Eval[Int] = Eval.later(CodedOutputStream.computeStringSizeNoTag(v)) + def sSize(v: BigInt): Eval[Int] = sSize(v.toByteArray) + + // 
Recursive traversal with memoization of serialized size on children objects + def sSize(x: RhoTypeN): Eval[Int] = x.serializedSize + + // Recursive traversal of a sequence with memoization of serialized size on children objects + def sSize(ps: Seq[RhoTypeN]): Eval[Int] = sSizeSeq[RhoTypeN](ps, sSize) + + def sSize(kv: (RhoTypeN, RhoTypeN)): Eval[Int] = + kv.bimap(sSize, sSize).mapN(_ + _) + + def sSize(pOpt: Option[RhoTypeN]): Eval[Int] = + (Eval.now(booleanSize), pOpt.traverse(sSize)).mapN(_ + _.getOrElse(0)) + + def sSizeSeqTuplePar(seq: Seq[(RhoTypeN, RhoTypeN)]): Eval[Int] = + sSizeSeq[(RhoTypeN, RhoTypeN)](seq, sSize) + + def totalSize(sizes: Int*): Int = tagSize + sizes.sum + + // Calculates serialized size of a sequence (the sum of element sizes) + def sSizeSeq[T](seq: Seq[T], f: T => Eval[Int]): Eval[Int] = + (sSize(seq.size), seq.traverse(f).map(_.sum)).mapN(_ + _) +} + +private[parmanager] object SerializedSize { + import ProtobufSerializedSize._ + + def sSizeReceiveBind(p: ReceiveBindN): Eval[Int] = + (sSize(p.patterns), sSize(p.source), sSize(p.remainder), sSize(p.freeCount)) + .mapN(totalSize(_, _, _, _)) + + def sSizeMatchCase(p: MatchCaseN): Eval[Int] = + (sSize(p.pattern), sSize(p.source), sSize(p.freeCount)).mapN(totalSize(_, _, _)) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def calcSerSize(input: RhoTypeN): Eval[Int] = Eval.defer { + input match { + + /* Terminal expressions (0-arity constructors) */ + /* =========================================== */ + + case _: NilN.type => Eval.now(totalSize()) + case p: GBoolN => sSize(p.v).map(totalSize(_)) + case p: GIntN => sSize(p.v).map(totalSize(_)) + case p: GBigIntN => sSize(p.v).map(totalSize(_)) + case p: GStringN => sSize(p.v).map(totalSize(_)) + case p: GByteArrayN => sSize(p.v).map(totalSize(_)) + case p: GUriN => sSize(p.v).map(totalSize(_)) + case _: WildcardN.type => Eval.now(totalSize()) + + /* Unforgeable names */ + case p: UnforgeableN => sSize(p.v).map(totalSize(_)) + + 
/* Vars */ + case p: BoundVarN => sSize(p.idx).map(totalSize(_)) + case p: FreeVarN => sSize(p.idx).map(totalSize(_)) + case p: ConnVarRefN => (sSize(p.index), sSize(p.depth)).mapN(totalSize(_, _)) + + /* Simple types */ + case _: ConnectiveSTypeN => Eval.now(totalSize()) + + /* Unary expressions (1-arity constructors) */ + /* ======================================== */ + + case p: Operation1ParN => sSize(p.p).map(totalSize(_)) + + case p: BundleN => + (sSize(p.body), sSize(p.writeFlag), sSize(p.readFlag)).mapN(totalSize(_, _, _)) + + /* Connective */ + case p: ConnNotN => sSize(p.p).map(totalSize(_)) + + /* Binary expressions (2-arity constructors) */ + /* ========================================= */ + + case p: Operation2ParN => (sSize(p.p1), sSize(p.p2)).mapN(totalSize(_, _)) + + case p: EMatchesN => + (sSize(p.target), sSize(p.pattern)).mapN(totalSize(_, _)) + + /* N-ary parameter expressions (N-arity constructors) */ + /* ================================================== */ + + case p: ParProcN => sSize(p.ps).map(totalSize(_)) + + case p: SendN => (sSize(p.chan), sSize(p.args), sSize(p.persistent)).mapN(totalSize(_, _, _)) + + case p: ReceiveN => + val bindsSize = p.binds.traverse(sSizeReceiveBind).map(totalSize) + (bindsSize, sSize(p.body), sSize(p.persistent), sSize(p.peek), sSize(p.bindCount)) + .mapN(totalSize(_, _, _, _, _)) + + case p: MatchN => + val casesSize = p.cases.traverse(sSizeMatchCase).map(totalSize) + (sSize(p.target), casesSize).mapN(totalSize(_, _)) + + case p: NewN => + (sSize(p.bindCount), sSize(p.p), sSize(p.uri), sSizeSeqTuplePar(p.injections.toSeq)) + .mapN(totalSize(_, _, _, _)) + + /* Collections */ + case p: EListN => (sSize(p.ps), sSize(p.remainder)).mapN(totalSize(_, _)) + case p: ETupleN => sSize(p.ps).map(totalSize(_)) + case p: ESetN => (sSize(p.ps.toSeq), sSize(p.remainder)).mapN(totalSize(_, _)) + case p: EMapN => (sSizeSeqTuplePar(p.ps.toSeq), sSize(p.remainder)).mapN(totalSize(_, _)) + + /* Connective */ + case p: ConnAndN 
=> sSize(p.ps).map(totalSize(_)) + case p: ConnOrN => sSize(p.ps).map(totalSize(_)) + + case p: EMethodN => + (sSize(p.methodName), sSize(p.target), sSize(p.args)).mapN(totalSize(_, _, _)) + + case p => throw new Exception(s"Undefined type $p") + } + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/parmanager/SubstituteRequired.scala b/rholang/src/main/scala/io/rhonix/rholang/parmanager/SubstituteRequired.scala new file mode 100644 index 00000000000..23104f80bcf --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/parmanager/SubstituteRequired.scala @@ -0,0 +1,59 @@ +package io.rhonix.rholang.parmanager + +import io.rhonix.rholang.types.* + +object SubstituteRequired { + def sReq(p: RhoTypeN): Boolean = p.substituteRequired + def sReq(kv: (RhoTypeN, RhoTypeN)): Boolean = kv._1.substituteRequired || kv._2.substituteRequired + def sReq(ps: Seq[RhoTypeN]): Boolean = ps.exists(sReq) + def sReqKVPairs(kVPairs: Seq[(RhoTypeN, RhoTypeN)]): Boolean = kVPairs.exists(sReq) + def sReqReceiveBind(p: ReceiveBindN): Boolean = sReq(p.patterns) || sReq(p.source) + def sReqMatchCase(p: MatchCaseN): Boolean = sReq(p.pattern) || sReq(p.source) + + @SuppressWarnings(Array("org.wartremover.warts.Throw")) + def substituteRequiredFn(input: RhoTypeN): Boolean = input match { + + /** Basic types */ + case _: NilN.type => false + case p: ParProcN => sReq(p.ps) + case p: SendN => sReq(p.chan) || sReq(p.args) + case p: ReceiveN => p.binds.exists(sReqReceiveBind) || sReq(p.body) + case p: MatchN => sReq(p.target) || p.cases.exists(sReqMatchCase) + case p: NewN => sReq(p.p) + + /** Ground types */ + case _: GroundN => false + + /** Collections */ + case p: EListN => sReq(p.ps) + case p: ETupleN => sReq(p.ps) + case p: ESetN => sReq(p.ps.toSeq) + case p: EMapN => sReqKVPairs(p.ps.toSeq) + + /** Vars */ + case _: BoundVarN => true + case _: FreeVarN => false + case _: WildcardN.type => false + + /** Operations */ + case p: Operation1ParN => sReq(p.p) + case p: Operation2ParN => 
sReq(p.p1) || sReq(p.p2) + case p: EMethodN => sReq(p.target) || sReq(p.args) + case p: EMatchesN => sReq(p.target) || sReq(p.pattern) + + /** Unforgeable names */ + case _: UnforgeableN => false + + /** Connective */ + case _: ConnectiveSTypeN => false + case p: ConnNotN => sReq(p.p) + case p: ConnAndN => sReq(p.ps) + case p: ConnOrN => sReq(p.ps) + case _: ConnVarRefN => true + + /** Other types */ + case p: BundleN => sReq(p.body) + + case p => throw new Exception(s"Undefined type $p") + } +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Basic.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Basic.scala new file mode 100644 index 00000000000..9093d685de9 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/types/Basic.scala @@ -0,0 +1,183 @@ +package io.rhonix.rholang.types + +import cats.Eval +import cats.syntax.all.* +import io.rhonix.rholang.parmanager.RhoHash +import io.rhonix.rholang.types.ParN.* + +object NilN extends BasicN + +/** * + * Rholang process + * + * For example, `@0!(1) | @2!(3) | for(x <- @0) { Nil }` has two sends + * and one receive. + */ +final class ParProcN(val ps: Seq[ParN]) extends BasicN { + // Sorted by the hash of the objects which is memoized as part of Rho type + val psSorted: Eval[Seq[ParN]] = this.ps.sortByBytes(_.rhoHash).memoize +} + +object ParProcN { def apply(ps: Seq[ParN]): ParProcN = new ParProcN(ps) } + +/** * + * A send is written `chan!(data)` or `chan!!(data)` for a persistent send. + * Upon send, all free variables in data are substituted with their values. 
+ */ +final class SendN(val chan: ParN, val args: Seq[ParN], val persistent: Boolean) extends BasicN +object SendN { + def apply(chan: ParN, args: Seq[ParN], persistent: Boolean): SendN = + new SendN(chan, args, persistent) + + def apply(chan: ParN, args: Seq[ParN]): SendN = + apply(chan, args, persistent = false) + + def apply(chan: ParN, args: ParN, persistent: Boolean): SendN = + apply(chan, Seq(args), persistent) + + def apply(chan: ParN, arg: ParN): SendN = + apply(chan, Seq(arg), persistent = false) +} + +/** * + * A receive is written `for(binds) { body }` + * i.e. `for(patterns <- source) { body }` + * or for a persistent recieve: `for(patterns <= source) { body }`. + * + * It's an error for free Variable to occur more than once in a pattern. + */ +final class ReceiveN( + val binds: Seq[ReceiveBindN], + val body: ParN, + val persistent: Boolean, + val peek: Boolean, + val bindCount: Int +) extends BasicN { + // Sorted by the hash of the objects which is memoized as part of Rho type + val bindsSorted: Eval[Seq[ReceiveBindN]] = this.binds.sortByBytes(_.rhoHash).memoize +} + +object ReceiveN { + def apply( + binds: Seq[ReceiveBindN], + body: ParN, + persistent: Boolean, + peek: Boolean, + bindCount: Int + ): ReceiveN = + new ReceiveN(binds, body, persistent, peek, bindCount) + + def apply( + bind: ReceiveBindN, + body: ParN, + persistent: Boolean, + peek: Boolean, + bindCount: Int + ): ReceiveN = + apply(Seq(bind), body, persistent, peek, bindCount) + + def apply(binds: Seq[ReceiveBindN], body: ParN, bindCount: Int): ReceiveN = + apply(binds, body, persistent = false, peek = false, bindCount) + + def apply(bind: ReceiveBindN, body: ParN, bindCount: Int): ReceiveN = + apply(Seq(bind), body, bindCount) +} + +final class ReceiveBindN( + val patterns: Seq[ParN], + val source: ParN, + val remainder: Option[VarN], + val freeCount: Int +) { + + /** Cryptographic hash code of this object */ + val rhoHash: Eval[Array[Byte]] = RhoHash.hashReceiveBind(this).memoize +} + 
+object ReceiveBindN { + def apply( + patterns: Seq[ParN], + source: ParN, + remainder: Option[VarN], + freeCount: Int + ): ReceiveBindN = new ReceiveBindN(patterns, source, remainder, freeCount) + + def apply(pattern: ParN, source: ParN, remainder: Option[VarN], freeCount: Int): ReceiveBindN = + apply(Seq(pattern), source, remainder, freeCount) + + def apply(patterns: Seq[ParN], source: ParN, freeCount: Int): ReceiveBindN = + new ReceiveBindN(patterns, source, None, freeCount) + + def apply(pattern: ParN, source: ParN, freeCount: Int): ReceiveBindN = + apply(Seq(pattern), source, freeCount) + + def apply(pattern: ParN, source: ParN): ReceiveBindN = + apply(Seq(pattern), source, 0) +} + +final class MatchN(val target: ParN, val cases: Seq[MatchCaseN]) extends BasicN + +object MatchN { + def apply(target: ParN, cases: Seq[MatchCaseN]): MatchN = new MatchN(target, cases) + def apply(target: ParN, mCase: MatchCaseN): MatchN = apply(target, Seq(mCase)) +} + +final class MatchCaseN(val pattern: ParN, val source: ParN, val freeCount: Int) { + + /** Cryptographic hash code of this object */ + val rhoHash: Eval[Array[Byte]] = RhoHash.hashMatchCase(this).memoize +} + +object MatchCaseN { + def apply(pattern: ParN, source: ParN, freeCount: Int = 0): MatchCaseN = + new MatchCaseN(pattern, source, freeCount) +} + +/** + * The new construct serves as a variable binder with scope Proc which producesan unforgeable process + * for each uniquely declared variable and substitutes these (quoted) processes for the variables. + * + * @param bindCount Total number of variables entered in p. This makes it easier to substitute or walk a term. + * @param p Rholang executable code inside New. + * For normalized form, p should not contain solely another new. + * Also for normalized form, the first use should be level+0, next use level+1 + * up to level+count for the last used variable. + * @param uri List of names Rho built-in processes listening on channels (e.g. `rho:io:stdout`). 
+ * For normalization, uri-referenced variables come at the end, and in lexicographical order. + * @param injections List of injected uri-referenced variables (e.g. rho:rchain:deployId). + * Should be sort by key in lexicographical order. + */ +final class NewN( + val bindCount: Int, + val p: ParN, + val uri: Seq[GStringN], + val injections: Map[GStringN, ParN] +) extends BasicN { + // Sorted by the hash of the objects which is memoized as part of Rho type + val urisSorted: Eval[Seq[GStringN]] = this.uri.sortByBytes(_.rhoHash).memoize + + // Sorted by the hash of the objects which is memoized as part of Rho type + val injectionsSorted: Eval[Seq[(GStringN, ParN)]] = + this.injections.toSeq.sortByBytes(_.bimap(_.rhoHash, _.rhoHash).mapN(_ ++ _)).memoize + + def injectionsStrKeys: Map[String, ParN] = this.injections.map(_.bimap(_.v, identity)) +} + +object NewN { + def apply( + bindCount: Int, + p: ParN, + uri: Seq[String], + injections: Map[String, ParN] + ): NewN = + new NewN(bindCount, p, uri.map(GStringN(_)), injections.map(_.bimap(GStringN(_), identity))) + + def apply( + bindCount: Int, + p: ParN, + uri: Seq[GStringN], + injections: Seq[(GStringN, ParN)] + ): NewN = new NewN(bindCount, p, uri, injections.toMap) + + def apply(bindCount: Int, p: ParN): NewN = new NewN(bindCount, p, Seq(), Map()) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Collection.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Collection.scala new file mode 100644 index 00000000000..94fa6579815 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/types/Collection.scala @@ -0,0 +1,113 @@ +package io.rhonix.rholang.types + +import cats.Eval + +import scala.collection.immutable.{TreeMap, TreeSet} + +/** + * Ordered collection of 0 or more processes. + * @param ps The sequence of any Rholang processes + * @param remainder gives support to use ... in the list construction and deconstruction e.g. [1, 2, 3 ... rest]. + * It's defined as optional variable. 
final class EListN private (val ps: Seq[ParN], val remainder: Option[VarN]) extends CollectionN {
  def :+(elem: ParN): EListN       = EListN(ps :+ elem, remainder)
  def +:(elem: ParN): EListN       = EListN(elem +: ps, remainder)
  // Concatenation drops the remainder by design (result is a fully concrete list).
  def ++(elems: Seq[ParN]): EListN = EListN(ps ++ elems, None)
  def ++(that: EListN): EListN     = EListN(ps ++ that.ps, None)
}

object EListN {
  def apply(ps: Seq[ParN] = Seq(), r: Option[VarN] = None): EListN = new EListN(ps, r)
  def apply(p: ParN): EListN                                       = apply(Seq(p), None)
  def empty: EListN                                                = EListN()
}

/**
 * Ordered collection of 1 or more processes.
 * @param ps The non-empty sequence of any Rholang processes
 */
final class ETupleN private (val ps: Seq[ParN]) extends CollectionN

object ETupleN {
  @SuppressWarnings(Array("org.wartremover.warts.Throw"))
  def apply(ps: Seq[ParN]): ETupleN =
    if (ps.isEmpty) throw new Exception("Cannot create ETuple with an empty par sequence")
    else new ETupleN(ps)
  def apply(p: ParN): ETupleN = apply(Seq(p))
}

/**
 * A Rholang set is an unordered collection of 0 or more processes.
 * @param ps The sequence of any Rholang processes
 * @param remainder gives support to use ... in the set construction and deconstruction e.g. Set(1, 2, 3 ... rest).
 *                  It's defined as optional variable.
 */
final class ESetN private (val ps: TreeSet[ParN], val remainder: Option[VarN]) extends CollectionN {
  // TreeSet is sorted internally by the ParN hash
  val psSorted: Eval[Seq[ParN]] = Eval.later(this.ps.toSeq).memoize

  def +(elem: ParN): ESetN = ESetN(ps + elem, remainder)
  def -(elem: ParN): ESetN = ESetN(ps - elem, remainder)

  def ++(elems: Seq[ParN]): ESetN = ESetN(ps ++ elems, None)
  def --(elems: Seq[ParN]): ESetN = ESetN(ps -- elems, None)

  def ++(that: ESetN): ESetN = ESetN(ps ++ that.ps, None)
  def --(that: ESetN): ESetN = ESetN(ps -- that.ps, None)

  def contains(elem: ParN): Boolean = ps.contains(elem)
}

object ESetN {
  def apply(): ESetN = new ESetN(TreeSet.empty(ParN.ordering), None)
  def apply(ps: Seq[ParN], r: Option[VarN] = None): ESetN =
    new ESetN(TreeSet.from(ps)(ParN.ordering), r)
  def apply(p: ParN): ESetN = ESetN(Seq(p), None)
  def empty: ESetN          = ESetN()
  private def apply(ps: TreeSet[ParN], remainder: Option[VarN]): ESetN = new ESetN(ps, remainder)
}

/**
 * A Rholang map is an unordered collection of 0 or more key-value pairs; both keys and values are processes.
 * @param ps The sequence of any Rholang processes (that form key-value pairs)
 * @param remainder gives support to use ... in the set construction and deconstruction e.g. {"a":1, "b":2 ... rest}.
 *                  It's defined as optional variable.
 */
final class EMapN private (val ps: TreeMap[ParN, ParN], val remainder: Option[VarN])
    extends CollectionN {
  // TreeMap is sorted internally by the ParN hash
  val psSorted: Eval[Seq[(ParN, ParN)]] = Eval.later(ps.toSeq).memoize

  def +(kv: (ParN, ParN)): EMapN = EMapN(ps + kv, remainder)
  def -(key: ParN): EMapN        = EMapN(ps - key, remainder)

  def ++(kvs: Seq[(ParN, ParN)]): EMapN = EMapN(ps ++ kvs, None)
  def --(keys: Iterable[ParN]): EMapN   = EMapN(ps -- keys, None)

  def ++(that: EMapN): EMapN = EMapN(ps ++ that.ps, None)
  def --(that: EMapN): EMapN = EMapN(ps -- that.keys, None)

  def contains(p: ParN): Boolean              = ps.contains(p)
  def get(key: ParN): Option[ParN]            = ps.get(key)
  def getOrElse(key: ParN, default: ParN): ParN = ps.getOrElse(key, default)

  def keys: Seq[ParN]   = ps.keys.toSeq
  def values: Seq[ParN] = ps.values.toSeq
}

object EMapN {
  def apply(ps: Seq[(ParN, ParN)], r: Option[VarN]): EMapN =
    new EMapN(TreeMap.from(ps)(ParN.ordering), r)
  def apply(ps: Seq[(ParN, ParN)]): EMapN = apply(ps, None)

  def apply(ps: Map[ParN, ParN], r: Option[VarN]): EMapN =
    new EMapN(TreeMap.from(ps)(ParN.ordering), r)
  def apply(ps: Map[ParN, ParN]): EMapN = apply(ps, None)

  def apply(): EMapN = apply(Seq())
  def empty: EMapN   = EMapN()

  private def apply(ps: TreeMap[ParN, ParN], remainder: Option[VarN]): EMapN =
    new EMapN(ps, remainder)
}
diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Connective.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Connective.scala
new file mode 100644
index 00000000000..af6a4ace8c4
--- /dev/null
+++ b/rholang/src/main/scala/io/rhonix/rholang/types/Connective.scala
@@ -0,0 +1,54 @@
package io.rhonix.rholang.types

/** Connective for type Bool in pattern */
object ConnBoolN extends ConnectiveSTypeN

/** Connective for type Int in pattern */
object ConnIntN extends ConnectiveSTypeN

/** Connective for type BigInt in pattern */
object ConnBigIntN extends ConnectiveSTypeN
/** Connective for type String in pattern */
object ConnStringN extends ConnectiveSTypeN

/** Connective for type Uri in pattern */
object ConnUriN extends ConnectiveSTypeN

/** Connective for type ByteArray in pattern */
object ConnByteArrayN extends ConnectiveSTypeN

/** The "~" (logical Not) for pattern matching.
 * the pattern ~p says "anything but p" */
final class ConnNotN(val p: ParN) extends ConnectiveFuncN
object ConnNotN { def apply(p: ParN): ConnNotN = new ConnNotN(p) }

/** The "/\" (logical And) Conjunction for pattern matching. */
// TODO: Consider a replacement `ps: Seq[ParN]` to `p1: ParN, p2: ParN`
final class ConnAndN(val ps: Seq[ParN]) extends ConnectiveFuncN
object ConnAndN {
  def apply(ps: Seq[ParN]): ConnAndN      = new ConnAndN(ps)
  def apply(p1: ParN, p2: ParN): ConnAndN = new ConnAndN(Seq(p1, p2))
}

/** The "\/" (logical Or) Disjunction for pattern matching. */
// TODO: Consider a replacement `ps: Seq[ParN]` to `p1: ParN, p2: ParN`
final class ConnOrN(val ps: Seq[ParN]) extends ConnectiveFuncN
object ConnOrN {
  def apply(ps: Seq[ParN]): ConnOrN      = new ConnOrN(ps)
  def apply(p1: ParN, p2: ParN): ConnOrN = new ConnOrN(Seq(p1, p2))
}

/** The "=..." Binding for Bound variable in pattern matching.
 * The purpose of VarRef is to provide a mechanism to bind variables to values or processes
 * within pattern matching structures in Rholang, which is useful for controlling the flow of information
 * and processes within a Rholang program.
 * E.g.:
 * match someProc { =x => x!(*someChannel) }
 * or
 * for(@{=*x} <- someChannel) { x!(*someOtherChannel) }
 */
final class ConnVarRefN(val index: Int, val depth: Int) extends ConnectiveVarN
object ConnVarRefN {
  def apply(index: Int, depth: Int): ConnVarRefN = new ConnVarRefN(index, depth)
}
diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Ground.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Ground.scala
new file mode 100644
index 00000000000..ce1228bf83d
--- /dev/null
+++ b/rholang/src/main/scala/io/rhonix/rholang/types/Ground.scala
@@ -0,0 +1,21 @@
package io.rhonix.rholang.types

final class GBoolN(val v: Boolean) extends GroundN
object GBoolN { def apply(v: Boolean): GBoolN = new GBoolN(v) }

final class GIntN(val v: Long) extends GroundN
object GIntN { def apply(v: Long): GIntN = new GIntN(v) }

final class GBigIntN(val v: BigInt) extends GroundN
object GBigIntN { def apply(v: BigInt): GBigIntN = new GBigIntN(v) }

final class GStringN(val v: String) extends GroundN
object GStringN { def apply(v: String): GStringN = new GStringN(v) }

final class GByteArrayN(val v: Array[Byte]) extends GroundN
object GByteArrayN {
  def apply(bytes: Array[Byte]): GByteArrayN = new GByteArrayN(bytes)
}

final class GUriN(val v: String) extends GroundN
object GUriN { def apply(v: String): GUriN = new GUriN(v) }
diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Operation.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Operation.scala
new file mode 100644
index 00000000000..864161e9ae6
--- /dev/null
+++ b/rholang/src/main/scala/io/rhonix/rholang/types/Operation.scala
@@ -0,0 +1,86 @@
package io.rhonix.rholang.types

final class ENegN(private val input: ParN) extends Operation1ParN {
  override val p: ParN = input
}
object ENegN { def apply(p: ParN): ENegN = new ENegN(p) }

final class ENotN(private val input: ParN) extends Operation1ParN {
  override val p: ParN = input
}
object ENotN { def apply(p: ParN): ENotN = new ENotN(p) }
final class EPlusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPlusN { def apply(p1: ParN, p2: ParN): EPlusN = new EPlusN(p1, p2) }

final class EMinusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMinusN { def apply(p1: ParN, p2: ParN): EMinusN = new EMinusN(p1, p2) }

final class EMultN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMultN { def apply(p1: ParN, p2: ParN): EMultN = new EMultN(p1, p2) }

final class EDivN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EDivN { def apply(p1: ParN, p2: ParN): EDivN = new EDivN(p1, p2) }

final class EModN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EModN { def apply(p1: ParN, p2: ParN): EModN = new EModN(p1, p2) }

final class ELtN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ELtN { def apply(p1: ParN, p2: ParN): ELtN = new ELtN(p1, p2) }

final class ELteN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ELteN { def apply(p1: ParN, p2: ParN): ELteN = new ELteN(p1, p2) }

final class EGtN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EGtN { def apply(p1: ParN, p2: ParN): EGtN = new EGtN(p1, p2) }

final class EGteN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EGteN { def apply(p1: ParN, p2: ParN): EGteN = new EGteN(p1, p2) }

final class EEqN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EEqN { def apply(p1: ParN, p2: ParN): EEqN = new EEqN(p1, p2) }

final class ENeqN(val p1: ParN, val p2: ParN) extends Operation2ParN
object ENeqN { def apply(p1: ParN, p2: ParN): ENeqN = new ENeqN(p1, p2) }

final class EAndN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EAndN { def apply(p1: ParN, p2: ParN): EAndN = new EAndN(p1, p2) }

final class EShortAndN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EShortAndN { def apply(p1: ParN, p2: ParN): EShortAndN = new EShortAndN(p1, p2) }

final class EOrN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EOrN { def apply(p1: ParN, p2: ParN): EOrN = new EOrN(p1, p2) }

final class EShortOrN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EShortOrN { def apply(p1: ParN, p2: ParN): EShortOrN = new EShortOrN(p1, p2) }

final class EPlusPlusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPlusPlusN { def apply(p1: ParN, p2: ParN): EPlusPlusN = new EPlusPlusN(p1, p2) }

final class EMinusMinusN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EMinusMinusN { def apply(p1: ParN, p2: ParN): EMinusMinusN = new EMinusMinusN(p1, p2) }

final class EPercentPercentN(val p1: ParN, val p2: ParN) extends Operation2ParN
object EPercentPercentN {
  def apply(p1: ParN, p2: ParN): EPercentPercentN = new EPercentPercentN(p1, p2)
}

final class EMethodN(val target: ParN, val methodName: String, val args: Seq[ParN])
    extends OperationOtherN

object EMethodN {
  def apply(target: ParN, methodName: String, args: Seq[ParN] = Seq()): EMethodN =
    new EMethodN(target, methodName, args)
  def apply(target: ParN, methodName: String, arg: ParN): EMethodN =
    new EMethodN(target, methodName, Seq(arg))
}

/**
 * The p matches q expression is similar to:
 * match p { q -> true; _ -> false }
 */
final class EMatchesN(val target: ParN, val pattern: ParN) extends OperationOtherN
object EMatchesN {
  def apply(target: ParN, pattern: ParN): EMatchesN = new EMatchesN(target, pattern)
}
diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Other.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Other.scala
new file mode 100644
index 00000000000..4a28923d3b3
--- /dev/null
+++ b/rholang/src/main/scala/io/rhonix/rholang/types/Other.scala
@@ -0,0 +1,22 @@
package io.rhonix.rholang.types
+ * + * If both flags are set to false, bundle allows only for equivalance check. + * + * @param writeFlag flag indicating whether bundle is writeable + * @param readFlag flag indicating whether bundle is readable + */ +final class BundleN(val body: ParN, val writeFlag: Boolean, val readFlag: Boolean) extends OtherN { + def merge(other: BundleN): BundleN = { + val wFlag = writeFlag && other.writeFlag + val rFlag = readFlag && other.readFlag + BundleN(other.body, wFlag, rFlag) + } +} +object BundleN { + def apply(body: ParN, writeFlag: Boolean, readFlag: Boolean): BundleN = + new BundleN(body, writeFlag, readFlag) +} diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/RhoType.scala b/rholang/src/main/scala/io/rhonix/rholang/types/RhoType.scala new file mode 100644 index 00000000000..4df7877c1e9 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/types/RhoType.scala @@ -0,0 +1,125 @@ +package io.rhonix.rholang.types + +import cats.Eval +import cats.syntax.all.* +import io.rhonix.rholang.parmanager.Manager.* +import sdk.syntax.all.* + +import scala.util.hashing.MurmurHash3 + +/** Base trait for Rholang elements in the Reducer */ +sealed trait RhoTypeN { + + /** Cryptographic hash code of this object */ + val rhoHash: Eval[Array[Byte]] = rhoHashFn(this).memoize + + /** The size of serialized bytes lazily evaluated with memoization */ + val serializedSize: Eval[Int] = serializedSizeFn(this).memoize + + /** Serialized bytes lazily evaluated with memoization */ + val serialized: Eval[Array[Byte]] = serializedFn(this, memoizeChildren = false).memoize + + /** True if the object or at least one of the nested objects non-concrete. 
+ * Such a object cannot be viewed as if it were a term.*/ + // TODO: Rename connectiveUsed for more clarity + val connectiveUsed: Eval[Boolean] = connectiveUsedFn(this).memoize + + /** True if the object or at least one of the nested objects can be evaluated in Reducer */ + lazy val evalRequired: Boolean = evalRequiredFn(this) + + /** True if the object or at least one of the nested objects can be substituted in Reducer */ + lazy val substituteRequired: Boolean = substituteRequiredFn(this) + + override def equals(other: Any): Boolean = other match { + case x: RhoTypeN => this.rhoHash.value sameElements x.rhoHash.value + case _ => false + } + + // Have to hash bytes since this.rhoHash.value.hashCode is different for two copies of the same array + // TODO Maybe this.rhoHash.value.toList.hashCode is more effective + private lazy val hCode = MurmurHash3.arrayHash(this.rhoHash.value) + override def hashCode(): Int = hCode +} + +/** Rholang element that can be processed in parallel, together with other elements */ +sealed trait ParN extends RhoTypeN + +object ParN { + + /** + * Create a flatten parallel Par (ParProc) from par sequence. + * See [[flattedPProc]] for more information. + */ + def makeParProc(ps: Seq[ParN]): ParN = flattedPProc(ps) + + /** Combine two pars for their parallel execution */ + def combine(p1: ParN, p2: ParN): ParN = combinePars(p1, p2) + + def compare(p1: ParN, p2: ParN): Int = + java.util.Arrays.compare(p1.rhoHash.value, p2.rhoHash.value) + val ordering: Ordering[ParN] = (p1: ParN, p2: ParN) => compare(p1, p2) + + implicit class SequenceHelpers[T](val seq: Seq[T]) extends AnyVal { + + /** Sorts the sequence by the selected byte array and its defined `Ordering` instance. 
*/ + @inline def sortByBytes(f: T => Eval[Array[Byte]]): Eval[Seq[T]] = + seq + .traverse(t => f(t).map((t, _))) + .map(_.sortBy { case (_, hash) => hash }.map { case (t, _) => t }) + } +} + +/** Basic rholang operations that can be executed in parallel */ +trait BasicN extends ParN + +/** Expressions included in Rholang elements */ +sealed trait ExprN extends ParN + +/** Base types for Rholang expressions */ +trait GroundN extends ExprN + +/** Rholang collections */ +trait CollectionN extends ExprN + +/** Variables in Rholang (can be bound, free and wildcard) */ +trait VarN extends ExprN + +/** Operations in Rholang */ +sealed trait OperationN extends ExprN + +/** Operation with one par */ +trait Operation1ParN extends OperationN { + val p: ParN +} + +/** Operation with two pars */ +trait Operation2ParN extends OperationN { + val p1: ParN + val p2: ParN +} + +/** Other operations (e.g. method) */ +trait OperationOtherN extends OperationN + +/** Rholang unforgeable names (stored in internal environment map) */ +trait UnforgeableN extends ParN { + val v: Array[Byte] +} + +/** + * Connectives (bindings) are used in patterns to combine several conditions together or + * to set a pattern with some specific Rholang type or variables. 
+ * */ +trait ConnectiveN extends ParN + +/** Connectives for simple types */ +trait ConnectiveSTypeN extends ConnectiveN + +/** Connectives for truth-functional operators */ +trait ConnectiveFuncN extends ConnectiveN + +/** Connectives for variables */ +trait ConnectiveVarN extends ConnectiveN + +/** Other types that can't be categorized */ +trait OtherN extends ParN diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Unforgeable.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Unforgeable.scala new file mode 100644 index 00000000000..e3d75a76661 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/types/Unforgeable.scala @@ -0,0 +1,14 @@ +package io.rhonix.rholang.types + +final class UPrivateN(val v: Array[Byte]) extends UnforgeableN +object UPrivateN { def apply(bytes: Array[Byte]): UPrivateN = new UPrivateN(bytes) } + +final class UDeployIdN(val v: Array[Byte]) extends UnforgeableN +object UDeployIdN { def apply(bytes: Array[Byte]): UDeployIdN = new UDeployIdN(bytes) } + +final class UDeployerIdN(val v: Array[Byte]) extends UnforgeableN +object UDeployerIdN { def apply(bytes: Array[Byte]): UDeployerIdN = new UDeployerIdN(bytes) } + +// TODO: Temporary solution for easier conversion from old types - change type in the future +final class USysAuthTokenN(val v: Array[Byte]) extends UnforgeableN +object USysAuthTokenN { def apply(): USysAuthTokenN = new USysAuthTokenN(Array[Byte]()) } diff --git a/rholang/src/main/scala/io/rhonix/rholang/types/Var.scala b/rholang/src/main/scala/io/rhonix/rholang/types/Var.scala new file mode 100644 index 00000000000..70e34bc4253 --- /dev/null +++ b/rholang/src/main/scala/io/rhonix/rholang/types/Var.scala @@ -0,0 +1,9 @@ +package io.rhonix.rholang.types + +final class BoundVarN(val idx: Int) extends VarN +object BoundVarN { def apply(value: Int): BoundVarN = new BoundVarN(value) } + +final class FreeVarN(val idx: Int) extends VarN +object FreeVarN { def apply(value: Int): FreeVarN = new FreeVarN(value) } + 
+object WildcardN extends VarN diff --git a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagDataSyntax.scala b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagDataSyntax.scala index d749f22233e..ae8c29d0df1 100644 --- a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagDataSyntax.scala +++ b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagDataSyntax.scala @@ -1,7 +1,7 @@ package coop.rchain.sdk.casper.syntax import coop.rchain.sdk.dag.data.DagData -import coop.rchain.sdk.dag.syntax._ +import coop.rchain.sdk.dag.syntax.all._ /** * Casper specific extensions for DagData operations. diff --git a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagViewSyntax.scala b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagViewSyntax.scala index 4f9bf886845..6e4772b074d 100644 --- a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagViewSyntax.scala +++ b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/CasperDagViewSyntax.scala @@ -4,7 +4,7 @@ import cats.Applicative import cats.effect.Sync import cats.syntax.all._ import coop.rchain.sdk.dag.data.DagView -import coop.rchain.sdk.dag.syntax._ +import coop.rchain.sdk.dag.syntax.all._ /** * Casper specific extensions for DagView operations. 
diff --git a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/package.scala b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/package.scala index 8ddc9295d11..17b1f9fffc6 100644 --- a/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/package.scala +++ b/sdk/src/main/scala/coop/rchain/sdk/casper/syntax/package.scala @@ -1,3 +1,5 @@ package coop.rchain.sdk.casper -package object syntax extends CasperDagDataSyntax with CasperDagViewSyntax +package object syntax { + object all extends CasperDagDataSyntax with CasperDagViewSyntax +} diff --git a/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/DagViewSyntax.scala b/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/DagViewSyntax.scala index 97508f27d0c..aa79c920d5d 100644 --- a/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/DagViewSyntax.scala +++ b/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/DagViewSyntax.scala @@ -3,6 +3,7 @@ package coop.rchain.sdk.dag.syntax import cats.Applicative import cats.syntax.all._ import coop.rchain.sdk.dag.data.DagView +import coop.rchain.sdk.dag.syntax.all._ import fs2.Stream /** diff --git a/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/package.scala b/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/package.scala index 5564db52540..4196a4b876c 100644 --- a/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/package.scala +++ b/sdk/src/main/scala/coop/rchain/sdk/dag/syntax/package.scala @@ -1,3 +1,5 @@ package coop.rchain.sdk.dag -package object syntax extends DagDataSyntax with DagViewSyntax +package object syntax { + object all extends DagDataSyntax with DagViewSyntax +} diff --git a/shared/src/main/scala/coop/rchain/catscontrib/package.scala b/shared/src/main/scala/coop/rchain/catscontrib/package.scala index 0b73723fcf5..0ba1463c890 100644 --- a/shared/src/main/scala/coop/rchain/catscontrib/package.scala +++ b/shared/src/main/scala/coop/rchain/catscontrib/package.scala @@ -1,7 +1,5 @@ package coop.rchain -package object catscontrib - extends EitherTSyntax - with StateTInstances - with 
WriterTInstances - with SeqInstances +package object catscontrib { + object all extends EitherTSyntax with StateTInstances with WriterTInstances with SeqInstances +}