diff --git a/chunk_manager/chunk_manager.go b/chunk_manager/chunk_manager.go index 366bd66a..74d94d0b 100644 --- a/chunk_manager/chunk_manager.go +++ b/chunk_manager/chunk_manager.go @@ -97,10 +97,14 @@ func NewChunkManager(cfg *ChunkManagerConfig) (*ChunkManager, error) { return nil, errors.New("provided state tob nonce greater than highest settled tob nonce") } - if cfg.ExpirationTime < 0 || cfg.GCInterval < 0 || cfg.LayerSubmissionCheckInterval < 0 { + if cfg.ExpirationTime < 0 || cfg.GCInterval < 0 || cfg.LayerSubmissionCheckInterval <= 0 { return nil, fmt.Errorf("invalid duration variables, ExpirationTime: %s, GCInterval: %s, LayerSubmissionCheckInterval: %s", cfg.ExpirationTime, cfg.GCInterval, cfg.LayerSubmissionCheckInterval) } + if cfg.SEQClient == nil { + return nil, errors.New("SEQClient is required") + } + // when cfg.StateLowestToBNonce =0, after pushing the first element to both PQs, we add a place holder layer as the first layer // the tob nonce of which is headTobNonce+1 // when cfg.StateLowestToBNonce >0, chunks will be replayed with the base headToBNonce to be cfg.StateLowestToBNonce-1, hence guarantees diff --git a/common/test_utils.go b/common/test_utils.go index a76022f5..a04fdd09 100644 --- a/common/test_utils.go +++ b/common/test_utils.go @@ -250,12 +250,45 @@ func DisplayEthTxs(txs map[string]ethtypes.Transactions) { for domain, domainTxs := range txs { fmt.Printf("domain: %s\n", domain) for _, tx := range domainTxs { - fmt.Printf("tx: %s\n", tx.Hash().Hex()) + sender, err := ExtractSender(tx) + if err != nil { + panic(err) + } + fmt.Printf("sender: %s tx: %s:%d\n", sender, tx.Hash().Hex(), tx.Nonce()) } } fmt.Printf("========txs info end=======\n") } +func FindTxHash(txLhs *ethtypes.Transaction, txs []*ethtypes.Transaction) bool { + if txLhs == nil { + panic("txLhs is nil") + } + lhsHash := txLhs.Hash() + for _, tx := range txs { + rhsHash := tx.Hash() + if lhsHash == rhsHash { + return true + } + } + return false +} + +func 
TxsHashUnorderedMatch(txsLhs []*ethtypes.Transaction, txsRhs []*ethtypes.Transaction) bool { + if len(txsLhs) != len(txsRhs) { + return false + } + + for _, tx := range txsLhs { + if !FindTxHash(tx, txsRhs) { + fmt.Printf("could not find tx hash " + tx.Hash().Hex()) + return false + } + } + + return true +} + func SyncMapLen(m *sync.Map) int { var length int m.Range(func(_, _ interface{}) bool { @@ -264,3 +297,11 @@ func SyncMapLen(m *sync.Map) int { }) return length } + +func CollectChunksFromRequests(reqs ...*SubmitNewBlockRequest) []*ArcadiaChunk { + chunks := make([]*ArcadiaChunk, len(reqs)) + for i, req := range reqs { + chunks[i] = &req.Chunk + } + return chunks +} diff --git a/common/types.go b/common/types.go index 96f1b936..a030ed56 100644 --- a/common/types.go +++ b/common/types.go @@ -13,6 +13,7 @@ import ( "time" srpc "github.com/AnomalyFi/nodekit-seq/rpc" + "github.com/rollkit/go-da" hactions "github.com/AnomalyFi/hypersdk/actions" "github.com/gorilla/websocket" @@ -205,10 +206,12 @@ type ToBChunk struct { txHash2BundleHash map[string]string revertingTxHashes map[string]struct{} + // refers to whether bundle at idx has been removed removedBitSet *bitset.BitSet - domains map[string]struct{} // chain ids or rollup ids - seqTxs []*chain.Transaction - initialized bool + + domains map[string]struct{} // chain ids or rollup ids + seqTxs []*chain.Transaction + initialized bool l sync.RWMutex } @@ -229,6 +232,17 @@ func (tob *ToBChunk) GetBundles() []*CrossRollupBundle { return ret } +func (tob *ToBChunk) IsBundleIdxFiltered(idx int) bool { + tob.l.RLock() + defer tob.l.RUnlock() + + if idx >= len(tob.Bundles) { + return false + } + + return tob.removedBitSet.Test(uint(idx)) +} + func (tob *ToBChunk) GetTxs() map[string]ethtypes.Transactions { tob.l.RLock() defer tob.l.RUnlock() @@ -344,10 +358,45 @@ func (tob *ToBChunk) removeBundleContainTx(txHash string) (*CrossRollupBundle, e bundleIdx2remove := slices.IndexFunc(tob.Bundles, func(crb *CrossRollupBundle) 
bool { return crb.BundleHash == bundleHash }) + // mark as removed tob.removedBitSet = tob.removedBitSet.Set(uint(bundleIdx2remove)) // re-populate [tob.txs] + tob.repopulateToBTxs() + + return tob.Bundles[bundleIdx2remove], nil +} + +func (tob *ToBChunk) FilterBundleWithHash(bundleHash string) (*CrossRollupBundle, error) { + tob.l.Lock() + defer tob.l.Unlock() + + var foundIdx int + var foundBundle *CrossRollupBundle + + for bundleIdx, bundle := range tob.Bundles { + if bundle.BundleHash == bundleHash { + foundIdx = bundleIdx + foundBundle = bundle + break + } + } + + if foundBundle == nil { + return nil, fmt.Errorf("filterBundleWithHash found no bundle hash [%s]", bundleHash) + } + + // mark as removed + tob.removedBitSet = tob.removedBitSet.Set(uint(foundIdx)) + + // re-populate [tob.txs] + tob.repopulateToBTxs() + + return tob.Bundles[foundIdx], nil +} + +func (tob *ToBChunk) repopulateToBTxs() { tob.txs = make(map[string]ethtypes.Transactions) for bundleIdx, bundle := range tob.Bundles { // continue as this bundle was removed @@ -360,13 +409,12 @@ func (tob *ToBChunk) removeBundleContainTx(txHash string) (*CrossRollupBundle, e tob.txs[domain] = l } } + // track domains that contain txs in it tob.domains = make(map[string]struct{}) for domain := range tob.txs { tob.domains[domain] = struct{}{} } - - return tob.Bundles[bundleIdx2remove], nil } // LowestBlockNumber return the tracked lowest heights for domains, this prevents the situation that @@ -527,7 +575,7 @@ type RoBChunk struct { BlockNumber uint64 `json:"block_number"` // following fields will be populated after initialization - removedBitSet *bitset.BitSet + removedBitSet *bitset.BitSet // refers to whether tx within txs at idx has been removed initialized bool txs ethtypes.Transactions seqTxs []*chain.Transaction @@ -1541,6 +1589,10 @@ func (b *CrossRollupBundle) Domains() []string { return maps.Keys(b.txs) } +func (b *CrossRollupBundle) HasTxs() bool { + return len(b.txs) > 0 +} + func (b 
*CrossRollupBundle) ContainTx(txHash string) bool { for _, txs := range b.txs { contain := slices.ContainsFunc(txs, func(t *ethtypes.Transaction) bool { @@ -1645,3 +1697,13 @@ type CertInfo struct { Cert []byte PlaceHolder bool } + +type DACertificate = []byte + +type DAPayload = []byte + +type BlobInfo struct { + BlobID []DACertificate + Proof []da.Proof + IsFinalized bool +} diff --git a/database/mockdb.go b/database/mockdb.go index 70ea846b..d5fd71b3 100644 --- a/database/mockdb.go +++ b/database/mockdb.go @@ -29,7 +29,7 @@ type MockDB struct { RoBChunkMap map[string]map[uint64]*common.RoBChunkDB RoBChunkAcceptedMap map[string]*common.RoBChunkAcceptedDB LastFetchedBlockNum common.LastFetchedBlockNumberDB - PayloadResp common.PayloadDB + PayloadResp *common.PayloadDB Epoch uint64 EpochLowestToBNonce map[uint64]uint64 PayloadTxsToB map[string]*common.PayloadTxs @@ -54,7 +54,6 @@ func NewMockDB() *MockDB { RoBChunkMap: make(map[string]map[uint64]*common.RoBChunkDB), RoBChunkAcceptedMap: make(map[string]*common.RoBChunkAcceptedDB), LastFetchedBlockNum: common.LastFetchedBlockNumberDB{}, - PayloadResp: common.PayloadDB{}, Epoch: 0, EpochLowestToBNonce: make(map[uint64]uint64), PayloadTxsToB: make(map[string]*common.PayloadTxs), @@ -247,8 +246,10 @@ func (db *MockDB) RemoveBestAuctionBid(epoch uint64) error { func (db *MockDB) GetPayloadResp(chainID string, blockNumber uint64) (*common.PayloadDB, error) { db.l.Lock() defer db.l.Unlock() - - return &db.PayloadResp, nil + if db.PayloadResp == nil { + return nil, nil + } + return db.PayloadResp, nil } func (db *MockDB) SetPayloadResp(chainID string, blockNumber uint64, txs *common.GetPayloadResponse) error { @@ -260,7 +261,7 @@ func (db *MockDB) SetPayloadResp(chainID string, blockNumber uint64, txs *common return err } - db.PayloadResp = *payloadTxs + db.PayloadResp = payloadTxs return nil } diff --git a/datalayer/da_submitter.go b/datalayer/da_submitter.go index 27abdc4c..93ec0b1b 100644 --- a/datalayer/da_submitter.go 
+++ b/datalayer/da_submitter.go @@ -21,6 +21,7 @@ const ( type IDASubmitter interface { ChunksChan() chan *common.ArcadiaToSEQChunkMessage + SubmitAndFinalizeBlob(ctx context.Context, payload common.DAPayload) (*common.BlobInfo, error) } type DASubmitterOpts struct { @@ -34,16 +35,6 @@ type DASubmitterOpts struct { Mocking bool } -type DACertificate = []byte - -type DAPayload = []byte - -type BlobInfo struct { - BlobID []DACertificate - Proof []da.Proof - IsFinalized bool -} - var _ IDASubmitter = (*DASubmitter)(nil) // CelestiaConfig TODO: check fields below @@ -120,7 +111,7 @@ func NewDASubmitter(opts DASubmitterOpts) (*DASubmitter, error) { chunkCancel() continue } - cert, err := submitter.submitAndFinalizeBlob(chunkCtx, blob) + cert, err := submitter.SubmitAndFinalizeBlob(chunkCtx, blob) if err != nil { log.WithError(err).Error("unable to submit blob to DA") chunkCancel() @@ -152,14 +143,14 @@ func NewDASubmitter(opts DASubmitterOpts) (*DASubmitter, error) { return submitter, nil } -// TODO: submission and finalization check should be separated in the future // SubmitAndFinalizeBlob Submit does submission of the given payload to the data availability layer. // It will retry maxRetries number of times with increasing backoff until it succeeds. // On success, returns the DA certificate as []byte. 
// On failure, returns nil byte slice and error -func (da *DASubmitter) submitAndFinalizeBlob(ctx context.Context, payload DAPayload) (*BlobInfo, error) { +// TODO: submission and finalization check should be separated in the future +func (da *DASubmitter) SubmitAndFinalizeBlob(ctx context.Context, payload common.DAPayload) (*common.BlobInfo, error) { var numRetries int - var certificateDA DACertificate + var certificateDA common.DACertificate for { numRetries++ @@ -203,7 +194,7 @@ func (da *DASubmitter) submitAndFinalizeBlob(ctx context.Context, payload DAPayl } da.log.Infof("Retrieved proofs for blobs: %v", proofs) - cert := &BlobInfo{ + cert := &common.BlobInfo{ BlobID: [][]byte{certificateDA}, Proof: proofs, IsFinalized: true, diff --git a/datalayer/da_submitter_test.go b/datalayer/da_submitter_test.go index 09005c91..eac5f715 100644 --- a/datalayer/da_submitter_test.go +++ b/datalayer/da_submitter_test.go @@ -62,7 +62,7 @@ func TestDAClientCanPushCertToChunkManager(t *testing.T) { mockDA.EXPECT().Submit(mock.Anything, [][]byte{blob}, celestiaConfig.GasPrice, celestiaConfig.NamespaceID).Return([][]byte{testBlobID}, nil) mockDA.EXPECT().GetProofs(mock.Anything, [][]byte{testBlobID}, celestiaConfig.NamespaceID).Return([][]byte{testProof}, nil) - expectedCert := BlobInfo{ + expectedCert := common.BlobInfo{ BlobID: [][]byte{testBlobID}, Proof: [][]byte{testProof}, IsFinalized: true, diff --git a/datalayer/mocks/mock_IDASubmitter.go b/datalayer/mocks/mock_IDASubmitter.go index 8750d754..2993fa2e 100644 --- a/datalayer/mocks/mock_IDASubmitter.go +++ b/datalayer/mocks/mock_IDASubmitter.go @@ -3,6 +3,8 @@ package mocks import ( + context "context" + common "github.com/AnomalyFi/Arcadia/common" mock "github.com/stretchr/testify/mock" @@ -68,6 +70,65 @@ func (_c *MockIDASubmitter_ChunksChan_Call) RunAndReturn(run func() chan *common return _c } +// SubmitAndFinalizeBlob provides a mock function with given fields: ctx, payload +func (_m *MockIDASubmitter) 
SubmitAndFinalizeBlob(ctx context.Context, payload []byte) (*common.BlobInfo, error) { + ret := _m.Called(ctx, payload) + + if len(ret) == 0 { + panic("no return value specified for SubmitAndFinalizeBlob") + } + + var r0 *common.BlobInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []byte) (*common.BlobInfo, error)); ok { + return rf(ctx, payload) + } + if rf, ok := ret.Get(0).(func(context.Context, []byte) *common.BlobInfo); ok { + r0 = rf(ctx, payload) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*common.BlobInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { + r1 = rf(ctx, payload) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockIDASubmitter_SubmitAndFinalizeBlob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SubmitAndFinalizeBlob' +type MockIDASubmitter_SubmitAndFinalizeBlob_Call struct { + *mock.Call +} + +// SubmitAndFinalizeBlob is a helper method to define mock.On call +// - ctx context.Context +// - payload []byte +func (_e *MockIDASubmitter_Expecter) SubmitAndFinalizeBlob(ctx interface{}, payload interface{}) *MockIDASubmitter_SubmitAndFinalizeBlob_Call { + return &MockIDASubmitter_SubmitAndFinalizeBlob_Call{Call: _e.mock.On("SubmitAndFinalizeBlob", ctx, payload)} +} + +func (_c *MockIDASubmitter_SubmitAndFinalizeBlob_Call) Run(run func(ctx context.Context, payload []byte)) *MockIDASubmitter_SubmitAndFinalizeBlob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]byte)) + }) + return _c +} + +func (_c *MockIDASubmitter_SubmitAndFinalizeBlob_Call) Return(_a0 *common.BlobInfo, _a1 error) *MockIDASubmitter_SubmitAndFinalizeBlob_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockIDASubmitter_SubmitAndFinalizeBlob_Call) RunAndReturn(run func(context.Context, []byte) (*common.BlobInfo, error)) *MockIDASubmitter_SubmitAndFinalizeBlob_Call { + _c.Call.Return(run) + return _c +} + // 
NewMockIDASubmitter creates a new instance of MockIDASubmitter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockIDASubmitter(t interface { diff --git a/go.mod b/go.mod index 60cd13e8..3752b670 100644 --- a/go.mod +++ b/go.mod @@ -20,6 +20,7 @@ require ( github.com/gorilla/websocket v1.5.0 github.com/holiman/uint256 v1.3.1 github.com/jmoiron/sqlx v1.3.5 + github.com/labstack/gommon v0.4.0 github.com/lib/pq v1.10.8 github.com/prometheus/client_golang v1.18.0 github.com/rollkit/go-da v0.9.0 @@ -105,6 +106,8 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/supranational/blst v0.3.13 // indirect github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.2 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect github.com/yusufpapurcu/wmi v1.2.2 // indirect go.opencensus.io v0.24.0 // indirect diff --git a/go.sum b/go.sum index 8a53a8a7..d6b29811 100644 --- a/go.sum +++ b/go.sum @@ -292,6 +292,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8= +github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= @@ -305,6 +307,7 @@ github.com/markbates/oncer v1.0.0 
h1:E83IaVAHygyndzPimgUYJjbshhDTALZyXxvk9FOlQRY github.com/markbates/oncer v1.0.0/go.mod h1:Z59JA581E9GP6w96jai+TGqafHPW+cPfRxz2aSZ0mcI= github.com/markbates/safe v1.0.1 h1:yjZkbvRM6IzKj9tlu/zMJLS0n/V351OZWRnF3QfaUxI= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= @@ -449,6 +452,11 @@ github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10 h1:CQh github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10/go.mod h1:x/Pa0FF5Te9kdrlZKJK82YmAkvL8+f989USgz6Jiw7M= github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs= github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= +github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -558,6 +566,8 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys 
v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/services/api/payload.go b/services/api/payload.go index c89ba960..5c622349 100644 --- a/services/api/payload.go +++ b/services/api/payload.go @@ -12,7 +12,11 @@ import ( ) // buildPayload builds the payload response from the previous chunk transactions from the datastore(redis and the database) -func (api *ArcadiaAPI) buildPayload(headEpoch uint64, rollupRegistration *hactions.RollupInfo, blockNumber uint64) (*common.GetPayloadResponse, error) { +func (api *ArcadiaAPI) buildPayload( + headEpoch uint64, + rollupRegistration *hactions.RollupInfo, + blockNumber uint64, +) (*common.GetPayloadResponse, error) { if rollupRegistration == nil { return nil, fmt.Errorf("rollup registration not provided") } @@ -65,7 +69,7 @@ func (api *ArcadiaAPI) buildPayload(headEpoch uint64, rollupRegistration *hactio } chunkID, err := placeHolderRoB.ID() if err == nil { - log.WithField("chunkID", chunkID.String()).Warn("failed to get rob chunk, inserting empty RoB as subtitution") + log.WithField("chunkID", chunkID.String()).Warn("failed to get rob chunk, inserting empty RoB as substitution") } else { log.WithError(err).Error("unable to get place 
holder rob chunk id") } @@ -153,7 +157,7 @@ func (api *ArcadiaAPI) buildPayload(headEpoch uint64, rollupRegistration *hactio return nil, err } - // first check if all the before tobChunks are settled before the ToBNocne of the RoB + // first check if all the before tobChunks are settled before the ToBNonce of the RoB // then we can safely fetch all the txs from ToBs highestToBNonce := api.chunkManager.HighestPreconfedToB() if highestToBNonce < *tobNonceOfRoB { diff --git a/services/api/service.go b/services/api/service.go index efaa2711..8b8c5e71 100644 --- a/services/api/service.go +++ b/services/api/service.go @@ -67,7 +67,6 @@ const ( CancelTimeoutSecs = 2 SlotWindowToCheckAuctionWinner = 2 SafetyNonceDifference = uint64(20) - MaxDARetries = int(5) ) // prometheus counters @@ -295,6 +294,9 @@ type ArcadiaAPI struct { // map of ChunkID -> PreConfInfo pendingChunksPreConfs sync.Map + // map of ChainID -> num messages received + numChunksByChainID sync.Map + // map of rollup id to block number. this is updated whenever rollup successfully calls getPayload(). // we have an issue in which builders cannot build for the block number that has been called via getPayload() // else we experience state corruption. 
This map is used to reject @@ -313,6 +315,7 @@ type ArcadiaAPI struct { // seq validator registration upgrader *websocket.Upgrader + // map of subscribed validators, on subscribe, we query its weight and store the connection and weight by [SubscribedValidator] // on disconnected, we remove that validator from the map subscribedValidatorMap sync.Map @@ -388,6 +391,7 @@ func NewArcadiaAPI(opts ArcadiaAPIOpts) (api *ArcadiaAPI, err error) { blockSimRateLimiter: opts.BlockSimulator, pendingChunksPreConfs: sync.Map{}, + numChunksByChainID: sync.Map{}, rollupToLastFetchedBlockNumber: make(map[string]uint64), submitReqOngoing: &sync.Map{}, @@ -437,6 +441,16 @@ func NewArcadiaAPI(opts ArcadiaAPIOpts) (api *ArcadiaAPI, err error) { return api, nil } +// setBlockSimRateLimiter used to set a block sim rate limiter. Only for test use. +func (api *ArcadiaAPI) setBlockSimRateLimiter(limiter simulator.IBlockSimRateLimiter) { + api.blockSimRateLimiter = limiter +} + +// setSeqClient used to set seq client on ArcadiaAPI. Only for test use. 
+func (api *ArcadiaAPI) setSeqClient(client seq.BaseSeqClient) { + api.seqClient = client +} + func (api *ArcadiaAPI) getRouter() http.Handler { // Main router mainRouter := mux.NewRouter() @@ -487,6 +501,26 @@ func (api *ArcadiaAPI) getRouter() http.Handler { return loggedRouter } +func (api *ArcadiaAPI) trackChunkByChainID(chainID string) { + numChunk, ok := api.numChunksByChainID.Load(chainID) + if !ok { + api.numChunksByChainID.Store(chainID, 1) + } else { + numChunkVal := numChunk.(int) + api.numChunksByChainID.Store(chainID, numChunkVal+1) + } +} + +func (api *ArcadiaAPI) getNumChunksByChainID(chainID string) int { + numChunk, ok := api.numChunksByChainID.Load(chainID) + if !ok { + return 0 + } else { + numChunkVal := numChunk.(int) + return numChunkVal + } +} + // StartServer starts up this API instance and HTTP server // - First it initializes the cache and updates local information // - Once that is done, the HTTP server is started @@ -1128,7 +1162,7 @@ func (api *ArcadiaAPI) handleGetPayload(w http.ResponseWriter, req *http.Request // try fetch cached payload var getPayloadResp *common.GetPayloadResponse getPayloadResp, err = api.redis.GetPayloadResp(payload.ChainID, payload.BlockNumber) - if err != nil { + if err != nil || getPayloadResp == nil { log.WithError(err).Warn("unable to get payload from redis, trying database...") payloadDB, err := api.db.GetPayloadResp(payload.ChainID, payload.BlockNumber) if err != nil { @@ -1141,6 +1175,7 @@ func (api *ArcadiaAPI) handleGetPayload(w http.ResponseWriter, req *http.Request log.WithError(err).Warn("unable to convert payload from database, conversion failed.") } } + if getPayloadResp != nil { api.RespondOK(w, getPayloadResp) log.Infof("execution payload(from cache) delivered, timestampAfterLoadResponse %d", time.Now().UTC().UnixMilli()) @@ -1610,7 +1645,15 @@ func (api *ArcadiaAPI) handleSubmitNewBlockRequest(w http.ResponseWriter, req *h // specific checks for either ToB or RoB if isToB { + bundlesToFilter := 
make([]*common.CrossRollupBundle, 0) for _, bundle := range blockReq.ToBChunk().Bundles { + if !bundle.HasTxs() { + errMsg := fmt.Sprintf("bundle with hash [%s] contained no txs, rejecting request", bundle.BundleHash) + log.WithError(err).Warn(errMsg) + api.RespondError(w, http.StatusBadRequest, errMsg) + return + } + for _, chainIDStr := range bundle.Domains() { namespace, err := common.ChainIDStrToNamespace(chainIDStr) if err != nil { @@ -1619,14 +1662,35 @@ func (api *ArcadiaAPI) handleSubmitNewBlockRequest(w http.ResponseWriter, req *h api.RespondError(w, http.StatusBadRequest, errMsg) return } + + // The given rollup for chain id must be registered for this auction period else we reject if !api.datastore.IsRollupRegistered(headEpoch, namespace) { errMsg := fmt.Sprintf("builder chunk tried to build tob chunk with cross bundle rollup for unregistered rollup chain id [%s]", chainIDStr) log.Warn(errMsg) api.RespondError(w, http.StatusBadRequest, errMsg) return } + + // We will filter out the ToB bundle if we haven't received a RoB for each chain id it is using + if api.getNumChunksByChainID(chainIDStr) == 0 { + warnMsg := fmt.Sprintf("tob bundle filtered out due to lack of RoB for chain ID [%s] ", chainIDStr) + api.log.Warn(warnMsg) + bundlesToFilter = append(bundlesToFilter, bundle) + } + } + + // mark any bundles to be filtered as removed + for _, bundle := range bundlesToFilter { + _, err = blockReq.ToBChunk().FilterBundleWithHash(bundle.BundleHash) + if err != nil { + errMsg := fmt.Sprintf("failed to find bundle hash [%s] when trying to filter, possible state corruption", bundle.BundleHash) + log.Warn(errMsg) + api.RespondError(w, http.StatusBadRequest, errMsg) + return + } } } + // query the latest block numbers for each domain and assign to ToB blockNumbers, err := api.blockSimRateLimiter.GetBlockNumber(blockReq.ToBChunk().Domains()) if err != nil { @@ -1767,6 +1831,15 @@ func (api *ArcadiaAPI) handleSubmitNewBlockRequest(w http.ResponseWriter, req *h 
"tobNonce": headToBNonce, }).Debug("incoming chunk txs") + // tracks the kind of chunk we are receiving + if isToB { + api.trackChunkByChainID("tob") + } else { + api.trackChunkByChainID(chainID) + } + + // Simulation handling + // At this point, filter checks have completed and we want to perform simulation on txs in the chunk var simulationErr error // label all successful bundles to be accepted defer func() { @@ -1796,6 +1869,21 @@ func (api *ArcadiaAPI) handleSubmitNewBlockRequest(w http.ResponseWriter, req *h return } + if isToB { + // If all bundles are filtered, then we can reject this block. + // Note that simulation can also filter blocks. + var foundValidBundle bool + for i := 0; i < len(blockReq.ToBChunk().Bundles); i++ { + foundValidBundle = foundValidBundle || !blockReq.ToBChunk().IsBundleIdxFiltered(i) + } + if !foundValidBundle { + warnMsg := "no bundles found in tob, all bundles filtered" + log.Warn(warnMsg) + api.RespondError(w, http.StatusNoContent, warnMsg) + return + } + } + // we will only know the stable ToBNonce after adding this chunk to chunk manager and chunk manager will assign the latest one to it if err := api.chunkManager.AddChunk(chunk); err != nil { log.WithError(err).Warn("unable to add chunk to chunk manager") @@ -2167,6 +2255,7 @@ func (api *ArcadiaAPI) simulateChunk(chunk *common.ArcadiaChunk) error { for _, tx := range domainAllTxs { log.Debugf("tx: %s", tx.Hash().Hex()) } + baseNonces, err := api.blockSimRateLimiter.GetNonces(domain, domainAllTxs, lowestHeightStr) if err != nil { removeChunkTxsForDomain(domain, "unable to fetch domain base nonces") diff --git a/services/api/service_test.go b/services/api/service_test.go index 943e9b26..7e2b1527 100644 --- a/services/api/service_test.go +++ b/services/api/service_test.go @@ -2,7 +2,7 @@ package api import ( "bytes" - "context" + "crypto/rand" "encoding/binary" "encoding/hex" @@ -13,18 +13,17 @@ import ( mrand "math/rand" "net/http" "net/http/httptest" + "slices" "strconv" 
"strings" "sync" "testing" "time" - mda "github.com/AnomalyFi/Arcadia/datalayer/mocks" + mseq "github.com/AnomalyFi/Arcadia/seq/mocks" "golang.org/x/exp/maps" - "github.com/alicebob/miniredis/v2" - "github.com/AnomalyFi/hypersdk/actions" "github.com/AnomalyFi/hypersdk/crypto/ed25519" @@ -36,14 +35,10 @@ import ( hutils "github.com/AnomalyFi/hypersdk/utils" "github.com/AnomalyFi/nodekit-seq/auth" - chunkmanager "github.com/AnomalyFi/Arcadia/chunk_manager" "github.com/AnomalyFi/Arcadia/common" - "github.com/AnomalyFi/Arcadia/database" "github.com/AnomalyFi/Arcadia/datastore" "github.com/AnomalyFi/Arcadia/seq" - mseq "github.com/AnomalyFi/Arcadia/seq/mocks" "github.com/AnomalyFi/Arcadia/simulator" - msim "github.com/AnomalyFi/Arcadia/simulator/mocks" "github.com/AnomalyFi/hypersdk/chain" "github.com/AnomalyFi/hypersdk/codec" hrpc "github.com/AnomalyFi/hypersdk/rpc" @@ -58,354 +53,8 @@ import ( "github.com/stretchr/testify/require" ) -const ( - testManagerSecretKey = "0x3fae9bafcf1572be9a4d4b7f8e6cb1d0c4bca8ad1e6f75d3d1286ad0e3e5fba1" - mockSecretKeyHex = "0x4e343a647c5a5c44d76c2c58b63f02cdf3a9a0ec40f102ebc26363b4b1b95033" -) - -var ( - skBytes, _ = hexutil.Decode(mockSecretKeyHex) - mockSecretKey, _ = bls.SecretKeyFromBytes(skBytes) - mockPublicKey, _ = bls.PublicKeyFromSecretKey(mockSecretKey) - numTestAccounts = 10 - testAccounts = GenerateTestEthAccounts(numTestAccounts) -) - -type testBackend struct { - t require.TestingT - arcadia *ArcadiaAPI - datastore *datastore.Datastore - redis *datastore.RedisCache - simManager *bls.SecretKey - seqcli *mseq.MockBaseSeqClient - da *mda.MockIDASubmitter - simulator *msim.MockIBlockSimRateLimiter - currOpts *ArcadiaAPIOpts - useRedis bool - - daChunksChan chan *common.ArcadiaToSEQChunkMessage -} - -type testParams struct { - disableRedis bool -} - -func (tp *testParams) name() string { - return "tp_" + strconv.FormatBool(tp.disableRedis) + "_" -} - -func newTestBackend(t *testing.T) *testBackend { - return 
newTestBackendWithFlags(t, false) -} - -func newTestBackendWithRedisDown(t *testing.T) *testBackend { - return newTestBackendWithFlags(t, true) -} - -func newTestBackendWithFlags(t *testing.T, disableRedis bool) *testBackend { - redisClient, err := miniredis.Run() - require.NoError(t, err) - - redisCache, err := datastore.NewRedisCache("", redisClient.Addr(), "") - require.NoError(t, err) - - db := database.NewMockDB() - return newTestbackendWithCustomDatastore(t, redisCache, db, disableRedis) -} - -func newTestbackendWithCustomSEQCliNDatastore(t *testing.T, seqClient *mseq.MockBaseSeqClient, redisCache *datastore.RedisCache, db database.IDatabaseService, disableRedis bool) *testBackend { - logger := common.TestLog - logger.Logger.SetLevel(logrus.DebugLevel) - - ds, err := datastore.NewDatastore(redisCache, db, logger) - require.NoError(t, err) - - managerSkBytes, err := hexutil.Decode(testManagerSecretKey) - require.NoError(t, err) - managerSk, err := bls.SecretKeyFromBytes(managerSkBytes) - require.NoError(t, err) - - blockSim := msim.NewMockIBlockSimRateLimiter(t) - - seqClient.EXPECT().SetOnNewBlockHandler(mock.Anything).Return().Maybe() - seqClient.EXPECT().Parser().Return(&srpc.Parser{}).Maybe() - - highestSettledToBNonce, err := seqClient.GetHighestSettledToBNonce(context.TODO()) - require.NoError(t, err) - var lowestToBNonce uint64 - lowestToBNonceRef, err := ds.LoadLowestManagedStateToBNonce() - require.NoError(t, err) - if lowestToBNonceRef != nil { - lowestToBNonce = *lowestToBNonceRef - } - - t.Logf("initializing chunk manager with lowestStateToBNonce: %d settledToBNonce: %d", lowestToBNonce, highestSettledToBNonce) - - config := chunkmanager.ChunkManagerConfig{ - ExpirationTime: 5 * time.Minute, - GCInterval: 10 * time.Second, - LayerSubmissionCheckInterval: 100 * time.Second, - HighestSettledToBNonce: highestSettledToBNonce, - StateLowestToBNonce: lowestToBNonce, - Datastore: ds, - SEQClient: seqClient, - SEQChainParser: seqClient.Parser(), - Logger: 
common.TestLog, - } - - cm, err := chunkmanager.NewChunkManager(&config) - require.NoError(t, err) - - da := mda.NewMockIDASubmitter(t) - - opts := ArcadiaAPIOpts{ - Log: common.TestLog, - ListenAddr: "localhost:12345", - Datastore: ds, - Redis: redisCache, - DB: db, - DA: da, - ChunkManager: cm, - SeqClient: seqClient, - BlockSimulator: blockSim, - mockMode: true, - SlotSizeLimit: DefaultSizeLimit, - TestScenarioRedisDown: disableRedis, - } - - arcadia, err := NewArcadiaAPI(opts) - require.NoError(t, err) - - backend := testBackend{ - t: t, - arcadia: arcadia, - datastore: ds, - redis: redisCache, - simManager: managerSk, - seqcli: seqClient, - da: da, - simulator: blockSim, - currOpts: &opts, - useRedis: !disableRedis, - - daChunksChan: make(chan *common.ArcadiaToSEQChunkMessage), - } - - mockPublicKeyBytes := bls.PublicKeyToBytes(mockPublicKey) - mockPublicKeyHex := hex.EncodeToString(mockPublicKeyBytes[:]) - backend.datastore.SetKnownValidator("0x"+common.PubkeyHex(mockPublicKeyHex), 0) - backend.setLowestToBNonceForEpoch(t, 0, 0) - return &backend -} - -func newTestbackendWithCustomDatastore(t *testing.T, redisCache *datastore.RedisCache, db database.IDatabaseService, disableRedis bool) *testBackend { - seqClient := mseq.NewMockBaseSeqClient(t) - seqClient.EXPECT().SetOnNewBlockHandler(mock.Anything).Return().Maybe() - seqClient.EXPECT().Parser().Return(&srpc.Parser{}).Maybe() - seqClient.EXPECT().GetHighestSettledToBNonce(mock.Anything).Return(uint64(0), nil).Once() - - err := redisCache.SetLowestManagedStateToBNonce(0) - require.NoError(t, err) - err = db.SetAndUpdateLowestManagedStateToBNonce(&common.ToBNoncesInfoDB{ - ToBNonce: 0, - }) - require.NoError(t, err) - - return newTestbackendWithCustomSEQCliNDatastore(t, seqClient, redisCache, db, false) -} - -func newDatastores(t *testing.T) (*datastore.RedisCache, database.IDatabaseService) { - redisClient, err := miniredis.Run() - require.NoError(t, err) - - redisCache, err := datastore.NewRedisCache("", 
redisClient.Addr(), "") - require.NoError(t, err) - - db := database.NewMockDB() - - return redisCache, db -} - -// setLowestToBNonceForEpoch is used to set the lowest tob nonce for epoch in datastore for testing -// fails with error only if both redis and database error out -func (be *testBackend) setLowestToBNonceForEpoch(t *testing.T, epoch uint64, tobNonce uint64) { - redisErr := be.redis.SetEpochLowestToBNonce(epoch, tobNonce) - if redisErr != nil { - dbErr := be.arcadia.db.SetEpochLowestToBNonce(epoch, tobNonce) - require.NoError(t, dbErr) - } -} - -func (be *testBackend) GetArcadia() *ArcadiaAPI { - return be.arcadia -} - -func (be *testBackend) GetRedis() *datastore.RedisCache { - return be.redis -} - -func (be *testBackend) request(method, path string, payload any) *httptest.ResponseRecorder { - var req *http.Request - var err error - - path = "/api" + path - - if payload == nil { - req, err = http.NewRequest(method, path, bytes.NewReader(nil)) - } else { - payloadBytes, err2 := json.Marshal(payload) - require.NoError(be.t, err2) - req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) - } - require.NoError(be.t, err) - - rr := httptest.NewRecorder() - be.arcadia.getRouter().ServeHTTP(rr, req) - return rr -} - -func (be *testBackend) RequestWithHeaders(method, path string, payload any, headers map[string]string) *httptest.ResponseRecorder { - var req *http.Request - var err error - path = "/api" + path - - if payload == nil { - req, err = http.NewRequest(method, path, bytes.NewReader(nil)) - } else { - payloadBytes, err2 := json.Marshal(payload) - require.NoError(be.t, err2) - req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) - } - require.NoError(be.t, err) - for header, value := range headers { - req.Header.Set(header, value) - } - rr := httptest.NewRecorder() - be.arcadia.getRouter().ServeHTTP(rr, req) - return rr -} - -func (be *testBackend) RequestWithPayloadHeader(method, path string, payload any, sig string) 
*httptest.ResponseRecorder { - return be.RequestWithHeaders(method, path, payload, map[string]string{ - GetPayloadHeaderRollupSig: sig, - }) -} - -func (be *testBackend) SetupRegisteredRollups(epoch uint64, chainID *big.Int) { - namespace := common.ChainIDToNamespace(chainID) - rollup := &actions.RollupInfo{ - Namespace: namespace, - } - err := be.GetArcadia().datastore.SetRegisteredRollup(epoch, rollup) - if err != nil { - panic("failed to set registered rollup") - } -} - -func (be *testBackend) SetupRegisteredRollupsWithPublicKey(epoch uint64, chainID *big.Int, seqPk []byte) { - namespace := common.ChainIDToNamespace(chainID) - rollup := &actions.RollupInfo{ - Namespace: namespace, - SequencerPublicKey: seqPk, - } - err := be.GetArcadia().datastore.SetRegisteredRollup(epoch, rollup) - if err != nil { - panic("failed to set registered rollup") - } -} - -func (be *testBackend) SetupMockDASubmitter() { - be.da.EXPECT().ChunksChan().Return(be.daChunksChan) -} - -type SimAuctionWinnerInfo struct { - SeqChainID ids.ID - Bid *common.Auction - SecretKey *ed25519.PrivateKey -} - -// AdvanceToNextEpoch advance the arcadia backend to next epoch -// Can optionally take pointer to SimAuctionWinnerInfo which will produce a winning block with auction when needed -// Returns the epoch advanced that is advanced to -func (be *testBackend) AdvanceToNextEpoch( - t *testing.T, - auctionWinner *SimAuctionWinnerInfo, - checkSlotConsistency bool, -) uint64 { - currEpoch := be.arcadia.headEpoch.Load() - targetEpoch := currEpoch + 1 - targetSlot := targetEpoch * 6 - - slot := be.arcadia.headSlot.Load() - if checkSlotConsistency { - if slot != currEpoch*6 { - t.Fatal("AdvanceToNextEpoch() detected slot and epoch inconsistency") - } - } - - // process the left block for this epoch - for currSlot := slot; currSlot <= targetSlot; currSlot++ { - var block *chain.StatefulBlock - var results []*chain.Result - if auctionWinner == nil || currSlot%common.SlotsPerEpoch != 5 { - block, results = 
MakeEmptySEQBlock(currSlot) - } else { - block, results = MakeSEQBlockWithOneAuction(t, auctionWinner.SecretKey, currSlot, auctionWinner.SeqChainID, auctionWinner.Bid) - } - - be.arcadia.onNewSeqBlock(block, results) - - require.Equal(t, currSlot, be.arcadia.headSlot.Load()) - } - // sleep to wait announceAuctionWinnerToSeq to be finished - if auctionWinner != nil { - time.Sleep(3 * time.Second) - } - - require.Equal(t, be.arcadia.headEpoch.Load(), targetEpoch) - require.Equal(t, be.arcadia.headSlot.Load(), targetSlot) - - return targetEpoch -} - -func (be *testBackend) AdvanceToNextSlotInSameEpoch(t *testing.T) { - currEpoch := be.arcadia.headEpoch.Load() - slot := be.arcadia.headSlot.Load() - targetSlot := slot + 1 - - if targetSlot/common.SlotsPerEpoch > currEpoch { - return - } - - var block *chain.StatefulBlock - var results []*chain.Result - block, results = MakeEmptySEQBlock(targetSlot) - be.arcadia.onNewSeqBlock(block, results) - require.Equal(t, targetSlot, be.arcadia.headSlot.Load()) - - require.Equal(t, be.arcadia.headEpoch.Load(), currEpoch) // shouldn't advance - require.Equal(t, be.arcadia.headSlot.Load(), targetSlot) -} - -func (be *testBackend) SetMockSeqClientRollupExpectations(targetEpoch uint64, rollups []*actions.RollupInfo) error { - // The last fetched registered rollup value is checked when Arcadia updates the registered rollups map. - // If it doesn't equal our expected epoch, then the update doesn't take place. 
- be.seqcli.EXPECT().GetRollupsValidAtEpoch(mock.Anything, targetEpoch).Return(rollups, nil).Maybe() - - for _, rollup := range rollups { - err := be.arcadia.datastore.SetRegisteredRollup(targetEpoch, rollup) - if err != nil { - return err - } - } - - if be.useRedis { - for _, rollup := range rollups { - err := be.redis.InsertRollupAtEpoch(targetEpoch, rollup) - require.NoError(be.t, err) - } - } - return nil +func init() { + _, _ = TestChainParser.Registry() } func TestWebserver(t *testing.T) { @@ -1275,14 +924,12 @@ func TestSubmitChunkToSEQValidators(t *testing.T) { require.NoError(t, err) wsURL := "ws://" + backend.arcadia.opts.ListenAddr + "/ws" + pathTestWebsocket - fmt.Printf("ws url: %s\n", wsURL) conn, _, err := websocket.DefaultDialer.Dial(wsURL, nil) require.NoError(t, err) // construct test chunk - testSeqChainID := ids.GenerateTestID() parser := srpc.Parser{} - tob, err := common.MakeRandomToB(testSeqChainID, &parser, 2, 2, 2) + tob, err := common.MakeRandomToB(TestSeqChainID, &parser, 2, 2, 2) require.NoError(t, err) chunkID, err := tob.ID() require.NoError(t, err) @@ -1356,9 +1003,6 @@ func TestSubscribeValidators(t *testing.T) { chainIDRes := backend.seqcli.GetChainID() t.Log(chainIDRes) - vals := backend.seqcli.CurrentValidators(context.Background()) - fmt.Println(vals) - pk, err := bls.PublicKeyFromSecretKey(sk) require.NoError(t, err) pkBytes := pk.Bytes() @@ -1422,9 +1066,6 @@ func TestSubscribeValidators(t *testing.T) { chainIDRes := backend.seqcli.GetChainID() t.Log(chainIDRes) - vals := backend.seqcli.CurrentValidators(context.Background()) - fmt.Println(vals) - // Start a test server router := backend.arcadia.getRouter() testServer := httptest.NewServer(router) @@ -1673,16 +1314,14 @@ func TestSubscribeValidators(t *testing.T) { func TestRecvPreconf(t *testing.T) { subscribePath := "/ws/arcadia/v1/validator/subscribe" // shared info - testSeqChainID := ids.GenerateTestID() - testSeqNetworkID := uint32(1337) parser := srpc.Parser{} 
originChainID := big.NewInt(45200) originBlockHeight := uint64(100) setupBackend := func(backend *testBackend, nodes []*hrpc.Validator) { backend.arcadia.srvStarted.Store(true) - backend.seqcli.EXPECT().GetChainID().Return(testSeqChainID) - backend.seqcli.EXPECT().GetNetworkID().Return(testSeqNetworkID) + backend.seqcli.EXPECT().GetChainID().Return(TestSeqChainID) + backend.seqcli.EXPECT().GetNetworkID().Return(TestSeqNetworkID) backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(nodes) for _, node := range nodes { @@ -1711,9 +1350,6 @@ func TestRecvPreconf(t *testing.T) { chainIDRes := backend.seqcli.GetChainID() t.Log(chainIDRes) - vals := backend.seqcli.CurrentValidators(context.Background()) - fmt.Println(vals) - pk, err := bls.PublicKeyFromSecretKey(sk) require.NoError(t, err) pkBytes := pk.Bytes() @@ -1734,7 +1370,7 @@ func TestRecvPreconf(t *testing.T) { // check msg t.Log(randomBytes) - uwm, err := warp.NewUnsignedMessage(testSeqNetworkID, testSeqChainID, randomBytes) + uwm, err := warp.NewUnsignedMessage(TestSeqNetworkID, TestSeqChainID, randomBytes) require.NoError(t, err) t.Log(uwm) uwmBytes := uwm.Bytes() @@ -1780,7 +1416,7 @@ func TestRecvPreconf(t *testing.T) { } // the following chunks are initialized within the util methods - rob1, err := common.MakeRandomRoB(testSeqChainID, &parser, originChainID, originBlockHeight, 10) + rob1, err := common.MakeRandomRoB(TestSeqChainID, &parser, originChainID, originBlockHeight, 10) require.NoError(t, err) rob1.SetChunkID(ids.GenerateTestID()) rob1ID, err := rob1.ID() @@ -1791,9 +1427,9 @@ func TestRecvPreconf(t *testing.T) { }) // rob case - rob1Sig0, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[0], rob1ID[:]) + rob1Sig0, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[0], rob1ID[:]) require.NoError(t, err) - rob1Sig1, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[1], rob1ID[:]) + rob1Sig1, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, 
asks[1], rob1ID[:]) require.NoError(t, err) val2 := common.ValidatorMessage{ @@ -1823,6 +1459,8 @@ func TestRecvPreconf(t *testing.T) { t.Run("recv enough preconfs for tob", func(t *testing.T) { // set up backend backend := newTestBackend(t) + backend.SetupMockDASubmitter() + weights := []uint64{30, 30, 3, 2, 1} nodes := make([]*hrpc.Validator, 0, 5) sks := make([]*bls.SecretKey, 0, 5) @@ -1847,7 +1485,7 @@ func TestRecvPreconf(t *testing.T) { } // the following chunks are initialized within the util methods - tob1, err := common.MakeRandomToB(testSeqChainID, &parser, 2, 2, 2) + tob1, err := common.MakeRandomToB(TestSeqChainID, &parser, 2, 2, 2) require.NoError(t, err) tob1.SetChunkID(ids.GenerateTestID()) tob1ID, err := tob1.ID() @@ -1857,11 +1495,11 @@ func TestRecvPreconf(t *testing.T) { Chunk: tob1, }) - tob1Sig1, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[0], tob1ID[:]) + tob1Sig1, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[0], tob1ID[:]) require.NoError(t, err) - tob1Sig2, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[1], tob1ID[:]) + tob1Sig2, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[1], tob1ID[:]) require.NoError(t, err) - tob1Sig3, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[2], tob1ID[:]) + tob1Sig3, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[2], tob1ID[:]) require.NoError(t, err) val1 := common.ValidatorMessage{ @@ -1939,7 +1577,7 @@ func TestRecvPreconf(t *testing.T) { } // the following chunks are initialized within the util methods - rob1, err := common.MakeRandomRoB(testSeqChainID, &parser, originChainID, originBlockHeight, 10) + rob1, err := common.MakeRandomRoB(TestSeqChainID, &parser, originChainID, originBlockHeight, 10) require.NoError(t, err) rob1.SetChunkID(ids.GenerateTestID()) rob1ID, err := rob1.ID() @@ -1950,11 +1588,11 @@ func TestRecvPreconf(t *testing.T) { }) // rob case - rob1Sig1, err := 
common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[0], rob1ID[:]) + rob1Sig1, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[0], rob1ID[:]) require.NoError(t, err) - rob1Sig2, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[1], rob1ID[:]) + rob1Sig2, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[1], rob1ID[:]) require.NoError(t, err) - rob1Sig3, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, asks[2], rob1ID[:]) + rob1Sig3, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, asks[2], rob1ID[:]) require.NoError(t, err) val1 := common.ValidatorMessage{ @@ -2045,7 +1683,7 @@ func TestRecvPreconf(t *testing.T) { } // the following chunks are initialized within the util methods - rob1, err := common.MakeRandomRoB(testSeqChainID, &parser, originChainID, originBlockHeight, 10) + rob1, err := common.MakeRandomRoB(TestSeqChainID, &parser, originChainID, originBlockHeight, 10) require.NoError(t, err) rob1.SetChunkID(ids.GenerateTestID()) rob1ID, err := rob1.ID() @@ -2066,7 +1704,7 @@ func TestRecvPreconf(t *testing.T) { ask, err := abls.SecretKeyFromBytes(skBytes[:]) require.NoError(t, err) - sig, err := common.SignSEQMsg(testSeqChainID, testSeqNetworkID, ask, rob1ID[:]) + sig, err := common.SignSEQMsg(TestSeqChainID, TestSeqNetworkID, ask, rob1ID[:]) require.NoError(t, err) val := common.ValidatorMessage{ ChunkID: rob1ID, @@ -2169,21 +1807,20 @@ func TestOnAuctionWinnerForEpoch(t *testing.T) { // create test chain tx to return for seqclient mock originChainID := big.NewInt(0x40000) - testSeqChainID := ids.GenerateTestID() + TestSeqChainID := ids.GenerateTestID() bundleTxs := map[string]ethtypes.Transactions{ "0x40000": { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, } - seqTx := 
CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) backend.seqcli.EXPECT().GenerateTransaction(mock.Anything, mock.Anything).Return(seqTx, nil) // this should trigger onAuctionWinnerNotification backend.GetArcadia().processNewSlot(targetSlot) time.Sleep(200 * time.Millisecond) // wait a little so goroutine can process auction - err = backend.SetMockSeqClientRollupExpectations(targetEpoch+1, nil) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(targetEpoch+1, nil) // the following operation will create a SEQ block that contain the auction winner info const ArcadiaSecretKeyhex = "323b1d8f4eed5f0da9da93071b034f2dce9d2d22692c172f3cb252a64ddfafd01b057de320297c29ad0c1f589ea216869cf1938d88c9fbd70d6748323dbf2fa7" @@ -2191,7 +1828,7 @@ func TestOnAuctionWinnerForEpoch(t *testing.T) { require.NoError(t, err) sk := ed25519.PrivateKey(skBytes) backend.AdvanceToNextEpoch(t, &SimAuctionWinnerInfo{ - SeqChainID: testSeqChainID, + SeqChainID: TestSeqChainID, Bid: &bids[2], SecretKey: &sk, }, false) @@ -2225,20 +1862,39 @@ func TestBLSPublicKeyConversion(t *testing.T) { require.Equal(t, testBuilderPublicKey, newPk) } -func TestHandleSubmitNewBlockRequest(t *testing.T) { - epoch := uint64(0) +func TestTxsHashStability(t *testing.T) { + epoch := uint64(100) + bundleTxs := map[string]ethtypes.Transactions{ + TestOriginChainIDStr: { + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, 1, 21000, nil), + }, + TestRemoteChainIDStr: { + CreateEthTransfer(t, TestRemoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, TestAccounts[1].Nonce, 21000, nil), + }, + } - // Build hypersdk registry - var chainParser = &srpc.Parser{} - _, _ = chainParser.Registry() + tobOpts := buildToBOptsWithBundles(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce, bundleTxs) + tobReq := CreateToBReq(t, tobOpts) - // Build test builder keys - 
testBuilderSecretKey, err := bls.GenerateRandomSecretKey() - require.NoError(t, err) - testBuilderPublicKey, err := bls.PublicKeyFromSecretKey(testBuilderSecretKey) - require.NoError(t, err) - testSeqChainID := ids.GenerateTestID() - builderPkBytes := testBuilderPublicKey.Bytes() + require.Greater(t, len(bundleTxs[TestOriginChainIDStr]), 0) + require.Greater(t, len(bundleTxs[TestRemoteChainIDStr]), 0) + + bundleTxOriginHash := bundleTxs[TestOriginChainIDStr][0].Hash() + bundleTxRemoteHash := bundleTxs[TestRemoteChainIDStr][0].Hash() + + tobReqTxs := tobReq.Chunk.ToB.GetTxs() + require.Greater(t, len(tobReqTxs[TestOriginChainIDStr]), 0) + require.Greater(t, len(tobReqTxs[TestRemoteChainIDStr]), 0) + + tobReqTxOriginHash := tobReqTxs[TestOriginChainIDStr][0].Hash() + tobReqTxRemoteHash := tobReqTxs[TestRemoteChainIDStr][0].Hash() + + require.Equal(t, bundleTxOriginHash, tobReqTxOriginHash) + require.Equal(t, bundleTxRemoteHash, tobReqTxRemoteHash) +} + +func TestHandleSubmitNewBlockRequest(t *testing.T) { + epoch := uint64(0) // Helper for processing block requests to the backend. Returns the status code of the request. 
processBlockRequest := func(backend *testBackend, blockReq *common.SubmitNewBlockRequest) int { @@ -2248,40 +1904,31 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { // test ethereum signing keys t.Log("account info") - for _, acct := range testAccounts { + for _, acct := range TestAccounts { t.Logf("acct(%s) info: nonce(%d), balance(%d)\n", acct.Address.Hex(), acct.Nonce, acct.Balance.Int64()) } t.Run("Warmup period blocks submit new block", func(t *testing.T) { warmupEpoch := uint64(1) - originChainID := big.NewInt(50000) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: warmupEpoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] - 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) // instantiate backend backend := newTestBackend(t) - err := backend.arcadia.datastore.SetAuctionWinner(warmupEpoch, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) @@ -2294,77 +1941,61 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { }) t.Run("test chunk 
db conversion functions", func(t *testing.T) { - // test ethereum signing keys - t.Log("account info") - for _, acct := range testAccounts { - t.Logf("acct(%s) info: nonce(%d), balance(%d)\n", acct.Address.Hex(), acct.Nonce, acct.Balance.Int64()) - } - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - } epoch := uint64(1) // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] - 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) chunkDB, err := common.ConvertRoBChunkToDBType(robReq.RoBChunk()) require.NoError(t, err) - chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, chainParser) + chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, TestChainParser) require.NoError(t, err) require.NotNil(t, chunk2) + // compare fields require.Equal(t, robReq.RoBChunk().ChainID, chunk2.ChainID, "ChainID mismatch") require.Equal(t, robReq.RoBChunk().BlockNumber, chunk2.BlockNumber, "GetBlockNumber mismatch") + // compare Txs require.Equal(t, len(robReq.RoBChunk().Txs), len(chunk2.Txs), "Number of transactions mismatch") for i := range robReq.RoBChunk().Txs { require.Equal(t, robReq.RoBChunk().Txs[i], chunk2.Txs[i], "Transaction mismatch at 
index %d", i) } + require.Equal(t, robReq.RoBChunk().SEQTxs(), chunk2.SEQTxs()) require.Equal(t, robReq.RoBChunk().GetTxs().Len(), chunk2.GetTxs().Len()) require.True(t, robReq.RoBChunk().RemovedBitSet().Equal(chunk2.RemovedBitSet())) - }) t.Run("just RoB without registered rollup is rejected", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] - 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) + chunkDB, err := common.ConvertRoBChunkToDBType(robReq.RoBChunk()) require.NoError(t, err) - chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, chainParser) + chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, TestChainParser) require.NoError(t, err) require.NotNil(t, chunk2) // compare fields @@ -2381,46 +2012,35 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { // instantiate backend backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + 
backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) rrCode1 := processBlockRequest(backend, robReq) require.Equal(t, http.StatusBadRequest, rrCode1) }) - t.Run("Run valid base case, just RoB", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing RoB + t.Run("test db chunk conversions", func(t *testing.T) { robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] - 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) // testing chunk db conversions for rob chunkDB, err := common.ConvertRoBChunkToDBType(robReq.RoBChunk()) require.NoError(t, err) - chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, chainParser) + chunk2, err := common.ConvertRoBChunkDBToRoBChunk(chunkDB, TestChainParser) require.NoError(t, err) require.NotNil(t, chunk2) // compare fields @@ -2434,184 +2054,59 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { 
require.Equal(t, robReq.RoBChunk().SEQTxs(), chunk2.SEQTxs()) require.Equal(t, robReq.RoBChunk().GetTxs().Len(), chunk2.GetTxs().Len()) require.True(t, robReq.RoBChunk().RemovedBitSet().Equal(chunk2.RemovedBitSet())) + }) - // instantiate backend + t.Run("Run valid base case, just RoB", func(t *testing.T) { backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) - redis := backend.redis - redis.SetSizeTracker(backend.arcadia.sizeTracker) // register test rollup - backend.SetupRegisteredRollups(epoch, originChainID) - - // set up mock expectations - // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - t.Log("========setup & send RoB============") - for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, - } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) - // simulation results, only the txs from this RoB will be simulated and the txs from ToB will be filtered out - callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) - for _, tx := range expectedTxs { - callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ - TxHash: tx.Hash().Hex(), 
- Error: "", - Revert: "", - }) - } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) - require.NoError(t, err) - validationReq := common.BlockValidationRequest{ - Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - StateBlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - } - backend.simulator.EXPECT(). - SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). - Return(100, callBundleRes, nil) + // constructing RoB + robTxs := ethtypes.Transactions{ + // conflicting tx with prev ToB + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), + } + robOpts := &CreateTestBlockSubmissionOpts{ + Epoch: epoch, + SeqChainID: TestSeqChainID, + OriginChainID: *TestOriginChainIDInt, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] - 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + Txs: robTxs, } - rrCode1 := processBlockRequest(backend, robReq) - require.Equal(t, http.StatusOK, rrCode1) + rob1 := backend.submitRoBChunk(t, robOpts, TestChainParser) + require.NotNil(t, rob1) }) t.Run("Run valid base case, just ToB", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing ToB - bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), - }, - } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) - oSeqTx, err 
:= chain.MarshalTxs([]*chain.Transaction{seqTx}) - require.NoError(t, err) - - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: oSeqTx, - RevertingTxHashes: nil, - } - - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) - - backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) - redis := backend.redis - redis.SetSizeTracker(backend.arcadia.sizeTracker) + backend := newTestBackend(t) // register test rollup - backend.SetupRegisteredRollups(epoch, originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) - // set up mock expectations - // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - // adding new tob will create a new layer, which will try submit the previous layer to SEQ - actsChan := make(chan []chain.Action) - backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.SetupRegisteredRollups(epoch, TestOriginChainID) + backend.SetupRegisteredRollups(epoch, TestRemoteChainID) - t.Log("========setup & send TOB============") - tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) - require.NoError(t, err) - common.DisplayEthTxs(tobTxs) - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) - require.NoError(t, err) - common.DisplayEthTxs(allTxs) - // expectations for the first tob - for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { + rob1, rob2 := backend.setupRoBsForToBTestWithBlockNumOffset(t, epoch, -1) - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if 
tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + // Sends the ToB + tobOpts := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce) + robChunks := common.CollectChunksFromRequests(rob1, rob2) + tobReq := backend.submitToBChunkWithOffset(t, tobOpts, TestChainParser, robChunks, -2) + require.NotNil(t, tobReq) - // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out - callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) - for _, tx := range expectedTxs { - callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ - TxHash: tx.Hash().Hex(), - Error: "", - Revert: "", - }) - } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) - require.NoError(t, err) - validationReq := common.BlockValidationRequest{ - Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain]), - StateBlockNumber: uint64ToHexString(blockNumbers[domain]), - } - backend.simulator.EXPECT(). - SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
- Return(100, callBundleRes, nil) - } - - rrCode := processBlockRequest(backend, tobReq) - require.Equal(t, http.StatusOK, rrCode) cmTxs, cmHeights, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - TxsTheSame(t, bundleTxs, cmTxs) - require.Equal(t, blockNumbers, cmHeights) + allChunksTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk, &rob1.Chunk, &rob2.Chunk}) + require.NoError(t, err) + + common.DisplayEthTxs(allChunksTxs) + common.DisplayEthTxs(cmTxs) + TxsTheSameUnordered(t, allChunksTxs, cmTxs) + require.Equal(t, TestBlockNumbers[TestOriginChainIDStr]-2, cmHeights[TestOriginChainIDStr]) + require.Equal(t, TestBlockNumbers[TestRemoteChainIDStr]-2, cmHeights[TestRemoteChainIDStr]) for _, bundle := range tobReq.Chunk.ToB.GetBundles() { status, err := backend.redis.GetBundleStatus(bundle.BundleHash) @@ -2621,25 +2116,16 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { }) t.Run("just ToB without registered rollup for each bundle is rejected", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - // constructing ToB bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + TestOriginChainIDStr: { + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), + TestRemoteChainIDStr: { + CreateEthTransfer(t, TestRemoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, TestAccounts[1].Nonce, 
21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) require.NoError(t, err) @@ -2651,136 +2137,80 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + ToBBlockNumber: TestBlockNumbers, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Bundles: []*common.CrossRollupBundle{&bundle}, }) backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, epoch) + redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) // register test rollup // note only the remote chain id is omitted which should cause rejection - backend.SetupRegisteredRollups(epoch, originChainID) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) rrCode := processBlockRequest(backend, tobReq) require.Equal(t, http.StatusBadRequest, rrCode) }) t.Run("incoming RoB conflicts with previous ToB, tx with the same nonce", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing ToB - bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), - }, - 
remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), - }, - } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) - oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) - require.NoError(t, err) + backend := newTestBackend(t) - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: oSeqTx, - RevertingTxHashes: nil, - } + rob1, rob2 := backend.setupRoBsForToBTestWithBlockNumOffset(t, epoch, -1) - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) + tobOpts := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce) + robChunks := common.CollectChunksFromRequests(rob1, rob2) + tobReq := backend.submitToBChunkWithOffset(t, tobOpts, TestChainParser, robChunks, -2) - // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce+1, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] + 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] + 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) - backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) 
- redis := backend.redis - redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) - - // register test rollup - backend.SetupRegisteredRollups(epoch, originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) - - // set up mock expectations - // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - // adding new tob will create a new layer, which will try submit the previous layer to SEQ - actsChan := make(chan []chain.Action) - backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() - - t.Log("========setup & send TOB============") - tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) - require.NoError(t, err) - common.DisplayEthTxs(tobTxs) - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk, &robReq.Chunk}) + // for the gas check calculation with robs+tob + allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(allTxs) - // expectations for the first tob - for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { + // for the rob after tob + allTxs2, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk, &robReq.Chunk}) + require.NoError(t, err) + common.DisplayEthTxs(allTxs2) - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") + // only rob domain valid for the below + delete(allTxs2, TestRemoteChainIDStr) - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + 
t.Log("========setup & send RoB after ToB============") + for domain, expectedTxs := range allTxs2 { + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) + nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + matchTxs := ExpectedMatchTxs(expectedTxs) + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain]-2)).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain]-2)).Return(balances, nil) + } + + for domain, expectedTxs := range allTxs { // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) for _, tx := range expectedTxs { @@ -2794,63 +2224,25 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { require.NoError(t, err) validationReq := common.BlockValidationRequest{ Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain]), - StateBlockNumber: uint64ToHexString(blockNumbers[domain]), + BlockNumber: uint64ToHexString(TestBlockNumbers[domain] - 2), + StateBlockNumber: uint64ToHexString(TestBlockNumbers[domain] - 2), } backend.simulator.EXPECT(). SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
Return(100, callBundleRes, nil) } - rrCode := processBlockRequest(backend, tobReq) - require.Equal(t, http.StatusOK, rrCode) - cmTxs, _, err := backend.arcadia.chunkManager.Txs() - require.NoError(t, err) - TxsTheSame(t, bundleTxs, cmTxs) - - // for the rob - t.Log("========setup & send RoB============") - for domain, expectedTxs := range allTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) - } - rrCode1 := processBlockRequest(backend, robReq) require.Equal(t, http.StatusOK, rrCode1) require.Equal(t, 2, len(backend.arcadia.chunkManager.Chunks())) validTxs, cmHeights, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - t.Log("expected txs") - common.DisplayEthTxs(bundleTxs) - t.Log("validTxs from chunk manager") - common.DisplayEthTxs(validTxs) - TxsTheSame(t, bundleTxs, validTxs) - require.Equal(t, blockNumbers, cmHeights) + TxsTheSameUnordered(t, allTxs, validTxs) + require.Equal(t, len(cmHeights), 2) + require.Equal(t, cmHeights[TestOriginChainIDStr], TestBlockNumbers[TestOriginChainIDStr]-2) + require.Equal(t, cmHeights[TestRemoteChainIDStr], TestBlockNumbers[TestRemoteChainIDStr]-2) for _, bundle := range 
tobReq.Chunk.ToB.GetBundles() { status, err := backend.redis.GetBundleStatus(bundle.BundleHash) @@ -2859,102 +2251,41 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { } }) - t.Run("incoming ToB conflicts with previous RoBs, txs with incosecutive nonces", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing RoB - robTxs := ethtypes.Transactions{ - // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), - } - robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Txs: robTxs, - }) - - // constructing ToB - bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), - }, - } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) - oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) - require.NoError(t, err) + t.Run("incoming ToB conflicts with previous RoBs, txs with nonconsecutive nonce", func(t *testing.T) { + nonconsecutiveNonceRemoteTx := TestAccounts[1].Nonce + 5 + tobOpts := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+1, nonconsecutiveNonceRemoteTx) + tobReq := CreateToBReq(t, tobOpts) - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: 
oSeqTx, - RevertingTxHashes: nil, - } + backend := newTestBackend(t) - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) + // sends two robs to warmup the tob + rob1, rob2 := backend.setupRoBsForToBTest(t) - // instantiate backend - backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) + backend.simulator.EXPECT().GetBlockNumber([]string{TestOriginChainIDStr, TestRemoteChainIDStr}).Return(TestBlockNumbers, nil) // register test rollup - backend.SetupRegisteredRollups(epoch, originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) + backend.SetupRegisteredRollups(epoch, TestRemoteChainID) - // set up mock expectations - // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - fmt.Println("========setup & send RoB============") - for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, - } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } + t.Log("========setup & send TOB============") + tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) + require.NoError(t, err) - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - 
fmt.Printf("]\n") + tobTxs[TestOriginChainIDStr] = slices.Insert(tobTxs[TestOriginChainIDStr], 0, rob1.Chunk.RoB.GetTxs()[0]) + tobTxs[TestRemoteChainIDStr] = slices.Insert(tobTxs[TestRemoteChainIDStr], 0, rob2.Chunk.RoB.GetTxs()[0]) - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) + allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk}) + require.NoError(t, err) + t.Log("all txs") + common.DisplayEthTxs(allTxs) + + robTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk}) + require.NoError(t, err) + + for domain, expectedTxs := range tobTxs { // simulation results, only the txs from this RoB will be simulated and the txs from ToB will be filtered out callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) for _, tx := range expectedTxs { @@ -2964,72 +2295,38 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { Revert: "", }) } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) + + robDomainTxs := robTxs[domain] + rawExpectedTxs, err := CollectRawTxs(robDomainTxs) require.NoError(t, err) validationReq := common.BlockValidationRequest{ Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - StateBlockNumber: uint64ToHexString(blockNumbers[domain] - 2), + BlockNumber: uint64ToHexString(TestBlockNumbers[domain]), + StateBlockNumber: uint64ToHexString(TestBlockNumbers[domain]), } backend.simulator.EXPECT(). SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
Return(100, callBundleRes, nil) - } - rrCode1 := processBlockRequest(backend, robReq) - require.Equal(t, http.StatusOK, rrCode1) - - t.Log("========setup & send TOB============") - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&robReq.Chunk, &tobReq.Chunk}) - require.NoError(t, err) - t.Log("all txs") - common.DisplayEthTxs(allTxs) - for domain, expectedTxs := range allTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - if domain == originChainIDStr { - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) - } else { - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) - } + + matchTxs := ExpectedMatchTxs(expectedTxs) + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), 
uint64ToHexString(TestBlockNumbers[domain])).Return(balances, nil) } + rrCode := processBlockRequest(backend, tobReq) - require.Equal(t, http.StatusOK, rrCode) + require.Equal(t, http.StatusNoContent, rrCode) - require.Equal(t, 2, len(backend.arcadia.chunkManager.Chunks())) + require.Equal(t, 1, len(backend.arcadia.chunkManager.Chunks())) validTxs, cmHeights, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - TxsTheSame(t, map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, - }, validTxs) - expectedLowestHeights := maps.Clone(blockNumbers) - // delete the remote block number since ToB's conflicting with previous RoBs and bundle got removed - delete(expectedLowestHeights, remoteChainIDStr) - expectedLowestHeights[originChainIDStr] = blockNumbers[originChainIDStr] - 2 + TxsTheSameUnordered(t, robTxs, validTxs) + expectedLowestHeights := maps.Clone(TestBlockNumbers) require.Equal(t, expectedLowestHeights, cmHeights) for _, bundle := range tobReq.Chunk.ToB.GetBundles() { @@ -3040,109 +2337,56 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { }) t.Run("incoming RoB conflicts with previous ToB, disallowed simulation revert or error", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing ToB - bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), - }, - } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) - oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) - 
require.NoError(t, err) - - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: oSeqTx, - RevertingTxHashes: nil, - } - - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) + tobOpts := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce) + tobReq := CreateToBReq(t, tobOpts) // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce+1, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce+2, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] + 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + SeqChainID: TestSeqChainID, + RoBChainID: TestOriginChainID, + RoBBlockNumber: TestBlockNumbers[TestOriginChainIDStr] + 1, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + // sends two robs to warmup the tob + rob1, rob2 := backend.setupRoBsForToBTest(t) + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) + backend.simulator.EXPECT().GetBlockNumber([]string{TestOriginChainIDStr, TestRemoteChainIDStr}).Return(TestBlockNumbers, nil) // register test rollup - backend.SetupRegisteredRollups(epoch, 
originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) + backend.SetupRegisteredRollups(epoch, TestRemoteChainID) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - // adding new tob will create a new layer, which will try submit the previous layer to SEQ - actsChan := make(chan []chain.Action) - backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.seqcli.EXPECT().Parser().Return(TestChainParser) t.Log("========setup & send TOB============") - tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) + tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(tobTxs) - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk, &robReq.Chunk}) + allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk, &robReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(allTxs) // expectations for the first tob for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), 
uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + matchTxs := ExpectedMatchTxs(expectedTxs) + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(balances, nil) // simulation results callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) @@ -3157,53 +2401,31 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { require.NoError(t, err) validationReq := common.BlockValidationRequest{ Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain]), - StateBlockNumber: uint64ToHexString(blockNumbers[domain]), + BlockNumber: uint64ToHexString(TestBlockNumbers[domain]), + StateBlockNumber: uint64ToHexString(TestBlockNumbers[domain]), } backend.simulator.EXPECT(). SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
Return(100, callBundleRes, nil) } - rrCode := processBlockRequest(backend, tobReq) require.Equal(t, http.StatusOK, rrCode) - cmTxs, _, err := backend.arcadia.chunkManager.Txs() - require.NoError(t, err) - TxsTheSame(t, bundleTxs, cmTxs) t.Log("========setup & send RoB============") for domain, expectedTxs := range allTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + matchTxs := ExpectedMatchTxs(expectedTxs) + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(balances, nil) // simulation results, the tx from RoB will be reverted // var callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) var callBundleRes []flashbotsrpc.FlashbotsCallBundleResult // not RoB - if domain != originChainIDStr { + if domain != TestOriginChainIDStr { for _, tx := range expectedTxs { callBundleRes = append(callBundleRes, 
flashbotsrpc.FlashbotsCallBundleResult{ TxHash: tx.Hash().Hex(), @@ -3232,8 +2454,8 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { require.NoError(t, err) validationReq := common.BlockValidationRequest{ Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain]), - StateBlockNumber: uint64ToHexString(blockNumbers[domain]), + BlockNumber: uint64ToHexString(TestBlockNumbers[domain]), + StateBlockNumber: uint64ToHexString(TestBlockNumbers[domain]), } backend.simulator.EXPECT(). SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). @@ -3244,15 +2466,10 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { require.Equal(t, http.StatusOK, rrCode1) require.Equal(t, 2, len(backend.arcadia.chunkManager.Chunks())) - validTxs, cmHeights, err := backend.arcadia.chunkManager.Txs() + _, cmHeights, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - t.Log("expected txs") - common.DisplayEthTxs(bundleTxs) - t.Log("validTxs from chunk manager") - common.DisplayEthTxs(validTxs) - TxsTheSame(t, bundleTxs, validTxs) - require.Equal(t, blockNumbers, cmHeights) + require.Equal(t, TestBlockNumbers, cmHeights) for _, bundle := range tobReq.Chunk.ToB.GetBundles() { status, err := backend.redis.GetBundleStatus(bundle.BundleHash) require.NoError(t, err) @@ -3261,59 +2478,35 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { }) t.Run("below intrinsic gas", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // constructing ToB bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { + TestOriginChainIDStr: { // this tx below intrinsic gas - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 20999, nil), + 
CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 20999, nil), }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), + TestRemoteChainIDStr: { + CreateEthTransfer(t, TestRemoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, TestAccounts[1].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) - oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) - require.NoError(t, err) + tobOpts := buildToBOptsWithBundles(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce, bundleTxs) + tobReq := CreateToBReq(t, tobOpts) - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: oSeqTx, - RevertingTxHashes: nil, - } + backend := newTestBackend(t) - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) + // sends two robs to warmup the tob + rob1, rob2 := backend.setupRoBsForToBTest(t) + + backend.setTestAuctionWinner(t, epoch) - backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) + backend.simulator.EXPECT().GetBlockNumber([]string{TestOriginChainIDStr, TestRemoteChainIDStr}).Return(TestBlockNumbers, nil) - backend.SetupRegisteredRollups(epoch, originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) + backend.SetupRegisteredRollups(epoch, TestRemoteChainID) // set up mock expectations // shared calls for both 
chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - // adding new tob will create a new layer, which will try submit the previous layer to SEQ - actsChan := make(chan []chain.Action) - backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.seqcli.EXPECT().Parser().Return(TestChainParser) t.Log("========setup & send TOB============") tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) @@ -3322,41 +2515,24 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(allTxs) + tobTxs[TestOriginChainIDStr] = slices.Insert(tobTxs[TestOriginChainIDStr], 0, rob1.Chunk.RoB.GetTxs()[0]) + tobTxs[TestRemoteChainIDStr] = slices.Insert(tobTxs[TestRemoteChainIDStr], 0, rob2.Chunk.RoB.GetTxs()[0]) // expectations for the first tob for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + matchTxs := ExpectedMatchTxs(expectedTxs) + 
backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(balances, nil) } + backend.setupSimBlockAndGetUsedExpectation(t, rob1.Chunk.RoB.GetTxs(), TestBlockNumbers[TestOriginChainIDStr], TestOriginChainIDStr) + backend.setupSimBlockAndGetUsedExpectation(t, rob2.Chunk.RoB.GetTxs(), TestBlockNumbers[TestRemoteChainIDStr], TestRemoteChainIDStr) rrCode := processBlockRequest(backend, tobReq) - require.Equal(t, http.StatusOK, rrCode) - cmTxs, _, err := backend.arcadia.chunkManager.Txs() - require.NoError(t, err) - TxsTheSame(t, nil, cmTxs) - require.Equal(t, 2, len(backend.arcadia.chunkManager.Chunks())) + require.Equal(t, http.StatusNoContent, rrCode) + require.Equal(t, 1, len(backend.arcadia.chunkManager.Chunks())) for _, bundle := range tobReq.Chunk.ToB.GetBundles() { status, err := backend.redis.GetBundleStatus(bundle.BundleHash) require.NoError(t, err) @@ -3365,26 +2541,18 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { }) t.Run("tx simulation allowed revert", func(t *testing.T) { - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - originTx := CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil) + originTx := CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, 1, 21000, nil) // constructing ToB bundleTxs := map[string]ethtypes.Transactions{ - originChainIDStr: { + TestOriginChainIDStr: { originTx, }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 
100, testAccounts[1].Nonce, 21000, nil), + TestRemoteChainIDStr: { + CreateEthTransfer(t, TestRemoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, TestAccounts[1].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) require.NoError(t, err) @@ -3398,80 +2566,49 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + ToBBlockNumber: TestBlockNumbers, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Bundles: []*common.CrossRollupBundle{&bundle}, }) - // constructing RoB - robTxs := ethtypes.Transactions{ - // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), - } - robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch, - SeqChainID: testSeqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] + 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - Txs: robTxs, - }) - backend := newTestBackend(t) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + + // sends two robs to warmup the tob + rob1, rob2 := backend.setupRoBsForToBTest(t) + + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) + backend.simulator.EXPECT().GetBlockNumber([]string{TestOriginChainIDStr, TestRemoteChainIDStr}).Return(TestBlockNumbers, nil) - backend.SetupRegisteredRollups(epoch, 
originChainID) - backend.SetupRegisteredRollups(epoch, remoteChainID) + backend.SetupRegisteredRollups(epoch, TestOriginChainID) + backend.SetupRegisteredRollups(epoch, TestRemoteChainID) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - // adding new tob will create a new layer, which will try submit the previous layer to SEQ - actsChan := make(chan []chain.Action) - backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.seqcli.EXPECT().Parser().Return(TestChainParser) t.Log("========setup & send TOB============") tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(tobTxs) - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk, &robReq.Chunk}) + allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk, &rob1.Chunk, &rob2.Chunk}) require.NoError(t, err) common.DisplayEthTxs(allTxs) + tobTxs[TestOriginChainIDStr] = slices.Insert(tobTxs[TestOriginChainIDStr], 0, rob1.Chunk.RoB.GetTxs()[0]) + tobTxs[TestRemoteChainIDStr] = slices.Insert(tobTxs[TestRemoteChainIDStr], 0, rob2.Chunk.RoB.GetTxs()[0]) // expectations for the first tob for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - 
fmt.Printf("expected balance for domain(%s): %+v\n", domain, balances) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + + matchTxs := ExpectedMatchTxs(expectedTxs) + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(TestBlockNumbers[domain])).Return(balances, nil) // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) @@ -3494,8 +2631,8 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { require.NoError(t, err) validationReq := common.BlockValidationRequest{ Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain]), - StateBlockNumber: uint64ToHexString(blockNumbers[domain]), + BlockNumber: uint64ToHexString(TestBlockNumbers[domain]), + StateBlockNumber: uint64ToHexString(TestBlockNumbers[domain]), } backend.simulator.EXPECT(). SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
@@ -3507,7 +2644,9 @@ func TestHandleSubmitNewBlockRequest(t *testing.T) { cmTxs, _, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - TxsTheSame(t, bundleTxs, cmTxs) + + TxsTheSameUnordered(t, allTxs, cmTxs) + for _, bundle := range tobReq.Chunk.ToB.GetBundles() { status, err := backend.redis.GetBundleStatus(bundle.BundleHash) require.NoError(t, err) @@ -3527,21 +2666,9 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { for _, tc := range tests { epoch := uint64(0) - // Build hypersdk registry - var chainParser = &srpc.Parser{} - _, _ = chainParser.Registry() - - // Build test builder keys - testBuilderSecretKey, err := bls.GenerateRandomSecretKey() - require.NoError(t, err) - testBuilderPublicKey, err := bls.PublicKeyFromSecretKey(testBuilderSecretKey) - require.NoError(t, err) - testSeqChainID := ids.GenerateTestID() - builderPkBytes := testBuilderPublicKey.Bytes() - // test ethereum signing keys t.Log("account info") - for _, acct := range testAccounts { + for _, acct := range TestAccounts { t.Logf("acct(%s) info: nonce(%d), balance(%d)\n", acct.Address.Hex(), acct.Nonce, acct.Balance.Int64()) } @@ -3557,28 +2684,27 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, + SeqChainID: TestSeqChainID, RoBChainID: originChainID, RoBBlockNumber: blockNumbers[originChainIDStr], - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) // instantiate backend backend 
:= newTestBackendWithFlags(t, tc.disableRedis) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) targetEpoch := epoch chainID := originChainID - mockPubKeyBytes := mockPublicKey.Bytes() + mockPubKeyBytes := TestMockPublicKey.Bytes() namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) rollup := actions.RollupInfo{ Namespace: namespace, @@ -3586,8 +2712,7 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { SequencerPublicKey: mockPubKeyBytes[:], } rollups := []*actions.RollupInfo{&rollup} - err = backend.SetMockSeqClientRollupExpectations(targetEpoch, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(targetEpoch, rollups) t.Run(tc.name()+"rollup getPayload first", func(t *testing.T) { payload := &common.GetPayloadRequest{ @@ -3602,7 +2727,7 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { payloadBytes, err := json.Marshal(payload) require.NoError(t, err) payloadHash, _ := common.Sha256HashPayload(payloadBytes) - sig := bls.Sign(mockSecretKey, payloadHash[:]) + sig := bls.Sign(TestMockSecretKey, payloadHash[:]) sigBytes := sig.Bytes() sigStr := hexutil.Encode(sigBytes[:]) rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) @@ -3612,7 +2737,7 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { t.Run(tc.name()+"submit req for fetched height that would fail", func(t *testing.T) { // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) rr := backend.request(http.MethodPost, pathSubmitNewBlockRequest, robReq) require.Equal(t, http.StatusBadRequest, rr.Code) msg, err := io.ReadAll(rr.Body) @@ -3623,40 +2748,29 @@ func TestLastFetchedRollupNumberFilterCheck(t *testing.T) { 
t.Run(tc.name()+"submit block req for the next block should work", func(t *testing.T) { robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq2 := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, - SeqChainID: testSeqChainID, + SeqChainID: TestSeqChainID, RoBChainID: originChainID, RoBBlockNumber: blockNumbers[originChainIDStr] + 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) for domain, expectedTxs := range map[string]ethtypes.Transactions{ originChainIDStr: robTxs, } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) + matchTxs := ExpectedMatchTxs(expectedTxs) backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil).Maybe() @@ -3690,18 +2804,6 @@ func TestLastFetchedRollupNumberFilterCheck(t 
*testing.T) { func TestWarmupPeriodAdvancement(t *testing.T) { warmupEpoch := uint64(1) - // Build hypersdk registry - var chainParser = &srpc.Parser{} - _, _ = chainParser.Registry() - - // Build test builder keys - testBuilderSecretKey, err := bls.GenerateRandomSecretKey() - require.NoError(t, err) - testBuilderPublicKey, err := bls.PublicKeyFromSecretKey(testBuilderSecretKey) - require.NoError(t, err) - testSeqChainID := ids.GenerateTestID() - builderPkBytes := testBuilderPublicKey.Bytes() - // Helper for processing block requests to the backend. Returns the status code of the request. processBlockRequest := func(backend *testBackend, blockReq *common.SubmitNewBlockRequest) int { rr := backend.request(http.MethodPost, pathSubmitNewBlockRequest, blockReq) @@ -3710,7 +2812,7 @@ func TestWarmupPeriodAdvancement(t *testing.T) { // test ethereum signing keys t.Log("account info") - for _, acct := range testAccounts { + for _, acct := range TestAccounts { t.Logf("acct(%s) info: nonce(%d), balance(%d)\n", acct.Address.Hex(), acct.Nonce, acct.Balance.Int64()) } @@ -3726,49 +2828,37 @@ func TestWarmupPeriodAdvancement(t *testing.T) { // constructing RoB robTxs := ethtypes.Transactions{ // conflicting tx with prev ToB - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: warmupEpoch + 1, - SeqChainID: testSeqChainID, + SeqChainID: TestSeqChainID, RoBChainID: originChainID, RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Txs: robTxs, }) // instantiate backend backend := newTestBackend(t) - err = 
backend.arcadia.datastore.SetAuctionWinner(warmupEpoch+1, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, warmupEpoch+1) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) for domain, expectedTxs := range map[string]ethtypes.Transactions{ originChainIDStr: robTxs, } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) + matchTxs := ExpectedMatchTxs(expectedTxs) backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) @@ -3809,7 +2899,7 @@ func TestWarmupPeriodAdvancement(t *testing.T) { // rollup setup targetEpoch := warmupEpoch + 2 chainID := *big.NewInt(45200) - mockPubKeyBytes := mockPublicKey.Bytes() + mockPubKeyBytes := TestMockPublicKey.Bytes() namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) rollup := actions.RollupInfo{ Namespace: namespace, @@ -3817,8 +2907,7 @@ func TestWarmupPeriodAdvancement(t *testing.T) { SequencerPublicKey: mockPubKeyBytes[:], } rollups := []*actions.RollupInfo{&rollup} - err = 
backend.SetMockSeqClientRollupExpectations(targetEpoch, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(targetEpoch, rollups) // advances to next epoch, note we have to skip slot consistency check because we advance slot already backend.AdvanceToNextEpoch(t, nil, false) @@ -3831,6 +2920,7 @@ func TestWarmupPeriodAdvancement(t *testing.T) { // This tests assumes that the chunks have been preconf'd and the payload(wherever it has to go) // is in the right place in the database that we can retrieve. +// TODO: Verify this one func TestGetPayload(t *testing.T) { // Setup backend with headSlot and genesisTime epoch := uint64(0) @@ -3841,22 +2931,11 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) seqPKBytes := seqPK.Bytes() - // Build hypersdk registry - var cli = srpc.Parser{} - _, _ = cli.Registry() - chainParser := &cli - chainID1 := *big.NewInt(45200) chainID2 := *big.NewInt(45201) chainID3 := *big.NewInt(45202) // Build test builder keys - testBuilderSecretKey, err := bls.GenerateRandomSecretKey() - require.NoError(t, err) - testBuilderPublicKey, err := bls.PublicKeyFromSecretKey(testBuilderSecretKey) - require.NoError(t, err) - testSeqChainID := ids.GenerateTestID() - builderPkBytes := testBuilderPublicKey.Bytes() testCurrToBNonce := uint64(3) testPrevToBNonce := uint64(0) @@ -3864,6 +2943,7 @@ func TestGetPayload(t *testing.T) { processBlockRequest := func(backend *testBackend, payload *common.GetPayloadRequest, sigStr string) (int, []byte) { // new HTTP req rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) + require.Equal(t, http.StatusOK, rr.Code) if rr.Body != nil { return rr.Code, rr.Body.Bytes() @@ -3875,8 +2955,7 @@ func TestGetPayload(t *testing.T) { setupBackend := func() *testBackend { backend := newTestBackend(t) backend.arcadia.sizeTracker.SetLowestSlot(slot) - err = backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + 
backend.setTestAuctionWinner(t, epoch) //register rollup rollup1 := &actions.RollupInfo{ Namespace: common.ChainIDToNamespace(&chainID1), @@ -3915,14 +2994,14 @@ func TestGetPayload(t *testing.T) { testChunkID := ids.GenerateTestID() robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkID, ChainIDs: []*big.Int{&chainID1}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundle, } - robChunkReq := CreateTestChunkSubmission(t, robBlockOpts, chainParser) + robChunkReq := CreateTestChunkSubmission(t, robBlockOpts, TestChainParser) robChunk := robChunkReq.Chunk.RoB err = backend.arcadia.redis.SetRoBChunk(robChunk) require.NoError(t, err) @@ -3934,7 +3013,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) manager := backend.arcadia.chunkManager manager.SetHighestPreconfedToB(testCurrToBNonce + 1) - require.NoError(t, robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, robChunkReq.Chunk.Initialize(TestChainParser)) ethTxs, err := robChunkReq.Chunk.Txs() require.NoError(t, err) ethOtxs := make(map[string][]hexutil.Bytes) @@ -3977,8 +3056,8 @@ func TestGetPayload(t *testing.T) { sigBytes := sig.Bytes() sigStr := hexutil.Encode(sigBytes[:]) - rrCode, _ := processBlockRequest(backend, payload, sigStr) - require.Equal(t, http.StatusServiceUnavailable, rrCode) + rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) + require.Equal(t, http.StatusServiceUnavailable, rr.Code) }) t.Run("case 1: 1 rollup, only rob", func(t *testing.T) { @@ -3988,15 +3067,15 @@ func TestGetPayload(t *testing.T) { testChunkID := ids.GenerateTestID() robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - 
SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkID, ChainIDs: []*big.Int{&chainID1}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundle, } - robChunkReq := CreateTestChunkSubmission(t, robBlockOpts, chainParser) + robChunkReq := CreateTestChunkSubmission(t, robBlockOpts, TestChainParser) robChunk := robChunkReq.Chunk.RoB chainIDBigInt, err := common.StrToChainID(robChunk.ChainID) @@ -4013,7 +3092,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) manager := backend.arcadia.chunkManager manager.SetHighestPreconfedToB(testCurrToBNonce + 1) - require.NoError(t, robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, robChunkReq.Chunk.Initialize(TestChainParser)) ethTxs, err := robChunkReq.Chunk.Txs() require.NoError(t, err) @@ -4077,7 +3156,7 @@ func TestGetPayload(t *testing.T) { t.Run("case 2: 2 rollups, rob for each rollup.", func(t *testing.T) { backend := setupBackend() - backend.seqcli.EXPECT().Parser().Return(chainParser).Maybe() + backend.seqcli.EXPECT().Parser().Return(TestChainParser).Maybe() numOfTxsPerChunkOrBundle1 := 8 numOfTxsPerChunkOrBundle2 := 9 testChunkID1 := ids.GenerateTestID() @@ -4085,25 +3164,25 @@ func TestGetPayload(t *testing.T) { manager := backend.arcadia.chunkManager r1robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkID1, ChainIDs: []*big.Int{&chainID1}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundle1, } r2robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: 
*TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkID2, ChainIDs: []*big.Int{&chainID2}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundle2, } // setup r1 rob chunk. - r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, chainParser) + r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, TestChainParser) r1robChunk := r1robChunkReq.Chunk.RoB err = backend.arcadia.redis.SetRoBChunk(r1robChunk) require.NoError(t, err) @@ -4113,9 +3192,9 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) err = backend.arcadia.db.SetToBNonceOfRoB(r1robChunk.ChainID, r1robChunk.BlockNumber, testCurrToBNonce) require.NoError(t, err) - require.NoError(t, r1robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, r1robChunkReq.Chunk.Initialize(TestChainParser)) // setup r2 rob chunk. - r2robChunkReq := CreateTestChunkSubmission(t, r2robBlockOpts, chainParser) + r2robChunkReq := CreateTestChunkSubmission(t, r2robBlockOpts, TestChainParser) r2robChunk := r2robChunkReq.Chunk.RoB err = backend.arcadia.redis.SetRoBChunk(r2robChunk) require.NoError(t, err) @@ -4125,7 +3204,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) err = backend.arcadia.db.SetToBNonceOfRoB(r2robChunk.ChainID, r2robChunk.BlockNumber, testCurrToBNonce) require.NoError(t, err) - require.NoError(t, r2robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, r2robChunkReq.Chunk.Initialize(TestChainParser)) for nonce := testPrevToBNonce; nonce <= testCurrToBNonce; nonce++ { err := backend.redis.SetPayloadTxsToB(nonce, r1robChunk.ChainID, &common.PayloadTxs{ @@ -4227,10 +3306,8 @@ func TestGetPayload(t *testing.T) { t.Run("test one ToB and two RoBs, where the RoB for Chain2 is not submitted, but we still can deliver the payload", func(t *testing.T) { backend := setupBackend() - backend.seqcli.EXPECT().Parser().Return(chainParser).Maybe() - // in case AddChunk trigger submission - actsChan := make(chan []chain.Action) - 
backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.seqcli.EXPECT().Parser().Return(TestChainParser).Maybe() + backend.SetupMockDASubmitter() numOfTxsPerChunkOrBundleToB := 8 numOfTxsPerChunkOrBundleRoB := 9 @@ -4241,39 +3318,40 @@ func TestGetPayload(t *testing.T) { manager := backend.arcadia.chunkManager tobBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkIDToB, ChainIDs: []*big.Int{&chainID1, &chainID2}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundleToB, } // setup tob chunk. - tobChunkReq := CreateTestChunkSubmission(t, tobBlockOpts, chainParser) + tobChunkReq := CreateTestChunkSubmission(t, tobBlockOpts, TestChainParser) tobChunk := tobChunkReq.Chunk.ToB err = backend.arcadia.redis.SetToBChunk(testCurrToBNonce, tobChunk) require.NoError(t, err) - require.NoError(t, tobChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, tobChunkReq.Chunk.Initialize(TestChainParser)) // setup r1 rob chunk. 
r1robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkIDRoB, ChainIDs: []*big.Int{&chainID1}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundleRoB, RoBBlockNumber: tobChunk.GetBlockNumber()[chainID1str], } - r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, chainParser) + r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, TestChainParser) r1robChunk := r1robChunkReq.Chunk.RoB err = backend.arcadia.redis.SetRoBChunk(r1robChunk) require.NoError(t, err) err = backend.arcadia.db.SetRoBChunk(r1robChunk) require.NoError(t, err) + // set the previous rob ToBNonce and current ToBNonce err = backend.arcadia.redis.SetToBNonceOfRoB(r1robChunk.ChainID, r1robChunk.BlockNumber-1, testPrevToBNonce) require.NoError(t, err) @@ -4283,7 +3361,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) err = backend.arcadia.db.SetToBNonceOfRoB(r1robChunk.ChainID, r1robChunk.BlockNumber, testCurrToBNonce) require.NoError(t, err) - require.NoError(t, r1robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, r1robChunkReq.Chunk.Initialize(TestChainParser)) // make sure the head tobnonce equals to testCurrToBNonce manager.SetHighestPreconfedToB(testCurrToBNonce) @@ -4374,6 +3452,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) Txs := resp.Transactions require.Equal(t, numOfTxsPerChunkOrBundleToB+numOfTxsPerChunkOrBundleRoB, len(Txs)) + // rob2 were not preconfirmed, but we still can deliver r2Payload := &common.GetPayloadRequest{ ChainID: chainID2str, @@ -4407,9 +3486,9 @@ func TestGetPayload(t *testing.T) { manager := backend.arcadia.chunkManager tob1Opts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, 
- SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkIDToB1, ChainIDs: []*big.Int{&chainID1, &chainID2}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundleToB, @@ -4417,40 +3496,40 @@ func TestGetPayload(t *testing.T) { } // setup tob chunk. - tob1ChunkReq := CreateTestChunkSubmission(t, tob1Opts, chainParser) + tob1ChunkReq := CreateTestChunkSubmission(t, tob1Opts, TestChainParser) tob1Chunk := tob1ChunkReq.Chunk.ToB err = backend.arcadia.redis.SetToBChunk(tob1ChunkReq.Chunk.ToBNonce, tob1Chunk) require.NoError(t, err) - require.NoError(t, tob1ChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, tob1ChunkReq.Chunk.Initialize(TestChainParser)) tob2Opts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkIDToB2, ChainIDs: []*big.Int{&chainID1, &chainID2}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundleToB, ToBBlockNumber: tob1Chunk.GetBlockNumber(), } - tob2ChunkReq := CreateTestChunkSubmission(t, tob2Opts, chainParser) + tob2ChunkReq := CreateTestChunkSubmission(t, tob2Opts, TestChainParser) tob2Chunk := tob2ChunkReq.Chunk.ToB err = backend.arcadia.redis.SetToBChunk(tob2ChunkReq.Chunk.ToBNonce, tob2Chunk) require.NoError(t, err) - require.NoError(t, tob2ChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, tob2ChunkReq.Chunk.Initialize(TestChainParser)) // setup r1 rob chunk. 
r1robBlockOpts := &CreateTestBlockSubmission2Opts{ Epoch: epoch, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, - SeqChainID: testSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + SeqChainID: TestSeqChainID, ChunkID: testChunkIDRoB, ChainIDs: []*big.Int{&chainID1}, NumOfTxsPerChunkOrBundle: numOfTxsPerChunkOrBundleRoB, RoBBlockNumber: tob1Chunk.GetBlockNumber()[chainID1str], } - r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, chainParser) + r1robChunkReq := CreateTestChunkSubmission(t, r1robBlockOpts, TestChainParser) r1robChunk := r1robChunkReq.Chunk.RoB err = backend.arcadia.redis.SetRoBChunk(r1robChunk) require.NoError(t, err) @@ -4465,7 +3544,7 @@ func TestGetPayload(t *testing.T) { require.NoError(t, err) err = backend.arcadia.db.SetToBNonceOfRoB(r1robChunk.ChainID, r1robChunk.BlockNumber, testCurrToBNonce) require.NoError(t, err) - require.NoError(t, r1robChunkReq.Chunk.Initialize(chainParser)) + require.NoError(t, r1robChunkReq.Chunk.Initialize(TestChainParser)) manager.SetHighestPreconfedToB(testCurrToBNonce) @@ -4621,44 +3700,38 @@ func TestOverallBasicFlow(t *testing.T) { auctionBidPath := "/arcadia/v1/builder/auction_bid" getBestBidPath := "/arcadia/v1/builder/best_auction_bid/" - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - originNamespace := common.ChainIDToNamespace(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - epoch := arcadia.headEpoch.Load() require.Equal(t, epoch, uint64(0)) - var cli = srpc.Parser{} - _, _ = cli.Registry() - chainParser := &cli - - builderSk, builderPk, err := bls.GenerateNewKeypair() - require.NoError(t, err) - builderPkBytes := builderPk.Bytes() + builderSk := TestBuilderSecretKey + builderPk := TestBuilderPublicKey + builderPkBytes := TestBuilderPkBytes seqChainID := ids.GenerateTestID() blockNumbers := map[string]uint64{ - 
originChainIDStr: 100, - remoteChainIDStr: 50, + TestOriginChainIDStr: 100, + TestRemoteChainIDStr: 50, } // test ethereum signing keys robTxs := ethtypes.Transactions{ - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } - // constructing RoB, we register at currentEpoch + 2 - robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ + robOpts := &CreateTestBlockSubmissionOpts{ Epoch: epoch + 2, - SeqChainID: seqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, + OriginChainID: *TestRemoteChainID, + RemoteChainID: *TestOriginChainID, + SeqChainID: TestSeqChainID, BuilderPubkey: *builderPk, BuilderSecretkey: *builderSk, + RoBBlockNumber: blockNumbers[TestRemoteChainIDStr] - 1, + RoBChainID: TestRemoteChainID, + ChunkID: TestChunkID, Txs: robTxs, - }) + IsToB: false, + } + robReq := CreateRoBReq(t, robOpts) // validators setup validatorSk1, validatorPk1, err := bls.GenerateNewKeypair() @@ -4710,16 +3783,21 @@ func TestOverallBasicFlow(t *testing.T) { validators := []*hrpc.Validator{&validatorNode1, &validatorNode2, &validatorNode3, &validatorNode4, &validatorNode5} // rollup setup targetEpoch := epoch + 2 - chainID := *big.NewInt(45200) - mockPubKeyBytes := mockPublicKey.Bytes() - namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) + //chainID := *big.NewInt(45200) + mockPubKeyBytes := TestMockPublicKey.Bytes() + //namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) + // TODO: Check logic changed in this commit. Enrolled a remote chain id rollup since submitRoBChunk registers a origin chain id rollup. 
rollup := actions.RollupInfo{ - Namespace: namespace, + Namespace: TestRemoteNamespace, StartEpoch: targetEpoch, SequencerPublicKey: mockPubKeyBytes[:], } rollups := []*actions.RollupInfo{&rollup} + backend.SetMockSeqClientRollupExpectations(targetEpoch, rollups) + + require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, TestRemoteNamespace)) + // set up mock seq client networkID := uint32(1337) backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(validators).Maybe() @@ -4807,13 +3885,12 @@ func TestOverallBasicFlow(t *testing.T) { // registers rollups epochs for n+1. seq client will then check to see if rollup is allowed to fetch payload later using this info. // Advance to epoch 1 // Note the expected registered rollups should be for epoch 2 - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, true) require.Equal(t, epoch, uint64(1)) - require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, originNamespace)) + require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, TestRemoteNamespace)) // Epoch lowest tob nonce should have been set lowestToBNonce, err := backend.redis.GetEpochLowestToBNonce(epoch) @@ -4889,13 +3966,12 @@ func TestOverallBasicFlow(t *testing.T) { // This should be triggered when we advance the slots and auction notification happens // create test chain tx to return for seqclient mock originChainID := big.NewInt(0x40000) - testSeqChainID := ids.GenerateTestID() bundleTxs := map[string]ethtypes.Transactions{ "0x40000": { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, 
bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) backend.seqcli.EXPECT().GenerateTransaction(mock.Anything, mock.Anything).Return(seqTx, nil) // Advance to epoch 2 // This will cause a seq block with the above auction winner to be produced @@ -4906,8 +3982,7 @@ func TestOverallBasicFlow(t *testing.T) { SecretKey: &sk, } - err = backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, seqAuctionWinnerInfo, true) require.Equal(t, epoch, uint64(2)) @@ -4933,64 +4008,15 @@ func TestOverallBasicFlow(t *testing.T) { }) t.Run("basic flow - builder submit new block, just RoB for epoch 2"+tc.name(), func(t *testing.T) { - // Helper for processing block requests to the backend. Returns the status code of the request. - processBlockRequest := func(backend *testBackend, blockReq *common.SubmitNewBlockRequest) int { - rr := backend.request(http.MethodPost, pathSubmitNewBlockRequest, blockReq) - return rr.Code - } - redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - - for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, - } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) - 
backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) - - // simulation results, only the txs from this RoB will be simulated and the txs from ToB will be filtered out - callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) - for _, tx := range expectedTxs { - callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ - TxHash: tx.Hash().Hex(), - Error: "", - Revert: "", - }) - } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) - require.NoError(t, err) - validationReq := common.BlockValidationRequest{ - Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - StateBlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - } - backend.simulator.EXPECT(). - SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). - Return(100, callBundleRes, nil) - } + backend.seqcli.EXPECT().Parser().Return(TestChainParser) - rrCode1 := processBlockRequest(backend, robReq) - require.Equal(t, http.StatusOK, rrCode1) + robReq.Epoch = backend.arcadia.headEpoch.Load() + robReq = backend.submitRoBChunk(t, robOpts, TestChainParser) }) t.Run("basic flow - receive preconfs for pending blocks and construct payloads"+tc.name(), func(t *testing.T) { @@ -5065,8 +4091,7 @@ func TestOverallBasicFlow(t *testing.T) { }) t.Run("basic flow - rollup fetches payload"+tc.name(), func(t *testing.T) { - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, true) require.Equal(t, epoch, uint64(3)) @@ -5077,7 +4102,6 @@ func TestOverallBasicFlow(t *testing.T) { rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) if rr.Body != nil { - fmt.Printf("error: %s\n", rr.Body.String()) return rr.Code, rr.Body.Bytes() } else { return rr.Code, nil @@ -5159,7 
+4183,7 @@ func TestOverallBasicFlow(t *testing.T) { payloadBytes, err := json.Marshal(payload) require.NoError(t, err) payloadHash, _ := common.Sha256HashPayload(payloadBytes) - sig := bls.Sign(mockSecretKey, payloadHash[:]) + sig := bls.Sign(TestMockSecretKey, payloadHash[:]) sigBytes := sig.Bytes() sigStr := hex.EncodeToString(sigBytes[:]) rrCode, rrBytes := processBlockRequest(backend, payload, "0x"+sigStr) @@ -5187,29 +4211,22 @@ func TestOverallFlow(t *testing.T) { } for _, tc := range tests { + t.Log("*** Running Overall Flow param disable_redis [" + strconv.FormatBool(tc.disableRedis) + "] ***") + backend := newTestBackendWithFlags(t, tc.disableRedis) arcadia := backend.GetArcadia() auctionBidPath := "/arcadia/v1/builder/auction_bid" getBestBidPath := "/arcadia/v1/builder/best_auction_bid/" - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - originNamespace := common.ChainIDToNamespace(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - epoch := arcadia.headEpoch.Load() - var cli = srpc.Parser{} - _, _ = cli.Registry() - chainParser := &cli - // builder 1 builderSK, err := bls.GenerateRandomSecretKey() require.NoError(t, err) builderPK, err := bls.PublicKeyFromSecretKey(builderSK) require.NoError(t, err) builderPkBytes := builderPK.Bytes() + // builder 2 builderSK2, err := bls.GenerateRandomSecretKey() require.NoError(t, err) @@ -5217,104 +4234,16 @@ func TestOverallFlow(t *testing.T) { require.NoError(t, err) builderPkBytes2 := builderPK2.Bytes() - seqChainID := ids.GenerateTestID() - blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, - } - - // test ethereum signing keys - robTxs := ethtypes.Transactions{ - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), - } - - // constructing RoB - robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ - 
Epoch: epoch + 2, - SeqChainID: seqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, - BuilderPubkey: *builderPK2, - BuilderSecretkey: *builderSK2, - Txs: robTxs, - }) - - // constructing tob1 - bundle1Txs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce+1, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), - }, - } - seqTx1 := CreateHypersdkBundleTx(t, seqChainID, bundle1Txs) - oSeqTx1, err := chain.MarshalTxs([]*chain.Transaction{seqTx1}) - require.NoError(t, err) - - bundle := common.CrossRollupBundle{ - BundleHash: "0xbundle1", - Txs: oSeqTx1, - RevertingTxHashes: nil, - } - tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch + 2, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *builderPK2, - BuilderSecretkey: *builderSK2, - Bundles: []*common.CrossRollupBundle{&bundle}, - }) - - // constructing tob2 - bundle2Txs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce+2, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce+1, 21000, nil), - }, - } - seqTx2 := CreateHypersdkBundleTx(t, seqChainID, bundle2Txs) - oSeqTx2, err := chain.MarshalTxs([]*chain.Transaction{seqTx2}) - require.NoError(t, err) + // Construct 3 TOBs + 2 RoB reqs + var robReq, robReq2 *common.SubmitNewBlockRequest + tobOpts := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+1, TestAccounts[1].Nonce) + tobReq := CreateToBReq(t, tobOpts) - bundle2 := common.CrossRollupBundle{ - BundleHash: "0xbundle2", - Txs: oSeqTx2, - RevertingTxHashes: nil, - } - tobReq2 := 
CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch + 2, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *builderPK2, - BuilderSecretkey: *builderSK2, - Bundles: []*common.CrossRollupBundle{&bundle2}, - }) - // constructing tob3 - bundle3Txs := map[string]ethtypes.Transactions{ - originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce+3, 21000, nil), - }, - remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce+2, 21000, nil), - }, - } - seqTx3 := CreateHypersdkBundleTx(t, seqChainID, bundle3Txs) - oSeqTx3, err := chain.MarshalTxs([]*chain.Transaction{seqTx3}) - require.NoError(t, err) + tobOpts2 := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+2, TestAccounts[1].Nonce+1) + tobReq2 := CreateToBReq(t, tobOpts2) - bundle3 := common.CrossRollupBundle{ - BundleHash: "0xbundle3", - Txs: oSeqTx3, - RevertingTxHashes: nil, - } - tobReq3 := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ - Epoch: epoch + 2, - ToBBlockNumber: blockNumbers, - BuilderPubkey: *builderPK2, - BuilderSecretkey: *builderSK2, - Bundles: []*common.CrossRollupBundle{&bundle3}, - }) + tobOpts3 := buildDefaultToBOpts(t, epoch, TestAccounts[0].Nonce+3, TestAccounts[1].Nonce+2) + tobReq3 := CreateToBReq(t, tobOpts3) // validators setup validatorSk1, validatorPk1, err := bls.GenerateNewKeypair() @@ -5369,7 +4298,7 @@ func TestOverallFlow(t *testing.T) { chainID := *big.NewInt(45200) chainID2 := *big.NewInt(45201) chainID3 := *big.NewInt(45202) - mockPubKeyBytes := mockPublicKey.Bytes() + mockPubKeyBytes := TestMockPublicKey.Bytes() // creating rollups rollup := common.NewRegisterRollup(chainID.Uint64(), mockPubKeyBytes[:], targetEpoch) @@ -5377,25 +4306,20 @@ func TestOverallFlow(t *testing.T) { rollup3 := common.NewRegisterRollup(chainID3.Uint64(), mockPubKeyBytes[:], targetEpoch) rollups := []*actions.RollupInfo{rollup, 
rollup2, rollup3} + networkID := uint32(1337) // set up mock seq client - networkID := uint32(1337) + backend.seqcli.EXPECT().GetNetworkID().Return(networkID).Maybe() + backend.seqcli.EXPECT().GetChainID().Return(TestSeqChainID).Maybe() + backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(validators).Maybe() backend.seqcli.EXPECT().GetValidatorWeight(validatorNode1.PublicKey).Return(validatorNode1.Weight).Maybe() backend.seqcli.EXPECT().GetValidatorWeight(validatorNode2.PublicKey).Return(validatorNode2.Weight).Maybe() backend.seqcli.EXPECT().GetValidatorWeight(validatorNode3.PublicKey).Return(validatorNode3.Weight).Maybe() backend.seqcli.EXPECT().GetValidatorWeight(validatorNode4.PublicKey).Return(validatorNode4.Weight).Maybe() backend.seqcli.EXPECT().GetValidatorWeight(validatorNode5.PublicKey).Return(validatorNode5.Weight).Maybe() - backend.seqcli.EXPECT().GetChainID().Return(seqChainID).Maybe() - backend.seqcli.EXPECT().GetNetworkID().Return(networkID).Maybe() backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(validators) - // Shut down mock Redis after rollups register since Arcadia relies on SEQ state for valid rollup list, not postgres in this case - //err = backend.redis.Close() - //require.NoError(t, err) - - //backend.useRedis = false - var conns = make([]*websocket.Conn, 0) t.Run("subscribe multiple seq validators to arcadia in order to receive chunk(s)", func(t *testing.T) { path := "/ws/arcadia/v1/validator/subscribe" @@ -5441,7 +4365,7 @@ func TestOverallFlow(t *testing.T) { require.NoError(t, err) t.Log(randomBytes) - uwm, err := warp.NewUnsignedMessage(networkID, seqChainID, randomBytes) + uwm, err := warp.NewUnsignedMessage(networkID, TestSeqChainID, randomBytes) require.NoError(t, err) t.Log(uwm) uwmBytes := uwm.Bytes() @@ -5473,13 +4397,12 @@ func TestOverallFlow(t *testing.T) { // registers rollups epochs for n+1. seq client will then check to see if rollup is allowed to fetch payload later using this info. 
// Advance to epoch 1 // Note the expected registered rollups should be for epoch 2 - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, true) require.Equal(t, epoch, uint64(1)) - require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, originNamespace)) + require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, TestOriginNamespace)) // Epoch lowest tob nonce should have been set if backend.useRedis { @@ -5592,25 +4515,23 @@ func TestOverallFlow(t *testing.T) { // This should be triggered when we advance the slots and auction notification happens // create test chain tx to return for seqclient mock originChainID := big.NewInt(0x40000) - testSeqChainID := ids.GenerateTestID() bundleTxs := map[string]ethtypes.Transactions{ "0x40000": { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) backend.seqcli.EXPECT().GenerateTransaction(mock.Anything, mock.Anything).Return(seqTx, nil).Maybe() // Advance to epoch 2 // This will cause a seq block with the above auction winner to be produced // Note the expected registered rollups should be for epoch 3 seqAuctionWinnerInfo := &SimAuctionWinnerInfo{ - SeqChainID: seqChainID, + SeqChainID: TestSeqChainID, Bid: bid, SecretKey: &sk, } - err = backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, seqAuctionWinnerInfo, true) require.Equal(t, epoch, uint64(2)) @@ -5622,7 +4543,7 @@ 
func TestOverallFlow(t *testing.T) { if lowestToBNonce != nil { require.Equal(t, arcadia.chunkManager.ToBNonce(), *lowestToBNonce) } else { - fmt.Printf("No ToB nonce found for epoch %d in Redis\n", epoch4auction) + t.Logf("No ToB nonce found for epoch %d in Redis\n", epoch4auction) } } else { lowestToBNonce, err := backend.arcadia.db.GetEpochLowestToBNonce(epoch4auction) @@ -5630,7 +4551,7 @@ func TestOverallFlow(t *testing.T) { if lowestToBNonce != nil { require.Equal(t, arcadia.chunkManager.ToBNonce(), *lowestToBNonce) } else { - fmt.Printf("No ToB nonce found for epoch %d in DB\n", epoch4auction) + t.Logf("No ToB nonce found for epoch %d in DB\n", epoch4auction) } } @@ -5644,122 +4565,12 @@ func TestOverallFlow(t *testing.T) { }) t.Run("submit new block, 3 ToBs(1 each rollup) and 1 RoB"+tc.name(), func(t *testing.T) { - // Helper for processing block requests to the backend. Returns the status code of the request. - processBlockRequest := func(backend *testBackend, blockReq *common.SubmitNewBlockRequest) int { - rr := backend.request(http.MethodPost, pathSubmitNewBlockRequest, blockReq) - return rr.Code - } - redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) - - t.Log("=========setup RoB simulation===========") - for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, - } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - - backend.simulator.EXPECT().GetNonces(domain, 
mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Once().Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Once().Return(balances, nil) - - // simulation results, only the txs from this RoB will be simulated and the txs from ToB will be filtered out - callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) - for _, tx := range expectedTxs { - callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ - TxHash: tx.Hash().Hex(), - Error: "", - Revert: "", - }) - } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) - require.NoError(t, err) - validationReq := common.BlockValidationRequest{ - Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - StateBlockNumber: uint64ToHexString(blockNumbers[domain] - 2), - } - backend.simulator.EXPECT(). - SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). - Once(). - Return(uint64(100), callBundleRes, nil) - } - - t.Log("=========setup simulation mocks before tob1==============") - tobChunks := []*common.ArcadiaChunk{&tobReq.Chunk, &tobReq2.Chunk, &tobReq3.Chunk} - for i, tobChunk := range tobChunks { - backend.simulator.EXPECT().GetBlockNumber(tobChunk.ToB.Domains()).Return(blockNumbers, nil) - chunksSoFar := []*common.ArcadiaChunk{&robReq.Chunk} - chunksSoFar = append(chunksSoFar, tobChunks[:i+1]...) 
- t.Logf("num chunks to set mocks: %d", len(chunksSoFar)) - allTxsSoFar, err := common.CollectTxsFromChunks(chunksSoFar) - require.NoError(t, err) - // setup expectations for simulation - for domain, expectedTxs := range allTxsSoFar { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - blockNumberForDomain := blockNumbers[domain] - if domain == robReq.Chunk.RoB.ChainID { - blockNumberForDomain = blockNumbers[domain] - 2 - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) - require.NoError(t, err) - nonces := CollectNoncesFromEthAccounts(relatedAccounts) - balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumberForDomain)).Once().Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumberForDomain)).Once().Return(balances, nil) - - // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out - callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) - for _, tx := range expectedTxs { - callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ - TxHash: tx.Hash().Hex(), - Error: "", - Revert: "", - }) - } - rawExpectedTxs, err := CollectRawTxs(expectedTxs) - require.NoError(t, err) - validationReq := common.BlockValidationRequest{ - Txs: rawExpectedTxs, - BlockNumber: uint64ToHexString(blockNumberForDomain), - StateBlockNumber: uint64ToHexString(blockNumberForDomain), - } - backend.simulator.EXPECT(). - SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). - Once(). 
- Return(uint64(100), callBundleRes, nil).Maybe() - } - } + backend.seqcli.EXPECT().Parser().Return(TestChainParser) err = backend.arcadia.datastore.SetAuctionWinner(targetEpoch, builderPkBytes2[:]) require.NoError(t, err) @@ -5769,24 +4580,44 @@ func TestOverallFlow(t *testing.T) { require.NoError(t, err) t.Log(winner) - rrCode1 := processBlockRequest(backend, robReq) - require.Equal(t, http.StatusOK, rrCode1) + robReq, robReq2 = backend.setupRoBsForToBTestWithBlockNumOffset(t, targetEpoch, 1) + robChunks := common.CollectChunksFromRequests(robReq, robReq2) - rrCode2 := processBlockRequest(backend, tobReq) - require.Equal(t, http.StatusOK, rrCode2) + tobOpts.Epoch = targetEpoch + tobReq := backend.submitToBChunk(t, tobOpts, TestChainParser, robChunks) + require.NotNil(t, tobReq) - rrCode3 := processBlockRequest(backend, tobReq2) - require.Equal(t, http.StatusOK, rrCode3) + tobOpts2.Epoch = targetEpoch + robChunks2 := common.CollectChunksFromRequests(robReq, robReq2, tobReq) + tobReq2 := backend.submitToBChunk(t, tobOpts2, TestChainParser, robChunks2) + require.NotNil(t, tobReq2) - rrCode4 := processBlockRequest(backend, tobReq3) - require.Equal(t, http.StatusOK, rrCode4) + tobOpts3.Epoch = targetEpoch + robChunks3 := common.CollectChunksFromRequests(robReq, robReq2, tobReq, tobReq2) + tobReq3 := backend.submitToBChunk(t, tobOpts3, TestChainParser, robChunks3) + require.NotNil(t, tobReq3) + + backend.simulator.AssertExpectations(t) + backend.seqcli.AssertExpectations(t) }) t.Run("receive preconfs for pending blocks and construct payloads"+tc.name(), func(t *testing.T) { + backend.seqcli.EXPECT().GetNetworkID().Return(networkID).Maybe() + backend.seqcli.EXPECT().GetChainID().Return(TestSeqChainID).Maybe() + + backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(validators).Maybe() + backend.seqcli.EXPECT().GetValidatorWeight(validatorNode1.PublicKey).Return(validatorNode1.Weight).Maybe() + 
backend.seqcli.EXPECT().GetValidatorWeight(validatorNode2.PublicKey).Return(validatorNode2.Weight).Maybe() + backend.seqcli.EXPECT().GetValidatorWeight(validatorNode3.PublicKey).Return(validatorNode3.Weight).Maybe() + backend.seqcli.EXPECT().GetValidatorWeight(validatorNode4.PublicKey).Return(validatorNode4.Weight).Maybe() + backend.seqcli.EXPECT().GetValidatorWeight(validatorNode5.PublicKey).Return(validatorNode5.Weight).Maybe() + backend.seqcli.EXPECT().CurrentValidators(mock.Anything).Return(validators) + // set up backend backend.SetupMockDASubmitter() backend.arcadia.srvStarted.Store(true) rob1 := robReq + rob2 := robReq2 tob1 := tobReq tob2 := tobReq2 tob3 := tobReq3 @@ -5795,6 +4626,10 @@ func TestOverallFlow(t *testing.T) { Chunk: &rob1.Chunk, }) + backend.arcadia.pendingChunksPreConfs.Store(rob2.ChunkID, &common.PreConfInfo{ + Chunk: &rob2.Chunk, + }) + // tob1 backend.arcadia.pendingChunksPreConfs.Store(tob1.ChunkID, &common.PreConfInfo{ Chunk: &tob1.Chunk, @@ -5811,7 +4646,7 @@ func TestOverallFlow(t *testing.T) { // rob case robmsg1 := append([]byte{}, rob1.ChunkID[:]...) 
- uwm, err := warp.NewUnsignedMessage(networkID, seqChainID, robmsg1) + uwm, err := warp.NewUnsignedMessage(networkID, TestSeqChainID, robmsg1) require.NoError(t, err) uwmBytes := uwm.Bytes() @@ -5874,8 +4709,7 @@ func TestOverallFlow(t *testing.T) { }) t.Run("rollup(s) fetch their payload(s)"+tc.name(), func(t *testing.T) { - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, true) require.Equal(t, epoch, uint64(3)) @@ -5884,9 +4718,7 @@ func TestOverallFlow(t *testing.T) { processBlockRequest := func(backend *testBackend, payload *common.GetPayloadRequest, sigStr string) (int, []byte) { // new HTTP req rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) - if rr.Body != nil { - t.Log("error: ", rr.Body.String()) return rr.Code, rr.Body.Bytes() } else { return rr.Code, nil @@ -5933,42 +4765,57 @@ func TestOverallFlow(t *testing.T) { err = backend.arcadia.redis.SetPayloadTxsRoB(robChunk.BlockNumber, chainID, &common.PayloadTxs{ Txs: ethOtxs[chainID], }) + require.NoError(t, err) } else { err = backend.arcadia.db.SetPayloadTxsRoB(robChunk.BlockNumber, chainID, &common.PayloadTxs{ Txs: ethOtxs[chainID], }) + require.NoError(t, err) + + payloadResp := common.GetPayloadResponse{ + Transactions: ethOtxs[chainID], + } + err = backend.arcadia.db.SetPayloadResp(chainID, robChunk.BlockNumber, &payloadResp) + require.NoError(t, err) } - require.NoError(t, err) + payload := &common.GetPayloadRequest{ ChainID: robChunk.ChainID, BlockNumber: robChunk.BlockNumber, } chainIDu64 := binary.LittleEndian.Uint64(rollup.Namespace) chainIDstr := hexutil.EncodeBig(big.NewInt(int64(chainIDu64))) + if backend.useRedis { reqRollup, err := backend.redis.GetRegisterRollup(epoch, chainIDstr) require.NoError(t, err) t.Log(reqRollup) } + payloadBytes, err := json.Marshal(payload) require.NoError(t, err) 
payloadHash, _ := common.Sha256HashPayload(payloadBytes) - sig := bls.Sign(mockSecretKey, payloadHash[:]) + sig := bls.Sign(TestMockSecretKey, payloadHash[:]) sigBytes := sig.Bytes() sigStr := hex.EncodeToString(sigBytes[:]) + rrCode, rrBytes := processBlockRequest(backend, payload, "0x"+sigStr) require.Equal(t, http.StatusOK, rrCode) + respRoB := new(common.GetPayloadResponse) err = json.Unmarshal(rrBytes, respRoB) require.NoError(t, err) + robTxs := respRoB.Transactions - require.NotNil(t, len(robTxs)) + require.Equal(t, len(robTxs), 1) }) for _, conn := range conns { err := conn.Close() require.NoError(t, err) } + + t.Log("*** Finished Overall Flow param disable_redis [" + strconv.FormatBool(tc.disableRedis) + "] ***") } } @@ -5980,40 +4827,30 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { auctionBidPath := "/arcadia/v1/builder/auction_bid" getBestBidPath := "/arcadia/v1/builder/best_auction_bid/" - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - originNamespace := common.ChainIDToNamespace(originChainID) - epoch := arcadia.headEpoch.Load() require.Equal(t, epoch, uint64(0)) - var cli = srpc.Parser{} - _, _ = cli.Registry() - chainParser := &cli - builderSk, builderPk, err := bls.GenerateNewKeypair() require.NoError(t, err) builderPkBytes := builderPk.Bytes() seqChainID := ids.GenerateTestID() blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, + TestOriginChainIDStr: 100, + TestRemoteChainIDStr: 50, } // test ethereum signing keys robTxs := ethtypes.Transactions{ - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } // constructing RoB, we register at currentEpoch + 2 robReq := 
CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch + 2, SeqChainID: seqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, + RoBChainID: TestOriginChainID, + RoBBlockNumber: blockNumbers[TestOriginChainIDStr] - 1, BuilderPubkey: *builderPk, BuilderSecretkey: *builderSk, Txs: robTxs, @@ -6052,7 +4889,7 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { // rollup setup targetEpoch := epoch + 2 chainID := *big.NewInt(45200) - mockPubKeyBytes := mockPublicKey.Bytes() + mockPubKeyBytes := TestMockPublicKey.Bytes() namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) rollup := actions.RollupInfo{ Namespace: namespace, @@ -6129,23 +4966,25 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { require.NoError(t, err) require.True(t, v) - // _, resultMessage, err := c.ReadMessage() - // require.NoError(t, err) + // TODO: Verify the below code and whether we should keep this check + /* + _, resultMessage, err := c.ReadMessage() + require.NoError(t, err) - // require.Equal(t, resultMessage, []byte("Subscription successful")) + require.Equal(t, resultMessage, []byte("Subscription successful")) + */ }) t.Run("basic flow - register a rollup and process the block", func(t *testing.T) { // registers rollups epochs for n+1. seq client will then check to see if rollup is allowed to fetch payload later using this info. 
// Advance to epoch 1 // Note the expected registered rollups should be for epoch 2 - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, false) require.Equal(t, epoch, uint64(1)) - require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, originNamespace)) + require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, TestOriginNamespace)) // Epoch lowest tob nonce should have been set lowestToBNonce, err := backend.redis.GetEpochLowestToBNonce(epoch) @@ -6211,13 +5050,12 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { // This should be triggered when we advance the slots and auction notification happens // create test chain tx to return for seqclient mock originChainID := big.NewInt(0x40000) - testSeqChainID := ids.GenerateTestID() bundleTxs := map[string]ethtypes.Transactions{ "0x40000": { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) backend.seqcli.EXPECT().GenerateTransaction(mock.Anything, mock.Anything).Return(seqTx, nil) // Advance to epoch 2 @@ -6228,8 +5066,7 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { Bid: bid, SecretKey: &sk, } - err = backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, seqAuctionWinnerInfo, false) require.Equal(t, epoch, uint64(2)) @@ -6262,27 +5099,16 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { // set up mock expectations // shared calls for both chunks - 
backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, + TestOriginChainIDStr: robTxs, } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) + matchTxs := ExpectedMatchTxs(expectedTxs) backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil) backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil) @@ -6312,8 +5138,7 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { }) t.Run("basic flow - rollup fetches payload before preconf", func(t *testing.T) { - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) // Helper for processing block requests to the backend. Returns the status code of the request. 
processBlockRequest := func(backend *testBackend, payload *common.GetPayloadRequest, sigStr string) (int, []byte) { @@ -6321,7 +5146,7 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) if rr.Body != nil { - fmt.Printf("error: %s\n", rr.Body.String()) + t.Logf("error: %s\n", rr.Body.String()) return rr.Code, rr.Body.Bytes() } else { return rr.Code, nil @@ -6346,7 +5171,7 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { payloadBytes, err := json.Marshal(payload) require.NoError(t, err) payloadHash, _ := common.Sha256HashPayload(payloadBytes) - sig := bls.Sign(mockSecretKey, payloadHash[:]) + sig := bls.Sign(TestMockSecretKey, payloadHash[:]) sigBytes := sig.Bytes() sigStr := hex.EncodeToString(sigBytes[:]) rrCode, rrBytes := processBlockRequest(backend, payload, "0x"+sigStr) @@ -6361,18 +5186,6 @@ func TestFetchPayloadBeforePreconf(t *testing.T) { func TestToBNonceState(t *testing.T) { epoch := uint64(0) - // Build hypersdk registry - var chainParser = &srpc.Parser{} - _, _ = chainParser.Registry() - - // Build test builder keys - testBuilderSecretKey, err := bls.GenerateRandomSecretKey() - require.NoError(t, err) - testBuilderPublicKey, err := bls.PublicKeyFromSecretKey(testBuilderSecretKey) - require.NoError(t, err) - testSeqChainID := ids.GenerateTestID() - builderPkBytes := testBuilderPublicKey.Bytes() - // Helper for processing block requests to the backend. Returns the status code of the request. 
processBlockRequest := func(backend *testBackend, blockReq *common.SubmitNewBlockRequest) int { rr := backend.request(http.MethodPost, pathSubmitNewBlockRequest, blockReq) @@ -6381,9 +5194,10 @@ func TestToBNonceState(t *testing.T) { // test ethereum signing keys t.Log("account info") - for _, acct := range testAccounts { + for _, acct := range TestAccounts { t.Logf("acct(%s) info: nonce(%d), balance(%d)\n", acct.Address.Hex(), acct.Nonce, acct.Balance.Int64()) } + backend := newTestBackend(t) t.Run("Run valid base case, just ToB", func(t *testing.T) { @@ -6396,16 +5210,19 @@ func TestToBNonceState(t *testing.T) { remoteChainIDStr: 50, } + // sends two robs to warmup the tob + rob1, rob2 := backend.setupRoBsForToBTest(t) + // TODO: change below to ToB helper // constructing ToB bundleTxs := map[string]ethtypes.Transactions{ originChainIDStr: { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, 1, 21000, nil), }, remoteChainIDStr: { - CreateEthTransfer(t, remoteChainID, testAccounts[1].PrivateKey, testAccounts[0].Address, 100, testAccounts[1].Nonce, 21000, nil), + CreateEthTransfer(t, remoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, 0, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx}) require.NoError(t, err) @@ -6418,63 +5235,45 @@ func TestToBNonceState(t *testing.T) { tobReq := CreateToBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch, ToBBlockNumber: blockNumbers, - BuilderPubkey: *testBuilderPublicKey, - BuilderSecretkey: *testBuilderSecretKey, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, Bundles: []*common.CrossRollupBundle{&bundle}, }) - err = 
backend.arcadia.datastore.SetAuctionWinner(epoch, builderPkBytes[:]) - require.NoError(t, err) + backend.setTestAuctionWinner(t, epoch) redis := backend.redis redis.SetSizeTracker(backend.arcadia.sizeTracker) require.Equal(t, uint64(1), backend.arcadia.chunkManager.ToBNonce()) - backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) backend.SetupRegisteredRollups(epoch, originChainID) backend.SetupRegisteredRollups(epoch, remoteChainID) - // set up mock expectations - // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) // adding new tob will create a new layer, which will try submit the previous layer to SEQ actsChan := make(chan []chain.Action) backend.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe() + backend.resetSeqClientMockExpectations(t) + backend.simulator.EXPECT().GetBlockNumber([]string{originChainIDStr, remoteChainIDStr}).Return(blockNumbers, nil) + t.Log("========setup & send TOB============") tobTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(tobTxs) - allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&tobReq.Chunk}) + + allTxs, err := common.CollectTxsFromChunks([]*common.ArcadiaChunk{&rob1.Chunk, &rob2.Chunk, &tobReq.Chunk}) require.NoError(t, err) common.DisplayEthTxs(allTxs) - // expectations for the first tob - for domain, expectedTxs := range tobTxs { - matchTxs := func(txs ethtypes.Transactions) bool { - - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - fmt.Printf("expected txs for domain: %s\n", domain) - fmt.Printf("[") - for _, tx := range expectedTxs { - fmt.Printf("%s, ", tx.Hash().Hex()) - } - fmt.Printf("]\n") - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + // expectations for the first tob + for 
domain, expectedTxs := range allTxs { + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) - backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) - backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) + + backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(ExpectedMatchTxs(expectedTxs)), uint64ToHexString(blockNumbers[domain])).Return(nonces, nil) + backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(ExpectedMatchTxs(expectedTxs)), uint64ToHexString(blockNumbers[domain])).Return(balances, nil) // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) @@ -6501,7 +5300,7 @@ func TestToBNonceState(t *testing.T) { require.Equal(t, http.StatusOK, rrCode) cmTxs, cmHeights, err := backend.arcadia.chunkManager.Txs() require.NoError(t, err) - TxsTheSame(t, bundleTxs, cmTxs) + TxsTheSame(t, allTxs, cmTxs) require.Equal(t, blockNumbers, cmHeights) for _, bundle := range tobReq.Chunk.ToB.GetBundles() { @@ -6510,8 +5309,10 @@ func TestToBNonceState(t *testing.T) { require.Equal(t, common.BundleAccepted, status) } }) + arcadia2, err := NewArcadiaAPI(*backend.currOpts) require.NoError(t, err) + // TODO: this was 1 and failing since cm tob nonce returns 2 below. Since we sent another tob then it should be 2? 
require.Equal(t, uint64(2), arcadia2.chunkManager.ToBNonce()) } @@ -6523,40 +5324,30 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { auctionBidPath := "/arcadia/v1/builder/auction_bid" getBestBidPath := "/arcadia/v1/builder/best_auction_bid/" - originChainID := big.NewInt(45200) - originChainIDStr := hexutil.EncodeBig(originChainID) - remoteChainID := big.NewInt(45201) - remoteChainIDStr := hexutil.EncodeBig(remoteChainID) - originNamespace := common.ChainIDToNamespace(originChainID) - epoch := arcadia.headEpoch.Load() require.Equal(t, epoch, uint64(0)) - var cli = srpc.Parser{} - _, _ = cli.Registry() - chainParser := &cli - builderSk, builderPk, err := bls.GenerateNewKeypair() require.NoError(t, err) builderPkBytes := builderPk.Bytes() seqChainID := ids.GenerateTestID() blockNumbers := map[string]uint64{ - originChainIDStr: 100, - remoteChainIDStr: 50, + TestOriginChainIDStr: 100, + TestRemoteChainIDStr: 50, } // test ethereum signing keys robTxs := ethtypes.Transactions{ - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 20, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), } // constructing RoB, we register at currentEpoch + 2 robReq := CreateRoBReq(t, &CreateTestBlockSubmissionOpts{ Epoch: epoch + 2, SeqChainID: seqChainID, - RoBChainID: originChainID, - RoBBlockNumber: blockNumbers[originChainIDStr] - 1, + RoBChainID: TestOriginChainID, + RoBBlockNumber: blockNumbers[TestOriginChainIDStr] - 1, BuilderPubkey: *builderPk, BuilderSecretkey: *builderSk, Txs: robTxs, @@ -6595,7 +5386,7 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { // rollup setup targetEpoch := epoch + 2 chainID := *big.NewInt(45200) - mockPubKeyBytes := mockPublicKey.Bytes() + mockPubKeyBytes := TestMockPublicKey.Bytes() namespace := binary.LittleEndian.AppendUint64(nil, chainID.Uint64()) rollup := 
actions.RollupInfo{ Namespace: namespace, @@ -6677,13 +5468,12 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { // registers rollups epochs for n+1. seq client will then check to see if rollup is allowed to fetch payload later using this info. // Advance to epoch 1 // Note the expected registered rollups should be for epoch 2 - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, nil, true) require.Equal(t, epoch, uint64(1)) - require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, originNamespace)) + require.True(t, backend.arcadia.datastore.IsRollupRegistered(targetEpoch, TestOriginNamespace)) // Epoch lowest tob nonce should have been set lowestToBNonce, err := backend.redis.GetEpochLowestToBNonce(epoch) @@ -6749,13 +5539,12 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { // This should be triggered when we advance the slots and auction notification happens // create test chain tx to return for seqclient mock originChainID := big.NewInt(0x40000) - testSeqChainID := ids.GenerateTestID() bundleTxs := map[string]ethtypes.Transactions{ "0x40000": { - CreateEthTransfer(t, originChainID, testAccounts[0].PrivateKey, testAccounts[1].Address, 100, testAccounts[0].Nonce, 21000, nil), + CreateEthTransfer(t, originChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, TestAccounts[0].Nonce, 21000, nil), }, } - seqTx := CreateHypersdkBundleTx(t, testSeqChainID, bundleTxs) + seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs) backend.seqcli.EXPECT().GenerateTransaction(mock.Anything, mock.Anything).Return(seqTx, nil) // Advance to epoch 2 @@ -6767,8 +5556,7 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { SecretKey: &sk, } - err = backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + 
backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) epoch = backend.AdvanceToNextEpoch(t, seqAuctionWinnerInfo, true) require.Equal(t, epoch, uint64(2)) @@ -6802,28 +5590,17 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { // set up mock expectations // shared calls for both chunks - backend.seqcli.EXPECT().Parser().Return(chainParser) + backend.seqcli.EXPECT().Parser().Return(TestChainParser) backend.seqcli.EXPECT().CurrentValidatorsTotalWeight().Return(100).Maybe() for domain, expectedTxs := range map[string]ethtypes.Transactions{ - originChainIDStr: robTxs, + TestOriginChainIDStr: robTxs, } { - matchTxs := func(txs ethtypes.Transactions) bool { - if len(txs) != len(expectedTxs) { - return false - } - for i, tx := range txs { - if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { - return false - } - } - return true - } - - relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, testAccounts) + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) require.NoError(t, err) nonces := CollectNoncesFromEthAccounts(relatedAccounts) balances := CollectBalancesFromEthAccounts(relatedAccounts) + matchTxs := ExpectedMatchTxs(expectedTxs) backend.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(nonces, nil).Maybe() backend.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNumbers[domain]-2)).Return(balances, nil).Maybe() @@ -6848,8 +5625,7 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { Return(100, callBundleRes, nil).Maybe() } - err := backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) - require.NoError(t, err) + backend.SetMockSeqClientRollupExpectations(epoch+2, rollups) // preparation for getPayload // Helper for processing block requests to th:e backend. Returns the status code of the request. 
@@ -6858,7 +5634,7 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { rr := backend.RequestWithPayloadHeader(http.MethodPost, pathGetPayload, payload, sigStr) if rr.Body != nil { - fmt.Printf("error: %s\n", rr.Body.String()) + t.Logf("error: %s\n", rr.Body.String()) return rr.Code, rr.Body.Bytes() } else { return rr.Code, nil @@ -6908,7 +5684,7 @@ func TestConcurrentSubmitReqNGetPayload(t *testing.T) { payloadBytes, err := json.Marshal(payload) require.NoError(t, err) payloadHash, _ := common.Sha256HashPayload(payloadBytes) - sig := bls.Sign(mockSecretKey, payloadHash[:]) + sig := bls.Sign(TestMockSecretKey, payloadHash[:]) sigBytes := sig.Bytes() sigStr := hex.EncodeToString(sigBytes[:]) @@ -6953,6 +5729,8 @@ func TestReloadLowestStateToSettledChunks(t *testing.T) { seqCli := mseq.NewMockBaseSeqClient(t) seqCli.EXPECT().GetHighestSettledToBNonce(mock.Anything).Return(highestSettledToBNonce, nil) + actsChan := make(chan []chain.Action) + seqCli.EXPECT().ActsChan().Return(actsChan).Maybe() storeChunk := func(nonce uint64, chunk *common.ArcadiaChunk) { chunkID, err := chunk.ID() @@ -7013,6 +5791,7 @@ func TestReloadLowestStateToSettledChunks(t *testing.T) { t.Run("chunks between LowestStateToBNonce and HighestSettledToBNonce should be reloaded into chunk manager", func(t *testing.T) { backend := newTestbackendWithCustomSEQCliNDatastore(t, seqCli, redisCache, db, false) + layers := backend.arcadia.chunkManager.Chunks() require.Equal(t, int(highestSettledToBNonce-lowestStateToBNonce+2), len(layers)) totalRestored := 0 diff --git a/services/api/test_backend.go b/services/api/test_backend.go new file mode 100644 index 00000000..60026da3 --- /dev/null +++ b/services/api/test_backend.go @@ -0,0 +1,742 @@ +package api + +import ( + "bytes" + "context" + "encoding/hex" + "encoding/json" + "fmt" + "math/big" + "net/http" + "net/http/httptest" + "strconv" + "testing" + "time" + + chunkmanager "github.com/AnomalyFi/Arcadia/chunk_manager" + 
"github.com/AnomalyFi/Arcadia/common" + "github.com/AnomalyFi/Arcadia/database" + mda "github.com/AnomalyFi/Arcadia/datalayer/mocks" + "github.com/AnomalyFi/Arcadia/datastore" + mseq "github.com/AnomalyFi/Arcadia/seq/mocks" + msim "github.com/AnomalyFi/Arcadia/simulator/mocks" + "github.com/AnomalyFi/flashbotsrpc" + "github.com/AnomalyFi/hypersdk/actions" + "github.com/AnomalyFi/hypersdk/chain" + "github.com/AnomalyFi/hypersdk/crypto/ed25519" + srpc "github.com/AnomalyFi/nodekit-seq/rpc" + "github.com/alicebob/miniredis/v2" + "github.com/ava-labs/avalanchego/ids" + "github.com/ethereum/go-ethereum/common/hexutil" + ethtypes "github.com/ethereum/go-ethereum/core/types" + "github.com/flashbots/go-boost-utils/bls" + "github.com/labstack/gommon/log" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" +) + +const ( + testManagerSecretKey = "0x3fae9bafcf1572be9a4d4b7f8e6cb1d0c4bca8ad1e6f75d3d1286ad0e3e5fba1" +) + +type testBackend struct { + t require.TestingT + arcadia *ArcadiaAPI + datastore *datastore.Datastore + redis *datastore.RedisCache + simManager *bls.SecretKey + seqcli *mseq.MockBaseSeqClient + da *mda.MockIDASubmitter + simulator *msim.MockIBlockSimRateLimiter + currOpts *ArcadiaAPIOpts + useRedis bool + daChunksChan chan *common.ArcadiaToSEQChunkMessage +} + +type testParams struct { + disableRedis bool +} + +func (tp *testParams) name() string { + return "tp_" + strconv.FormatBool(tp.disableRedis) + "_" +} + +func newTestBackend(t *testing.T) *testBackend { + return newTestBackendWithFlags(t, false) +} + +func newTestBackendWithRedisDown(t *testing.T) *testBackend { + return newTestBackendWithFlags(t, true) +} + +func newTestBackendWithFlags(t *testing.T, disableRedis bool) *testBackend { + redisClient, err := miniredis.Run() + require.NoError(t, err) + + redisCache, err := datastore.NewRedisCache("", redisClient.Addr(), "") + require.NoError(t, err) + + db := database.NewMockDB() + return 
newTestbackendWithCustomDatastore(t, redisCache, db, disableRedis) +} + +func newTestbackendWithCustomSEQCliNDatastore(t *testing.T, seqClient *mseq.MockBaseSeqClient, redisCache *datastore.RedisCache, db database.IDatabaseService, disableRedis bool) *testBackend { + logger := common.TestLog + logger.Logger.SetLevel(logrus.DebugLevel) + + ds, err := datastore.NewDatastore(redisCache, db, logger) + require.NoError(t, err) + + managerSkBytes, err := hexutil.Decode(testManagerSecretKey) + require.NoError(t, err) + managerSk, err := bls.SecretKeyFromBytes(managerSkBytes) + require.NoError(t, err) + + blockSim := msim.NewMockIBlockSimRateLimiter(t) + + seqClient.EXPECT().SetOnNewBlockHandler(mock.Anything).Return().Maybe() + seqClient.EXPECT().Parser().Return(&srpc.Parser{}).Maybe() + + highestSettledToBNonce, err := seqClient.GetHighestSettledToBNonce(context.TODO()) + require.NoError(t, err) + var lowestToBNonce uint64 + lowestToBNonceRef, err := ds.LoadLowestManagedStateToBNonce() + require.NoError(t, err) + if lowestToBNonceRef != nil { + lowestToBNonce = *lowestToBNonceRef + } + + t.Logf("initializing chunk manager with lowestStateToBNonce: %d settledToBNonce: %d", lowestToBNonce, highestSettledToBNonce) + + config := chunkmanager.ChunkManagerConfig{ + ExpirationTime: 5 * time.Minute, + GCInterval: 10 * time.Second, + LayerSubmissionCheckInterval: 100 * time.Second, + HighestSettledToBNonce: highestSettledToBNonce, + StateLowestToBNonce: lowestToBNonce, + Datastore: ds, + SEQClient: seqClient, + SEQChainParser: seqClient.Parser(), + Logger: common.TestLog, + } + + cm, err := chunkmanager.NewChunkManager(&config) + require.NoError(t, err) + + da := mda.NewMockIDASubmitter(t) + + opts := ArcadiaAPIOpts{ + Log: common.TestLog, + ListenAddr: "localhost:12345", + Datastore: ds, + Redis: redisCache, + DB: db, + DA: da, + ChunkManager: cm, + SeqClient: seqClient, + BlockSimulator: blockSim, + mockMode: true, + SlotSizeLimit: DefaultSizeLimit, + TestScenarioRedisDown: 
disableRedis, + } + + arcadia, err := NewArcadiaAPI(opts) + require.NoError(t, err) + + backend := testBackend{ + t: t, + arcadia: arcadia, + datastore: ds, + redis: redisCache, + simManager: managerSk, + seqcli: seqClient, + da: da, + simulator: blockSim, + currOpts: &opts, + useRedis: !disableRedis, + daChunksChan: make(chan *common.ArcadiaToSEQChunkMessage), + } + + mockPublicKeyBytes := bls.PublicKeyToBytes(TestMockPublicKey) + mockPublicKeyHex := hex.EncodeToString(mockPublicKeyBytes[:]) + backend.datastore.SetKnownValidator("0x"+common.PubkeyHex(mockPublicKeyHex), 0) + backend.setLowestToBNonceForEpoch(t, 0, 0) + return &backend +} + +func newTestbackendWithCustomDatastore(t *testing.T, redisCache *datastore.RedisCache, db database.IDatabaseService, disableRedis bool) *testBackend { + logger := common.TestLog + logger.Logger.SetLevel(logrus.DebugLevel) + + ds, err := datastore.NewDatastore(redisCache, db, logger) + require.NoError(t, err) + + managerSkBytes, err := hexutil.Decode(testManagerSecretKey) + require.NoError(t, err) + managerSk, err := bls.SecretKeyFromBytes(managerSkBytes) + require.NoError(t, err) + + seqClient := mseq.NewMockBaseSeqClient(t) + seqClient.EXPECT().SetOnNewBlockHandler(mock.Anything).Return() + seqClient.EXPECT().Parser().Return(&srpc.Parser{}).Maybe() + + actsChan := make(chan []chain.Action) + seqClient.EXPECT().ActsChan().Return(actsChan).Maybe() + + blockSim := msim.NewMockIBlockSimRateLimiter(t) + + config := chunkmanager.ChunkManagerConfig{ + ExpirationTime: 5 * time.Minute, + GCInterval: 10 * time.Second, + LayerSubmissionCheckInterval: 100 * time.Second, + Datastore: ds, + SEQClient: seqClient, + Logger: common.TestLog, + } + + cm, err := chunkmanager.NewChunkManager(&config) + require.NoError(t, err) + + da := mda.NewMockIDASubmitter(t) + + opts := ArcadiaAPIOpts{ + Log: common.TestLog, + ListenAddr: "localhost:12345", + Datastore: ds, + Redis: redisCache, + DB: db, + DA: da, + ChunkManager: cm, + SeqClient: seqClient, + 
BlockSimulator:        blockSim,
+		mockMode:              true,
+		SlotSizeLimit:         DefaultSizeLimit,
+		TestScenarioRedisDown: disableRedis,
+	}
+
+	arcadia, err := NewArcadiaAPI(opts)
+	require.NoError(t, err)
+
+	backend := testBackend{
+		t:            t,
+		arcadia:      arcadia,
+		datastore:    ds,
+		redis:        redisCache,
+		simManager:   managerSk,
+		seqcli:       seqClient,
+		da:           da,
+		simulator:    blockSim,
+		currOpts:     &opts,
+		useRedis:     !disableRedis,
+		daChunksChan: make(chan *common.ArcadiaToSEQChunkMessage),
+	}
+
+	mockPublicKeyBytes := bls.PublicKeyToBytes(TestMockPublicKey)
+	mockPublicKeyHex := hex.EncodeToString(mockPublicKeyBytes[:])
+	backend.datastore.SetKnownValidator("0x"+common.PubkeyHex(mockPublicKeyHex), 0)
+	backend.setLowestToBNonceForEpoch(t, 0, 0)
+
+	backend.redis.SetSizeTracker(backend.arcadia.sizeTracker)
+	return &backend
+}
+
+// resetSeqClientMockExpectations asserts the current seqclient mock expectations and installs fresh seqclient/simulator mocks
+func (be *testBackend) resetSeqClientMockExpectations(t *testing.T) {
+	be.seqcli.AssertExpectations(t)
+
+	be.seqcli = mseq.NewMockBaseSeqClient(t)
+	be.seqcli.EXPECT().SetOnNewBlockHandler(mock.Anything).Return().Maybe()
+	be.seqcli.EXPECT().Parser().Return(&srpc.Parser{}).Maybe()
+
+	be.simulator = msim.NewMockIBlockSimRateLimiter(t)
+	be.arcadia.setBlockSimRateLimiter(be.simulator)
+
+	actsChan := make(chan []chain.Action)
+	be.seqcli.EXPECT().ActsChan().Return(actsChan).Maybe()
+
+	be.arcadia.setSeqClient(be.seqcli)
+}
+
+func (be *testBackend) setupRoBsForToBTest(t *testing.T) (*common.SubmitNewBlockRequest, *common.SubmitNewBlockRequest) {
+	return be.setupRoBsForToBTestWithBlockNumOffset(t, TestEpoch, 1)
+}
+
+// setupRoBsForToBTestWithBlockNumOffset is a helper which sends two RoB blocks for origin and remote chains.
+// This primes a tob to be sent by sending a RoB for both origin and remote id.
+// Returns both robs block requests that are sent out so that the txs can be examined if needed +func (be *testBackend) setupRoBsForToBTestWithBlockNumOffset(t *testing.T, epoch uint64, offset int) (*common.SubmitNewBlockRequest, *common.SubmitNewBlockRequest) { + be.SetupRegisteredRollups(TestEpoch, TestOriginChainID) + be.SetupRegisteredRollups(TestEpoch, TestRemoteChainID) + + robBlockNum1 := TestBlockNumbers[TestOriginChainIDStr] + robBlockNum2 := TestBlockNumbers[TestRemoteChainIDStr] + + if offset >= 0 { + robBlockNum1 += uint64(offset) + robBlockNum2 += uint64(offset) + } else { + robBlockNum1 -= uint64(-offset) + robBlockNum2 -= uint64(-offset) + } + + log.Infof("Setting up RoBs: OriginChainID Block: %x, RemoteChainID Block: %x", robBlockNum1, robBlockNum2) + + // send in rob chunk for origin chain id + // needed for ToB to be accepted + robChainID := TestOriginChainIDInt + robOpts1 := &CreateTestBlockSubmissionOpts{ + Epoch: epoch, + OriginChainID: *robChainID, + RemoteChainID: *TestRemoteChainIDInt, + SeqChainID: TestSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + RoBBlockNumber: robBlockNum1, + RoBChainID: robChainID, + ChunkID: ids.GenerateTestID(), + Txs: nil, + IsToB: false, + } + rob1 := be.submitRoBChunk(t, robOpts1, TestChainParser) + + // send in rob chunk for remote chain id + // needed for ToB to be accepted + robChainID = TestRemoteChainIDInt + robOpts2 := &CreateTestBlockSubmissionOpts{ + Epoch: epoch, + OriginChainID: *robChainID, + RemoteChainID: *TestRemoteChainIDInt, + SeqChainID: TestSeqChainID, + BuilderPubkey: *TestBuilderPublicKey, + BuilderSecretkey: *TestBuilderSecretKey, + RoBBlockNumber: robBlockNum2, + RoBChainID: robChainID, + ChunkID: ids.GenerateTestID(), + Txs: nil, + IsToB: false, + } + rob2 := be.submitRoBChunk(t, robOpts2, TestChainParser) + + be.resetSeqClientMockExpectations(t) + return rob1, rob2 +} + +func newDatastores(t *testing.T) (*datastore.RedisCache, 
database.IDatabaseService) { + redisClient, err := miniredis.Run() + require.NoError(t, err) + + redisCache, err := datastore.NewRedisCache("", redisClient.Addr(), "") + require.NoError(t, err) + + db := database.NewMockDB() + + return redisCache, db +} + +// setTestAuctionWinner sets the test builder public key as the auction winner for the given epoch +func (be *testBackend) setTestAuctionWinner(t *testing.T, epoch uint64) { + err := be.arcadia.datastore.SetAuctionWinner(epoch, TestBuilderPkBytes[:]) + require.NoError(t, err) +} + +// submitRoBChunk is a helper for submitting an rob chunk to the backend +// Note it will use the opts origin chain id for the chain and opts rob chain id must equal origin chain id +func (be *testBackend) submitRoBChunk(t *testing.T, opts *CreateTestBlockSubmissionOpts, parser *srpc.Parser) *common.SubmitNewBlockRequest { + if opts.RoBChainID == nil { + panic("robchain id is required") + } + + if opts.OriginChainID.Uint64() != opts.RoBChainID.Uint64() { + panic("opts origin chain id must equal rob chain id") + } + + builderPK := opts.BuilderPubkey + builderPKBytes := builderPK.Bytes() + originChainIDStr := common.ChainIDStr(&opts.OriginChainID) + redis := be.redis + redis.SetSizeTracker(be.arcadia.sizeTracker) + + // constructing RoB with txs from origin chain id + if opts.Txs == nil { + robTxs := ethtypes.Transactions{ + // for every first RoB for one chain, TestAccounts[0] is used and the tx nonce used is 0 + CreateEthTransfer(t, &opts.OriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 20, TestAccounts[0].Nonce, 21000, nil), + } + opts.Txs = robTxs + } + robReq := CreateRoBReq(t, opts) + + // builder needs to be the auction winner in order to build + err := be.arcadia.datastore.SetAuctionWinner(opts.Epoch, builderPKBytes[:]) + require.NoError(t, err) + + // register test rollup for the origin chain id so we can accept builder bids + be.SetupRegisteredRollups(opts.Epoch, &opts.OriginChainID) + + // set up chunk 
simulation mock expectations + be.seqcli.EXPECT().Parser().Return(parser) + + t.Log("========setup & send RoB============") + for domain, expectedTxs := range map[string]ethtypes.Transactions{ + originChainIDStr: opts.Txs, + } { + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) + require.NoError(t, err) + nonces := CollectNoncesFromEthAccounts(relatedAccounts) + balances := CollectBalancesFromEthAccounts(relatedAccounts) + + matchTxs := ExpectedMatchTxs(expectedTxs) + be.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(opts.RoBBlockNumber-1)).Return(nonces, nil) + be.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(opts.RoBBlockNumber-1)).Return(balances, nil) + + // simulation results, only the txs from this RoB will be simulated and the txs from ToB will be filtered out + callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) + for _, tx := range expectedTxs { + callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ + TxHash: tx.Hash().Hex(), + Error: "", + Revert: "", + }) + } + rawExpectedTxs, err := CollectRawTxs(expectedTxs) + require.NoError(t, err) + validationReq := common.BlockValidationRequest{ + Txs: rawExpectedTxs, + BlockNumber: uint64ToHexString(opts.RoBBlockNumber - 1), + StateBlockNumber: uint64ToHexString(opts.RoBBlockNumber - 1), + } + be.simulator.EXPECT(). + SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
+			Return(100, callBundleRes, nil)
+	}
+
+	rr := be.request(http.MethodPost, pathSubmitNewBlockRequest, robReq)
+	require.Equal(t, http.StatusOK, rr.Code)
+
+	return robReq
+}
+
+// Builds tob bundle opts for origin and remote tx using test accounts
+func buildDefaultToBOpts(t *testing.T, epoch uint64, originNonce int, remoteNonce int) *CreateTestBlockSubmissionOpts {
+	bundleTxs := map[string]ethtypes.Transactions{
+		TestOriginChainIDStr: {
+			CreateEthTransfer(t, TestOriginChainID, TestAccounts[0].PrivateKey, TestAccounts[1].Address, 100, originNonce, 21000, nil),
+		},
+		TestRemoteChainIDStr: {
+			CreateEthTransfer(t, TestRemoteChainID, TestAccounts[1].PrivateKey, TestAccounts[0].Address, 100, remoteNonce, 21000, nil),
+		},
+	}
+
+	return buildToBOptsWithBundles(t, epoch, originNonce, remoteNonce, bundleTxs)
+}
+
+func buildToBOptsWithBundles(
+	t *testing.T,
+	epoch uint64,
+	originNonce int,
+	remoteNonce int,
+	bundleTxs map[string]ethtypes.Transactions,
+) *CreateTestBlockSubmissionOpts {
+	seqTx := CreateHypersdkBundleTx(t, TestSeqChainID, bundleTxs)
+	oSeqTx, err := chain.MarshalTxs([]*chain.Transaction{seqTx})
+	require.NoError(t, err)
+
+	bundle := common.CrossRollupBundle{
+		BundleHash:        "0xbundle1",
+		Txs:               oSeqTx,
+		RevertingTxHashes: nil,
+	}
+
+	return &CreateTestBlockSubmissionOpts{
+		Epoch:            epoch,
+		ToBBlockNumber:   TestBlockNumbers,
+		BuilderPubkey:    *TestBuilderPublicKey,
+		BuilderSecretkey: *TestBuilderSecretKey,
+		Bundles:          []*common.CrossRollupBundle{&bundle},
+	}
+}
+
+// submitToBChunk is a helper for submitting a tob chunk; two robs are submitted first
+func (be *testBackend) submitToBChunk(t *testing.T, opts *CreateTestBlockSubmissionOpts, parser *srpc.Parser, addlChunks []*common.ArcadiaChunk) *common.SubmitNewBlockRequest {
+	return be.submitToBChunkWithOffset(t, opts, parser, addlChunks, 0)
+}
+
+// submitToBChunkWithOffset is a helper for submitting a tob chunk at a block number offset; two robs are submitted first
+func (be *testBackend) submitToBChunkWithOffset(t *testing.T, opts
*CreateTestBlockSubmissionOpts, parser *srpc.Parser, addlChunks []*common.ArcadiaChunk, offset int) *common.SubmitNewBlockRequest { + be.simulator.EXPECT().GetBlockNumber([]string{TestOriginChainIDStr, TestRemoteChainIDStr}).Return(TestBlockNumbers, nil) + be.seqcli.EXPECT().Parser().Return(parser) + + tobReq := CreateToBReq(t, opts) + + t.Log("========setup & send TOB============") + + chunks := []*common.ArcadiaChunk{} + chunks = append(chunks, addlChunks...) + chunks = append(chunks, &tobReq.Chunk) + + tobTxs, err := common.CollectTxsFromChunks(chunks) + require.NoError(t, err) + common.DisplayEthTxs(tobTxs) + + // expectations for the first tob + for domain, expectedTxs := range tobTxs { + fmt.Printf("expected txs for domain: %s\n", domain) + fmt.Printf("[") + for _, tx := range expectedTxs { + fmt.Printf("%s, ", tx.Hash().Hex()) + } + fmt.Printf("]\n") + relatedAccounts, err := CollectAccountsFromTxs(expectedTxs, TestAccounts) + require.NoError(t, err) + nonces := CollectNoncesFromEthAccounts(relatedAccounts) + balances := CollectBalancesFromEthAccounts(relatedAccounts) + + matchTxs := ExpectedMatchTxs(expectedTxs) + blockNum := TestBlockNumbers[domain] + if offset >= 0 { + blockNum = blockNum + uint64(offset) + } else { + blockNum = blockNum - uint64(-offset) + } + + be.simulator.EXPECT().GetNonces(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNum)).Return(nonces, nil) + be.simulator.EXPECT().GetBalances(domain, mock.MatchedBy(matchTxs), uint64ToHexString(blockNum)).Return(balances, nil) + + // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out + callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) + for _, tx := range expectedTxs { + callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ + TxHash: tx.Hash().Hex(), + Error: "", + Revert: "", + }) + } + rawExpectedTxs, err := CollectRawTxs(expectedTxs) + + require.NoError(t, err) + 
validationReq := common.BlockValidationRequest{ + Txs: rawExpectedTxs, + BlockNumber: uint64ToHexString(blockNum), + StateBlockNumber: uint64ToHexString(blockNum), + } + be.simulator.EXPECT(). + SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). + Return(100, callBundleRes, nil) + } + + rr := be.request(http.MethodPost, pathSubmitNewBlockRequest, tobReq) + require.Equal(t, http.StatusOK, rr.Code) + + be.resetSeqClientMockExpectations(t) + return tobReq +} + +func (be *testBackend) setupSimBlockAndGetUsedExpectation(t *testing.T, expectedTxs ethtypes.Transactions, blockNum uint64, domain string) { + // simulation results, only the txs from this ToB will be simulated and the txs from RoB will be filtered out + callBundleRes := make([]flashbotsrpc.FlashbotsCallBundleResult, 0, len(expectedTxs)) + for _, tx := range expectedTxs { + callBundleRes = append(callBundleRes, flashbotsrpc.FlashbotsCallBundleResult{ + TxHash: tx.Hash().Hex(), + Error: "", + Revert: "", + }) + } + rawExpectedTxs, err := CollectRawTxs(expectedTxs) + require.NoError(t, err) + validationReq := common.BlockValidationRequest{ + Txs: rawExpectedTxs, + BlockNumber: uint64ToHexString(blockNum), + StateBlockNumber: uint64ToHexString(blockNum), + } + be.simulator.EXPECT(). + SimBlockAndGetGasUsedForChain(mock.Anything, domain, &validationReq). 
+ Return(100, callBundleRes, nil) +} + +// setLowestToBNonceForEpoch is used to set the lowest tob nonce for epoch in datastore for testing +// fails with error only if both redis and database error out +func (be *testBackend) setLowestToBNonceForEpoch(t *testing.T, epoch uint64, tobNonce uint64) { + redisErr := be.redis.SetEpochLowestToBNonce(epoch, tobNonce) + if redisErr != nil { + dbErr := be.arcadia.db.SetEpochLowestToBNonce(epoch, tobNonce) + require.NoError(t, dbErr) + } +} + +func (be *testBackend) GetArcadia() *ArcadiaAPI { + return be.arcadia +} + +func (be *testBackend) GetRedis() *datastore.RedisCache { + return be.redis +} + +func (be *testBackend) request(method, path string, payload any) *httptest.ResponseRecorder { + var req *http.Request + var err error + + path = "/api" + path + + if payload == nil { + req, err = http.NewRequest(method, path, bytes.NewReader(nil)) + } else { + payloadBytes, err2 := json.Marshal(payload) + require.NoError(be.t, err2) + req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) + } + require.NoError(be.t, err) + + rr := httptest.NewRecorder() + be.arcadia.getRouter().ServeHTTP(rr, req) + return rr +} + +func (be *testBackend) RequestWithHeaders(method, path string, payload any, headers map[string]string) *httptest.ResponseRecorder { + var req *http.Request + var err error + path = "/api" + path + + if payload == nil { + req, err = http.NewRequest(method, path, bytes.NewReader(nil)) + } else { + payloadBytes, err2 := json.Marshal(payload) + require.NoError(be.t, err2) + req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) + } + require.NoError(be.t, err) + for header, value := range headers { + req.Header.Set(header, value) + } + rr := httptest.NewRecorder() + be.arcadia.getRouter().ServeHTTP(rr, req) + return rr +} + +func (be *testBackend) RequestWithPayloadHeader(method, path string, payload any, sig string) *httptest.ResponseRecorder { + return be.RequestWithHeaders(method, path, 
payload, map[string]string{ + GetPayloadHeaderRollupSig: sig, + }) +} + +func (be *testBackend) SetupRegisteredRollups(epoch uint64, chainID *big.Int) { + namespace := common.ChainIDToNamespace(chainID) + rollup := &actions.RollupInfo{ + Namespace: namespace, + } + err := be.GetArcadia().datastore.SetRegisteredRollup(epoch, rollup) + if err != nil { + panic("failed to set registered rollup") + } +} + +func (be *testBackend) SetupRegisteredRollupsWithPublicKey(epoch uint64, chainID *big.Int, seqPk []byte) { + namespace := common.ChainIDToNamespace(chainID) + rollup := &actions.RollupInfo{ + Namespace: namespace, + SequencerPublicKey: seqPk, + } + err := be.GetArcadia().datastore.SetRegisteredRollup(epoch, rollup) + if err != nil { + panic("failed to set registered rollup") + } +} + +func (be *testBackend) SetupMockDASubmitter() { + blobID := [][]byte{[]byte("0xDEADBEEF")} + mockBlobInfo := &common.BlobInfo{ + BlobID: blobID, + Proof: nil, // Or mock a valid proof if needed + IsFinalized: true, + } + be.da.EXPECT().SubmitAndFinalizeBlob(context.Background(), mock.Anything).Return(mockBlobInfo, nil).Maybe() + be.seqcli.EXPECT().SubmitActions(mock.Anything, mock.Anything).Return(ids.Empty, nil).Maybe() + be.da.EXPECT().ChunksChan().Return(be.daChunksChan).Maybe() +} + +type SimAuctionWinnerInfo struct { + SeqChainID ids.ID + Bid *common.Auction + SecretKey *ed25519.PrivateKey +} + +// AdvanceToNextEpoch advance the arcadia backend to next epoch +// Can optionally take pointer to SimAuctionWinnerInfo which will produce a winning block with auction when needed +// Returns the epoch advanced that is advanced to +func (be *testBackend) AdvanceToNextEpoch( + t *testing.T, + auctionWinner *SimAuctionWinnerInfo, + checkSlotConsistency bool, +) uint64 { + currEpoch := be.arcadia.headEpoch.Load() + targetEpoch := currEpoch + 1 + targetSlot := targetEpoch * 6 + + slot := be.arcadia.headSlot.Load() + if checkSlotConsistency { + if slot != currEpoch*6 { + 
t.Fatal("AdvanceToNextEpoch() detected slot and epoch inconsistency") + } + } + + // process the left block for this epoch + for currSlot := slot; currSlot <= targetSlot; currSlot++ { + var block *chain.StatefulBlock + var results []*chain.Result + if auctionWinner == nil || currSlot%common.SlotsPerEpoch != 5 { + block, results = MakeEmptySEQBlock(currSlot) + } else { + block, results = MakeSEQBlockWithOneAuction(t, auctionWinner.SecretKey, currSlot, auctionWinner.SeqChainID, auctionWinner.Bid) + } + + be.arcadia.onNewSeqBlock(block, results) + + require.Equal(t, currSlot, be.arcadia.headSlot.Load()) + } + // sleep to wait announceAuctionWinnerToSeq to be finished + if auctionWinner != nil { + time.Sleep(3 * time.Second) + } + + require.Equal(t, be.arcadia.headEpoch.Load(), targetEpoch) + require.Equal(t, be.arcadia.headSlot.Load(), targetSlot) + + return targetEpoch +} + +func (be *testBackend) AdvanceToNextSlotInSameEpoch(t *testing.T) { + currEpoch := be.arcadia.headEpoch.Load() + slot := be.arcadia.headSlot.Load() + targetSlot := slot + 1 + + if targetSlot/common.SlotsPerEpoch > currEpoch { + return + } + + var block *chain.StatefulBlock + var results []*chain.Result + block, results = MakeEmptySEQBlock(targetSlot) + be.arcadia.onNewSeqBlock(block, results) + require.Equal(t, targetSlot, be.arcadia.headSlot.Load()) + + require.Equal(t, be.arcadia.headEpoch.Load(), currEpoch) // shouldn't advance + require.Equal(t, be.arcadia.headSlot.Load(), targetSlot) +} + +func (be *testBackend) SetMockSeqClientRollupExpectations(targetEpoch uint64, rollups []*actions.RollupInfo) { + // The last fetched registered rollup value is checked when Arcadia updates the registered rollups map. + // If it doesn't equal our expected epoch, then the update doesn't take place. 
+ be.seqcli.EXPECT().GetRollupsValidAtEpoch(mock.Anything, targetEpoch).Return(rollups, nil).Maybe() + + for _, rollup := range rollups { + err := be.arcadia.datastore.SetRegisteredRollup(targetEpoch, rollup) + require.NoError(be.t, err) + } + + if be.useRedis { + for _, rollup := range rollups { + err := be.redis.InsertRollupAtEpoch(targetEpoch, rollup) + require.NoError(be.t, err) + } + } +} diff --git a/services/api/test_helpers.go b/services/api/test_helpers.go new file mode 100644 index 00000000..2432e6f7 --- /dev/null +++ b/services/api/test_helpers.go @@ -0,0 +1,45 @@ +package api + +import ( + srpc "github.com/AnomalyFi/nodekit-seq/rpc" + "math/big" + + "github.com/AnomalyFi/Arcadia/common" + "github.com/ava-labs/avalanchego/ids" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/flashbots/go-boost-utils/bls" +) + +const ( + mockSecretKeyHex = "0x4e343a647c5a5c44d76c2c58b63f02cdf3a9a0ec40f102ebc26363b4b1b95033" +) + +// Shared test variables +var ( + TestMockSecretKeyBytes, _ = hexutil.Decode(mockSecretKeyHex) + TestMockSecretKey, _ = bls.SecretKeyFromBytes(TestMockSecretKeyBytes) + TestMockPublicKey, _ = bls.PublicKeyFromSecretKey(TestMockSecretKey) + TestNumAccounts = 10 + TestAccounts = GenerateTestEthAccounts(TestNumAccounts) + TestOriginChainID = big.NewInt(45200) + TestOriginChainIDStr = hexutil.EncodeBig(TestOriginChainID) + TestRemoteChainID = big.NewInt(45201) + TestRemoteChainIDStr = hexutil.EncodeBig(TestRemoteChainID) + TestBlockNumbers = map[string]uint64{ + TestOriginChainIDStr: 100, + TestRemoteChainIDStr: 50, + } + TestOriginChainIDInt, _ = common.ChainIDStrToChainID(TestOriginChainIDStr) + TestRemoteChainIDInt, _ = common.ChainIDStrToChainID(TestRemoteChainIDStr) + TestSeqChainID = ids.GenerateTestID() + TestEpoch = uint64(0) + TestOriginNamespace = common.ChainIDToNamespace(TestOriginChainID) + TestRemoteNamespace = common.ChainIDToNamespace(TestRemoteChainID) + + TestBuilderSecretKey, _ = bls.GenerateRandomSecretKey() + 
TestBuilderPublicKey, _ = bls.PublicKeyFromSecretKey(TestBuilderSecretKey) + TestBuilderPkBytes = TestBuilderPublicKey.Bytes() + TestChainParser = &srpc.Parser{} + TestChunkID = ids.GenerateTestID() + TestSeqNetworkID = uint32(1337) +) diff --git a/services/api/testing_utils.go b/services/api/testing_utils.go index c4284185..7ba21f4d 100644 --- a/services/api/testing_utils.go +++ b/services/api/testing_utils.go @@ -149,7 +149,6 @@ func CreateTestChunkSubmission( blockReq.Pubkey = builderPubkeyBytes[:] require.NoError(t, err) - // blockReq.Signature = &bls.Signature{} isToB := len(opts.ChainIDs) > 1 var chunk common.ArcadiaChunk var payloadHash [32]byte @@ -562,6 +561,15 @@ func TxsTheSame(t *testing.T, expected map[string]ethtypes.Transactions, actual } } +func TxsTheSameUnordered(t *testing.T, expected map[string]ethtypes.Transactions, actual map[string]ethtypes.Transactions) { + require.Equal(t, len(expected), len(actual)) + for domain, expectedTxs := range expected { + actualTxs := actual[domain] + require.NotNil(t, actualTxs) + require.True(t, common.TxsHashUnorderedMatch(expectedTxs, actualTxs)) + } +} + func CreateTestEthTransaction(nonce uint64, value big.Int, gasLimit uint64, gasPrice big.Int, data []byte) *ethtypes.Transaction { toAddress := ethcommon.HexToAddress(TestAddressValue) _, err := crypto.HexToECDSA(TestPrivateKeyValue) @@ -666,3 +674,18 @@ func waitForServerReady(addr string, timeout time.Duration) error { } return fmt.Errorf("server not ready after %v", timeout) } + +// ExpectedMatchTxs creates a matcher functor that can be used in mock testing for tx matching +func ExpectedMatchTxs(expectedTxs ethtypes.Transactions) func(txs ethtypes.Transactions) bool { + return func(txs ethtypes.Transactions) bool { + if len(txs) != len(expectedTxs) { + return false + } + for i, tx := range txs { + if tx.Hash().Hex() != expectedTxs[i].Hash().Hex() { + return false + } + } + return true + } +} diff --git a/services/api/tx_table.go b/services/api/tx_table.go 
index 58a2c10a..dece3471 100644 --- a/services/api/tx_table.go +++ b/services/api/tx_table.go @@ -31,7 +31,7 @@ func (t *TxTable) AddBundledTxns(txns []*ethtypes.Transaction) error { for _, tx := range txns { sender, err := common.ExtractSender(tx) if err != nil { - return fmt.Errorf("unable to extract sender from tx: %s err: %w", tx.Hash().Hex(), err) + return fmt.Errorf("unable to extract sender from tx: [%s], sender: [%s], err: %w", tx.Hash().Hex(), sender, err) } // nonce check @@ -44,7 +44,7 @@ func (t *TxTable) AddBundledTxns(txns []*ethtypes.Transaction) error { nonce = nonce + uint64(len(t.table[sender])) + uint64(len(pending[sender])) // nonce not consecutive if tx.Nonce() != nonce { - return fmt.Errorf("nonce not consecutive: wanted: %d, actual: %d", nonce, tx.Nonce()) + return fmt.Errorf("nonce not consecutive: wanted: [%d], actual: [%d], for tx [%s] with sender [%s]", nonce, tx.Nonce(), tx.Hash().Hex(), sender) } // balance check, the balance consumed can be negative for consuming or positive for receiving funds(transfer) @@ -59,14 +59,14 @@ func (t *TxTable) AddBundledTxns(txns []*ethtypes.Transaction) error { // Intrinsic Gas Calculation: https://github.com/wolflo/evm-opcodes/blob/main/gas.md gas := intrinsicGas(tx) if tx.Gas() < uint64(gas) { - return fmt.Errorf("provided insufficient gas, want: %d, provided: %d", gas, tx.Gas()) + return fmt.Errorf("provided insufficient gas, want: %d, provided: %d, for tx [%s] with sender [%s]", gas, tx.Gas(), tx.Hash().Hex(), sender) } // accumulates txValue + gas to consumed consumed = consumed.Add(consumed, txValue) consumed = consumed.Add(consumed, big.NewInt(int64(gas))) - // consumed = consumed.Add(consumed, big.NewInt(int64(21000))) balanceConsumed[sender] = consumed.Neg(consumed) + if tx.Value().Cmp(big.NewInt(0)) != 0 { recipient := tx.To().Hex() if _, ok := balanceConsumed[recipient]; !ok { @@ -98,6 +98,7 @@ func (t *TxTable) AddBundledTxns(txns []*ethtypes.Transaction) error { balanceAfterConsuming := 
big.NewInt(0) balanceAfterConsuming = balanceAfterConsuming.Add(balanceAfterConsuming, consumed) balanceAfterConsuming = balanceAfterConsuming.Add(balanceAfterConsuming, remainBalance) + // no enough balance to cover tx fee + value if balanceAfterConsuming.Cmp(big.NewInt(0)) == -1 { return fmt.Errorf("remaining balance for sender(%s) cannot cover tx execution(value + gas)", sender)