diff --git a/.github/workflows/amoy_deb_profiles.yml b/.github/workflows/amoy_deb_profiles.yml index ebcdfca0ba..4258967ec3 100644 --- a/.github/workflows/amoy_deb_profiles.yml +++ b/.github/workflows/amoy_deb_profiles.yml @@ -194,7 +194,6 @@ jobs: ARCH: all NODE: sentry NETWORK: amoy - - name: Running package build for ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} run: dpkg-deb --build --root-owner-group packaging/deb/bor-pbss-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }} env: @@ -233,6 +232,12 @@ jobs: ARCH: all NODE: validator NETWORK: amoy + - name: Copying systemd file for ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} + run: cp -rp packaging/templates/systemd/bor.service packaging/deb/bor-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }}/lib/systemd/system/ + env: + ARCH: all + NODE: validator + NETWORK: amoy - name: Building bor ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} profile run: dpkg-deb --build --root-owner-group packaging/deb/bor-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }} env: @@ -271,6 +276,12 @@ jobs: ARCH: all NODE: validator NETWORK: amoy + - name: Copying systemd file for ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} + run: cp -rp packaging/templates/systemd/bor.service packaging/deb/bor-pbss-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }}/lib/systemd/system/ + env: + ARCH: all + NODE: validator + NETWORK: amoy - name: Building bor ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} profile run: dpkg-deb --build --root-owner-group packaging/deb/bor-pbss-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }} env: @@ -315,6 +326,12 @@ jobs: ARCH: all NODE: archive NETWORK: amoy + - name: Copying systemd file for ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} + run: cp -rp packaging/templates/systemd/bor.service packaging/deb/bor-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }}/lib/systemd/system/ + env: + ARCH: all + NODE: archive + NETWORK: amoy - name: Building bor ${{ env.NODE }} on ${{ env.NETWORK }} on ${{ env.ARCH }} profile run: dpkg-deb --build --root-owner-group packaging/deb/bor-${{ env.NETWORK }}-${{ env.NODE }}-config_${{ env.GIT_TAG }}-${{ env.ARCH }} env: diff --git a/builder/files/config.toml b/builder/files/config.toml index df34031979..d35c46c0b6 100644 --- a/builder/files/config.toml +++ b/builder/files/config.toml @@ -58,9 +58,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/cmd/clef/intapi_changelog.md b/cmd/clef/intapi_changelog.md index eaeb2e6862..dbfd665412 100644 --- a/cmd/clef/intapi_changelog.md +++ b/cmd/clef/intapi_changelog.md @@ -14,7 +14,7 @@ Additional labels for pre-release and build metadata are available as extensions Added `clef_New` to the internal API callable from a UI. -> `New` creates a new password protected Account. The private key is protected with +> `New` creates a new password-protected Account. The private key is protected with > the given password. Users are responsible to backup the private key that is stored > in the keystore location that was specified when this API was created. 
> This method is the same as New on the external API, the difference being that diff --git a/consensus/bor/bor.go b/consensus/bor/bor.go index baac3b4001..3bf38f6ac0 100644 --- a/consensus/bor/bor.go +++ b/consensus/bor/bor.go @@ -468,8 +468,9 @@ func (c *Bor) verifyCascadingFields(chain consensus.ChainHeaderReader, header *t // Verify the validator list match the local contract if IsSprintStart(number+1, c.config.CalculateSprint(number)) { - newValidators, err := c.spanner.GetCurrentValidatorsByBlockNrOrHash(context.Background(), rpc.BlockNumberOrHashWithNumber(rpc.LatestBlockNumber), number+1) - + // Use parent block's hash to make the eth_call to fetch validators so that the state being + // used to make the call is of the same fork. + newValidators, err := c.spanner.GetCurrentValidatorsByBlockNrOrHash(context.Background(), rpc.BlockNumberOrHashWithHash(header.ParentHash, false), number+1) if err != nil { return err } diff --git a/core/blockchain.go b/core/blockchain.go index 818b72e953..6c849f8e82 100644 --- a/core/blockchain.go +++ b/core/blockchain.go @@ -39,7 +39,6 @@ import ( "github.com/ethereum/go-ethereum/common/mclock" "github.com/ethereum/go-ethereum/common/prque" "github.com/ethereum/go-ethereum/consensus" - "github.com/ethereum/go-ethereum/core/blockstm" "github.com/ethereum/go-ethereum/core/rawdb" "github.com/ethereum/go-ethereum/core/state" "github.com/ethereum/go-ethereum/core/state/snapshot" @@ -86,12 +85,15 @@ var ( blockImportTimer = metrics.NewRegisteredMeter("chain/imports", nil) triedbCommitTimer = metrics.NewRegisteredTimer("chain/triedb/commits", nil) - blockInsertTimer = metrics.NewRegisteredTimer("chain/inserts", nil) - blockValidationTimer = metrics.NewRegisteredTimer("chain/validation", nil) - blockExecutionTimer = metrics.NewRegisteredTimer("chain/execution", nil) - blockWriteTimer = metrics.NewRegisteredTimer("chain/write", nil) - blockExecutionParallelCounter = metrics.NewRegisteredCounter("chain/execution/parallel", nil) - blockExecutionSerialCounter = metrics.NewRegisteredCounter("chain/execution/serial", nil) + blockInsertTimer = metrics.NewRegisteredTimer("chain/inserts", nil) + blockValidationTimer = metrics.NewRegisteredTimer("chain/validation", nil) + blockExecutionTimer = metrics.NewRegisteredTimer("chain/execution", nil) + blockWriteTimer = metrics.NewRegisteredTimer("chain/write", nil) + blockExecutionParallelCounter = metrics.NewRegisteredCounter("chain/execution/parallel", nil) + blockExecutionSerialCounter = metrics.NewRegisteredCounter("chain/execution/serial", nil) + blockExecutionParallelErrorCounter = metrics.NewRegisteredCounter("chain/execution/parallel/error", nil) + blockExecutionParallelTimer = metrics.NewRegisteredTimer("chain/execution/parallel/timer", nil) + blockExecutionSerialTimer = metrics.NewRegisteredTimer("chain/execution/serial/timer", nil) blockReorgMeter = metrics.NewRegisteredMeter("chain/reorg/executes", nil) blockReorgAddMeter = metrics.NewRegisteredMeter("chain/reorg/add", nil) @@ -569,7 +571,7 @@ func NewParallelBlockChain(db ethdb.Database, cacheConfig *CacheConfig, genesis return bc, nil } -func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ types.Receipts, _ []*types.Log, _ uint64, _ *state.StateDB, blockEndErr error) { +func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ types.Receipts, _ []*types.Log, _ uint64, _ *state.StateDB, vtime time.Duration, blockEndErr error) { // Process the block using processor and parallelProcessor at the same time, take the one 
which finishes first, cancel the other, and return the result ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -597,6 +599,7 @@ func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ err error statedb *state.StateDB counter metrics.Counter + parallel bool } resultChan := make(chan Result, 2) @@ -606,7 +609,7 @@ func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ if bc.parallelProcessor != nil { parallelStatedb, err := state.New(parent.Root, bc.stateCache, bc.snaps) if err != nil { - return nil, nil, 0, nil, err + return nil, nil, 0, nil, 0, err } parallelStatedb.SetLogger(bc.logger) @@ -614,15 +617,22 @@ func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ go func() { parallelStatedb.StartPrefetcher("chain", nil) + pstart := time.Now() receipts, logs, usedGas, err := bc.parallelProcessor.Process(block, parallelStatedb, bc.vmConfig, ctx) - resultChan <- Result{receipts, logs, usedGas, err, parallelStatedb, blockExecutionParallelCounter} + blockExecutionParallelTimer.UpdateSince(pstart) + if err == nil { + vstart := time.Now() + err = bc.validator.ValidateState(block, parallelStatedb, receipts, usedGas, false) + vtime = time.Since(vstart) + } + resultChan <- Result{receipts, logs, usedGas, err, parallelStatedb, blockExecutionParallelCounter, true} }() } if bc.processor != nil { statedb, err := state.New(parent.Root, bc.stateCache, bc.snaps) if err != nil { - return nil, nil, 0, nil, err + return nil, nil, 0, nil, 0, err } statedb.SetLogger(bc.logger) @@ -630,20 +640,27 @@ func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ go func() { statedb.StartPrefetcher("chain", nil) + pstart := time.Now() receipts, logs, usedGas, err := bc.processor.Process(block, statedb, bc.vmConfig, ctx) - resultChan <- Result{receipts, logs, usedGas, err, statedb, blockExecutionSerialCounter} + blockExecutionSerialTimer.UpdateSince(pstart) + if err == nil { + vstart := time.Now() + err = bc.validator.ValidateState(block, statedb, receipts, usedGas, false) + vtime = time.Since(vstart) + } + resultChan <- Result{receipts, logs, usedGas, err, statedb, blockExecutionSerialCounter, false} }() } result := <-resultChan - if _, ok := result.err.(blockstm.ParallelExecFailedError); ok { + if result.parallel && result.err != nil { log.Warn("Parallel state processor failed", "err", result.err) - + blockExecutionParallelErrorCounter.Inc(1) // If the parallel processor failed, we will fallback to the serial processor if enabled if processorCount == 2 { - result.statedb.StopPrefetcher() result = <-resultChan + result.statedb.StopPrefetcher() processorCount-- } } @@ -658,7 +675,7 @@ func (bc *BlockChain) ProcessBlock(block *types.Block, parent *types.Header) (_ }() } - return result.receipts, result.logs, result.usedGas, result.statedb, result.err + return result.receipts, result.logs, result.usedGas, result.statedb, vtime, result.err } // empty returns an indicator whether the blockchain is empty. 
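Editor's note on the ProcessBlock rework above: the function now races the serial and parallel state processors, folds ValidateState into each goroutine (so the returned vtime can be subtracted from ptime later in insertChain), and falls back to the serial result whenever the parallel run returns any error, not just blockstm.ParallelExecFailedError. A minimal sketch of the pattern, with illustrative types and function names rather than the real BlockChain fields:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// result mirrors the anonymous Result struct in ProcessBlock: one processor's
// outcome plus a flag recording which path produced it.
type result struct {
	value    string
	err      error
	parallel bool
}

// processBlock races a parallel and a serial processor, takes whichever finishes
// first, and falls back to the serial result if the parallel one failed.
// The two funcs stand in for parallelProcessor.Process / processor.Process.
func processBlock(ctx context.Context, parallelProc, serialProc func(context.Context) (string, error)) (string, error) {
	ctx, cancel := context.WithCancel(ctx)
	defer cancel() // cancelling tells the losing processor to stop early

	resultChan := make(chan result, 2)

	go func() {
		v, err := parallelProc(ctx)
		resultChan <- result{v, err, true}
	}()
	go func() {
		v, err := serialProc(ctx)
		resultChan <- result{v, err, false}
	}()

	res := <-resultChan
	if res.parallel && res.err != nil {
		// Parallel execution failed for any reason: wait for the serial run instead.
		res = <-resultChan
	}
	return res.value, res.err
}

func main() {
	failFast := func(ctx context.Context) (string, error) {
		return "", errors.New("parallel execution failed")
	}
	slowButCorrect := func(ctx context.Context) (string, error) {
		select {
		case <-time.After(50 * time.Millisecond):
			return "serial receipts", nil
		case <-ctx.Done():
			return "", ctx.Err()
		}
	}
	out, err := processBlock(context.Background(), failFast, slowButCorrect)
	fmt.Println(out, err) // prints "serial receipts <nil>": fell back to the serial path
}
```

The buffered channel (capacity 2) matters here, as in the real code: the losing goroutine can still deliver its result without blocking after the winner has been consumed.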
@@ -2323,7 +2340,7 @@ func (bc *BlockChain) insertChain(chain types.Blocks, setHead bool) (int, error) // Process block using the parent state as reference point pstart := time.Now() - receipts, logs, usedGas, statedb, err := bc.ProcessBlock(block, parent) + receipts, logs, usedGas, statedb, vtime, err := bc.ProcessBlock(block, parent) activeState = statedb if err != nil { @@ -2338,18 +2355,8 @@ func (bc *BlockChain) insertChain(chain types.Blocks, setHead bool) (int, error) bc.stateSyncFeed.Send(StateSyncEvent{Data: data}) } // BOR - ptime := time.Since(pstart) - - vstart := time.Now() - - if err := bc.validator.ValidateState(block, statedb, receipts, usedGas, false); err != nil { - bc.reportBlock(block, receipts, err) - followupInterrupt.Store(true) - - return it.index, err - } + ptime := time.Since(pstart) - vtime - vtime := time.Since(vstart) proctime := time.Since(start) // processing + validation // Update the metrics touched during block processing and validation diff --git a/core/blockchain_test.go b/core/blockchain_test.go index 43c7f2c4b0..47738f2bc6 100644 --- a/core/blockchain_test.go +++ b/core/blockchain_test.go @@ -17,6 +17,7 @@ package core import ( + "context" "errors" "fmt" "math/big" @@ -170,7 +171,7 @@ func testBlockChainImport(chain types.Blocks, blockchain *BlockChain) error { return err } - receipts, _, usedGas, statedb, err := blockchain.ProcessBlock(block, blockchain.GetBlockByHash(block.ParentHash()).Header()) + receipts, _, usedGas, statedb, _, err := blockchain.ProcessBlock(block, blockchain.GetBlockByHash(block.ParentHash()).Header()) if err != nil { blockchain.reportBlock(block, receipts, err) @@ -216,6 +217,75 @@ func testParallelBlockChainImport(t *testing.T, scheme string) { } } +type AlwaysFailParallelStateProcessor struct { +} + +func (p *AlwaysFailParallelStateProcessor) Process(block *types.Block, statedb *state.StateDB, cfg vm.Config, interruptCtx context.Context) (types.Receipts, []*types.Log, uint64, error) { + return nil, nil, 0, errors.New("always fail") +} + +type SlowSerialStateProcessor struct { + s Processor +} + +func NewSlowSerialStateProcessor(s Processor) *SlowSerialStateProcessor { + return &SlowSerialStateProcessor{s: s} +} + +func (p *SlowSerialStateProcessor) Process(block *types.Block, statedb *state.StateDB, cfg vm.Config, interruptCtx context.Context) (types.Receipts, []*types.Log, uint64, error) { + time.Sleep(100 * time.Millisecond) + return p.s.Process(block, statedb, cfg, interruptCtx) +} + +func TestSuccessfulBlockImportParallelFailed(t *testing.T) { + t.Parallel() + + testSuccessfulBlockImportParallelFailed(t, rawdb.HashScheme) + testSuccessfulBlockImportParallelFailed(t, rawdb.PathScheme) +} + +func testSuccessfulBlockImportParallelFailed(t *testing.T, scheme string) { + // Create a new blockchain with 10 initial blocks + db, _, blockchain, err := newCanonical(ethash.NewFaker(), 10, true, scheme) + blockchain.parallelProcessor = &AlwaysFailParallelStateProcessor{} + blockchain.processor = NewSlowSerialStateProcessor(blockchain.processor) + if err != nil { + t.Fatalf("failed to create canonical chain: %v", err) + } + defer blockchain.Stop() + + // Create valid blocks to import + block := blockchain.GetBlockByHash(blockchain.CurrentBlock().Hash()) + blocks := makeBlockChain(blockchain.chainConfig, block, 5, ethash.NewFaker(), db, canonicalSeed) + + // Import the blocks + n, err := blockchain.InsertChain(blocks) + if err != nil { + t.Fatalf("failed to import valid blocks: %v", err) + } + + // Verify all blocks were imported + if 
n != len(blocks) { + t.Errorf("imported %d blocks, wanted %d", n, len(blocks)) + } + + // Verify the last block is properly linked + if blockchain.CurrentBlock().Hash() != blocks[len(blocks)-1].Hash() { + t.Errorf("current block hash mismatch: got %x, want %x", + blockchain.CurrentBlock().Hash(), + blocks[len(blocks)-1].Hash()) + } + + // Verify block numbers are sequential + for i, block := range blocks { + expectedNumber := uint64(11 + i) // 10 initial blocks + new blocks + if block.NumberU64() != expectedNumber { + t.Errorf("block %d has wrong number: got %d, want %d", + i, block.NumberU64(), expectedNumber) + } + } +} + // testHeaderChainImport tries to process a chain of header, writing them into // the database if successful. func testHeaderChainImport(chain []*types.Header, blockchain *BlockChain) error { diff --git a/core/state/statedb.go b/core/state/statedb.go index 2ea0662ce0..f1cf27795d 100644 --- a/core/state/statedb.go +++ b/core/state/statedb.go @@ -854,22 +854,28 @@ func (s *StateDB) SetState(addr common.Address, key, value common.Hash) { // storage. This function should only be used for debugging and the mutations // must be discarded afterwards. func (s *StateDB) SetStorage(addr common.Address, storage map[common.Hash]common.Hash) { - // SetStorage needs to wipe existing storage. We achieve this by pretending - // that the account self-destructed earlier in this block, by flagging - // it in stateObjectsDestruct. The effect of doing so is that storage lookups - // will not hit disk, since it is assumed that the disk-data is belonging + // SetStorage needs to wipe the existing storage. We achieve this by marking + // the account as self-destructed in this block. The effect is that storage + // lookups will not hit the disk, as it is assumed that the disk data belongs // to a previous incarnation of the object. // - // TODO(rjl493456442) this function should only be supported by 'unwritable' - // state and all mutations made should all be discarded afterwards. - if _, ok := s.stateObjectsDestruct[addr]; !ok { - s.stateObjectsDestruct[addr] = nil + // TODO (rjl493456442): This function should only be supported by 'unwritable' + // state, and all mutations made should be discarded afterward. + obj := s.getStateObject(addr) + if obj != nil { + if _, ok := s.stateObjectsDestruct[addr]; !ok { + s.stateObjectsDestruct[addr] = obj + } } - - stateObject := s.getOrNewStateObject(addr) - + newObj := s.createObject(addr) for k, v := range storage { - stateObject.SetState(k, v) + newObj.SetState(k, v) + } + // Inherit the metadata of original object if it was existent + if obj != nil { + newObj.SetCode(common.BytesToHash(obj.CodeHash()), obj.code) + newObj.SetNonce(obj.Nonce()) + newObj.SetBalance(obj.Balance(), tracing.BalanceChangeUnspecified) } } @@ -988,7 +994,7 @@ func (s *StateDB) getStateObject(addr common.Address) *stateObject { var data *types.StateAccount if s.snap != nil { start := time.Now() - acc, err := s.snap.Account(crypto.HashData(s.hasher, addr.Bytes())) + acc, err := s.snap.Account(crypto.HashData(crypto.NewKeccakState(), addr.Bytes())) s.SnapshotAccountReads += time.Since(start) if err == nil { if acc == nil { diff --git a/docs/cli/server.md b/docs/cli/server.md index adeacd731a..aae0c19a8e 100644 --- a/docs/cli/server.md +++ b/docs/cli/server.md @@ -290,13 +290,13 @@ The ```bor server``` command runs the Bor client. 
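Editor's note, stepping back to the core/state/statedb.go hunk above: SetStorage now replaces an account's storage wholesale by flagging the existing object as destructed, creating a fresh object, writing the supplied slots, and carrying over code, nonce and balance. The new eth_call test further down exercises exactly this by overriding the 0x…dad account with an empty State map and reading back a zeroed slot. A sketch of how a client could trigger the same behaviour over RPC; the endpoint URL and target address are placeholders, and the gethclient override API is the standard go-ethereum one, assumed to be exposed by bor as well:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethclient/gethclient"
	"github.com/ethereum/go-ethereum/rpc"
)

func main() {
	// Placeholder endpoint; point this at a bor node with the eth namespace enabled.
	rpcClient, err := rpc.Dial("http://localhost:8545")
	if err != nil {
		log.Fatal(err)
	}
	gc := gethclient.New(rpcClient)

	target := common.HexToAddress("0x0000000000000000000000000000000000000dad")

	// An empty (non-nil) State map asks the node to wipe every storage slot of the
	// account for the duration of the call, which is what the rewritten SetStorage
	// implements on the server side.
	overrides := map[common.Address]gethclient.OverrideAccount{
		target: {State: map[common.Hash]common.Hash{}},
	}

	msg := ethereum.CallMsg{
		To: &target,
		// Calldata omitted; any view call against the overridden account now observes
		// cleared storage while keeping its code, nonce and balance.
	}

	out, err := gc.CallContract(context.Background(), msg, nil, &overrides)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("call result: %x\n", out)
}
```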
### Transaction Pool Options -- ```txpool.accountqueue```: Maximum number of non-executable transaction slots permitted per account (default: 16) +- ```txpool.accountqueue```: Maximum number of non-executable transaction slots permitted per account (default: 64) - ```txpool.accountslots```: Minimum number of executable transaction slots guaranteed per account (default: 16) -- ```txpool.globalqueue```: Maximum number of non-executable transaction slots for all accounts (default: 32768) +- ```txpool.globalqueue```: Maximum number of non-executable transaction slots for all accounts (default: 131072) -- ```txpool.globalslots```: Maximum number of executable transaction slots for all accounts (default: 32768) +- ```txpool.globalslots```: Maximum number of executable transaction slots for all accounts (default: 131072) - ```txpool.journal```: Disk journal for local transaction to survive node restarts (default: transactions.rlp) diff --git a/internal/cli/server/api_service.go b/internal/cli/server/api_service.go index 3781b6b45d..5af1a4adf6 100644 --- a/internal/cli/server/api_service.go +++ b/internal/cli/server/api_service.go @@ -49,6 +49,10 @@ func (s *Server) HeaderByNumber(ctx context.Context, req *protobor.GetHeaderByNu return nil, err } + if header == nil { + return nil, errors.New("header not found") + } + return &protobor.GetHeaderByNumberResponse{Header: headerToProtoborHeader(header)}, nil } @@ -62,6 +66,10 @@ func (s *Server) BlockByNumber(ctx context.Context, req *protobor.GetBlockByNumb return nil, err } + if block == nil { + return nil, errors.New("block not found") + } + return &protobor.GetBlockByNumberResponse{Block: blockToProtoBlock(block)}, nil } diff --git a/internal/cli/server/config.go b/internal/cli/server/config.go index 5960fdeab0..89078b30e4 100644 --- a/internal/cli/server/config.go +++ b/internal/cli/server/config.go @@ -658,9 +658,9 @@ func DefaultConfig() *Config { PriceLimit: params.BorDefaultTxPoolPriceLimit, // bor's default PriceBump: 10, AccountSlots: 16, - GlobalSlots: 32768, - AccountQueue: 16, - GlobalQueue: 32768, + GlobalSlots: 131072, + AccountQueue: 64, + GlobalQueue: 131072, LifeTime: 3 * time.Hour, }, Sealer: &SealerConfig{ diff --git a/internal/cli/server/testdata/default.toml b/internal/cli/server/testdata/default.toml index 0df37d610c..fcf2121270 100644 --- a/internal/cli/server/testdata/default.toml +++ b/internal/cli/server/testdata/default.toml @@ -61,9 +61,9 @@ devfakeauthor = false pricelimit = 25000000000 pricebump = 10 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "3h0m0s" [miner] diff --git a/internal/ethapi/api_test.go b/internal/ethapi/api_test.go index 07e5761951..a6d620b330 100644 --- a/internal/ethapi/api_test.go +++ b/internal/ethapi/api_test.go @@ -855,15 +855,24 @@ func TestEstimateGas(t *testing.T) { func TestCall(t *testing.T) { t.Parallel() + // Initialize test accounts var ( accounts = newAccounts(3) + dad = common.HexToAddress("0x0000000000000000000000000000000000000dad") genesis = &core.Genesis{ Config: params.MergedTestChainConfig, Alloc: types.GenesisAlloc{ accounts[0].addr: {Balance: big.NewInt(params.Ether)}, accounts[1].addr: {Balance: big.NewInt(params.Ether)}, accounts[2].addr: {Balance: big.NewInt(params.Ether)}, + dad: { + Balance: big.NewInt(params.Ether), + Nonce: 1, + Storage: map[common.Hash]common.Hash{ + common.Hash{}: 
common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000001"), + }, + }, }, } genBlocks = 10 @@ -1024,6 +1033,32 @@ func TestCall(t *testing.T) { // }, // want: "0x0122000000000000000000000000000000000000000000000000000000000000", // }, + // Clear the entire storage set + { + blockNumber: rpc.LatestBlockNumber, + call: TransactionArgs{ + From: &accounts[1].addr, + // Yul: + // object "Test" { + // code { + // let dad := 0x0000000000000000000000000000000000000dad + // if eq(balance(dad), 0) { + // revert(0, 0) + // } + // let slot := sload(0) + // mstore(0, slot) + // return(0, 32) + // } + // } + Input: hex2Bytes("610dad6000813103600f57600080fd5b6000548060005260206000f3"), + }, + overrides: StateOverride{ + dad: OverrideAccount{ + State: &map[common.Hash]common.Hash{}, + }, + }, + want: "0x0000000000000000000000000000000000000000000000000000000000000000", + }, } for i, tc := range testSuite { result, err := api.Call(context.Background(), tc.call, &rpc.BlockNumberOrHash{BlockNumber: &tc.blockNumber}, &tc.overrides, &tc.blockOverrides) diff --git a/p2p/simulations/README.md b/p2p/simulations/README.md index 1f9f72dcda..149be82b23 100644 --- a/p2p/simulations/README.md +++ b/p2p/simulations/README.md @@ -116,7 +116,7 @@ the expectation and what network events were emitted during the step run. ## HTTP API -The simulation framework includes a HTTP API that can be used to control the +The simulation framework includes an HTTP API that can be used to control the simulation. The API is initialised with a particular node adapter and has the following diff --git a/packaging/templates/mainnet-v1/archive/config.toml b/packaging/templates/mainnet-v1/archive/config.toml index a62dda85b1..77699dae04 100644 --- a/packaging/templates/mainnet-v1/archive/config.toml +++ b/packaging/templates/mainnet-v1/archive/config.toml @@ -53,9 +53,9 @@ gcmode = "archive" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/sentry/sentry/bor/config.toml b/packaging/templates/mainnet-v1/sentry/sentry/bor/config.toml index 0ee56f339c..9148973338 100644 --- a/packaging/templates/mainnet-v1/sentry/sentry/bor/config.toml +++ b/packaging/templates/mainnet-v1/sentry/sentry/bor/config.toml @@ -53,9 +53,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/sentry/sentry/bor/pbss_config.toml b/packaging/templates/mainnet-v1/sentry/sentry/bor/pbss_config.toml index 3d1d06400e..7a11e6a7d2 100644 --- a/packaging/templates/mainnet-v1/sentry/sentry/bor/pbss_config.toml +++ b/packaging/templates/mainnet-v1/sentry/sentry/bor/pbss_config.toml @@ -55,9 +55,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/sentry/validator/bor/config.toml b/packaging/templates/mainnet-v1/sentry/validator/bor/config.toml index bcf19bbcbe..9ab0802d46 100644 --- 
a/packaging/templates/mainnet-v1/sentry/validator/bor/config.toml +++ b/packaging/templates/mainnet-v1/sentry/validator/bor/config.toml @@ -55,9 +55,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/sentry/validator/bor/pbss_config.toml b/packaging/templates/mainnet-v1/sentry/validator/bor/pbss_config.toml index 6e71ad1cf9..75954b38e6 100644 --- a/packaging/templates/mainnet-v1/sentry/validator/bor/pbss_config.toml +++ b/packaging/templates/mainnet-v1/sentry/validator/bor/pbss_config.toml @@ -57,9 +57,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/without-sentry/bor/config.toml b/packaging/templates/mainnet-v1/without-sentry/bor/config.toml index 24d25b4406..d43b87c23e 100644 --- a/packaging/templates/mainnet-v1/without-sentry/bor/config.toml +++ b/packaging/templates/mainnet-v1/without-sentry/bor/config.toml @@ -55,9 +55,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/mainnet-v1/without-sentry/bor/pbss_config.toml b/packaging/templates/mainnet-v1/without-sentry/bor/pbss_config.toml index ac23de5789..20a5b02f0a 100644 --- a/packaging/templates/mainnet-v1/without-sentry/bor/pbss_config.toml +++ b/packaging/templates/mainnet-v1/without-sentry/bor/pbss_config.toml @@ -56,9 +56,9 @@ syncmode = "full" nolocals = true pricelimit = 25000000000 accountslots = 16 - globalslots = 32768 - accountqueue = 16 - globalqueue = 32768 + globalslots = 131072 + accountqueue = 64 + globalqueue = 131072 lifetime = "1h30m0s" # locals = [] # journal = "" diff --git a/packaging/templates/package_scripts/control b/packaging/templates/package_scripts/control deleted file mode 100644 index 90e6823dae..0000000000 --- a/packaging/templates/package_scripts/control +++ /dev/null @@ -1,12 +0,0 @@ -Source: bor -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Package: bor -Rules-Requires-Root: yes -Architecture: amd64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. diff --git a/packaging/templates/package_scripts/control.arm64 b/packaging/templates/package_scripts/control.arm64 deleted file mode 100644 index 6b964b276f..0000000000 --- a/packaging/templates/package_scripts/control.arm64 +++ /dev/null @@ -1,13 +0,0 @@ -Source: bor -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Rules-Requires-Root: yes -Package: bor -Architecture: arm64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. 
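Editor's note on the pool-sizing theme running through this diff: builder/files/config.toml, the CLI defaults in internal/cli/server/config.go, docs/cli/server.md, the testdata fixture and the mainnet-v1 packaging templates shown above all move globalslots/globalqueue from 32768 to 131072 and accountqueue from 16 to 64. A small standalone sketch of the resulting pool configuration; the struct re-declares the relevant TxPoolConfig fields locally so it compiles on its own:

```go
package main

import (
	"fmt"
	"time"
)

// TxPoolConfig mirrors the fields touched by this change in
// internal/cli/server/config.go (re-declared here for the example).
type TxPoolConfig struct {
	PriceLimit   uint64
	PriceBump    uint64
	AccountSlots uint64
	GlobalSlots  uint64
	AccountQueue uint64
	GlobalQueue  uint64
	LifeTime     time.Duration
}

func main() {
	// New defaults after this change: four times the previous global capacity
	// (32768 -> 131072) and a deeper per-account queue (16 -> 64).
	cfg := TxPoolConfig{
		PriceLimit:   25_000_000_000, // 25 gwei, bor's default price limit
		PriceBump:    10,
		AccountSlots: 16,
		GlobalSlots:  131072,
		AccountQueue: 64,
		GlobalQueue:  131072,
		LifeTime:     3 * time.Hour, // server default; the packaging templates use 1h30m
	}
	fmt.Printf("%+v\n", cfg)
}
```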
- diff --git a/packaging/templates/package_scripts/control.profile.amd64 b/packaging/templates/package_scripts/control.profile.amd64 deleted file mode 100644 index 40ab62ac87..0000000000 --- a/packaging/templates/package_scripts/control.profile.amd64 +++ /dev/null @@ -1,14 +0,0 @@ -Source: bor-profile -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Rules-Requires-Root: yes -Package: bor-profile -Architecture: amd64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. - - diff --git a/packaging/templates/package_scripts/control.profile.arm64 b/packaging/templates/package_scripts/control.profile.arm64 deleted file mode 100644 index 16ec4c00ed..0000000000 --- a/packaging/templates/package_scripts/control.profile.arm64 +++ /dev/null @@ -1,12 +0,0 @@ -Source: bor-profile -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Rules-Requires-Root: yes -Package: bor-profile -Architecture: arm64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. diff --git a/packaging/templates/package_scripts/control.validator b/packaging/templates/package_scripts/control.validator deleted file mode 100644 index 73a5c0fb3b..0000000000 --- a/packaging/templates/package_scripts/control.validator +++ /dev/null @@ -1,12 +0,0 @@ -Source: bor-profile -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Package: bor-profile -Rules-Requires-Root: yes -Architecture: amd64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. diff --git a/packaging/templates/package_scripts/control.validator.arm64 b/packaging/templates/package_scripts/control.validator.arm64 deleted file mode 100644 index d122a2328e..0000000000 --- a/packaging/templates/package_scripts/control.validator.arm64 +++ /dev/null @@ -1,13 +0,0 @@ -Source: bor-profile -Version: 1.5.3 -Section: develop -Priority: standard -Maintainer: Polygon -Build-Depends: debhelper-compat (= 13) -Rules-Requires-Root: yes -Package: bor-profile -Architecture: arm64 -Multi-Arch: foreign -Depends: -Description: This is the bor package from Polygon Technology. 
- diff --git a/params/version.go b/params/version.go index 3bd5d0572a..03cd330b7d 100644 --- a/params/version.go +++ b/params/version.go @@ -23,7 +23,7 @@ import ( const ( VersionMajor = 1 // Major version component of the current release VersionMinor = 5 // Minor version component of the current release - VersionPatch = 3 // Patch version component of the current release + VersionPatch = 4 // Patch version component of the current release VersionMeta = "" // Version metadata to append to the version string ) diff --git a/scripts/updateVersion.sh b/scripts/updateVersion.sh index ab8f3aedfa..ac29f075b2 100755 --- a/scripts/updateVersion.sh +++ b/scripts/updateVersion.sh @@ -51,22 +51,6 @@ fi echo "" echo "New version is: $version" -# update version in all the 6 templates -replace="Version: "$version -fileArray=( - "${DIR}/../packaging/templates/package_scripts/control" - "${DIR}/../packaging/templates/package_scripts/control.arm64" - "${DIR}/../packaging/templates/package_scripts/control.profile.amd64" - "${DIR}/../packaging/templates/package_scripts/control.profile.arm64" - "${DIR}/../packaging/templates/package_scripts/control.validator" - "${DIR}/../packaging/templates/package_scripts/control.validator.arm64" -) -for file in ${fileArray[@]}; do - # get the line starting with `Version` in the control file and store it in the $temp variable - temp=$(grep "^Version.*" $file) - sed -i '' "s%$temp%$replace%" $file -done - # update version in ../params/version.go versionFile="${DIR}/../params/version.go" sed -i '' "s% = .*// Major% = $VersionMajor // Major%g" $versionFile
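Editor's note on the release plumbing: with the six static Debian control templates deleted and the corresponding sed loop dropped from scripts/updateVersion.sh, the release version is maintained in one place, params/version.go, bumped here from 1.5.3 to 1.5.4. Assuming bor follows the usual go-ethereum pattern, the constants above are assembled into the user-facing version string roughly like this (a sketch, not the verbatim file):

```go
package params

import "fmt"

const (
	VersionMajor = 1  // Major version component of the current release
	VersionMinor = 5  // Minor version component of the current release
	VersionPatch = 4  // Patch version component of the current release
	VersionMeta  = "" // Version metadata to append to the version string
)

// Version holds the textual version string, e.g. "1.5.4".
var Version = func() string {
	return fmt.Sprintf("%d.%d.%d", VersionMajor, VersionMinor, VersionPatch)
}()

// VersionWithMeta appends the metadata when present, e.g. "1.5.4-stable".
var VersionWithMeta = func() string {
	v := Version
	if VersionMeta != "" {
		v += "-" + VersionMeta
	}
	return v
}()
```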