aboutsummaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
Diffstat (limited to 'test')
-rw-r--r--test/blockchain_test.go149
-rw-r--r--test/blockinfodatabase_test.go40
-rw-r--r--test/chainwriter_test.go129
-rw-r--r--test/coindatabase_test.go96
-rw-r--r--test/utils.go207
5 files changed, 621 insertions, 0 deletions
diff --git a/test/blockchain_test.go b/test/blockchain_test.go
new file mode 100644
index 0000000..412bb46
--- /dev/null
+++ b/test/blockchain_test.go
@@ -0,0 +1,149 @@
+package test
+
+import (
+ "Chain/pkg/blockchain"
+ "Chain/pkg/utils"
+ "os"
+ "testing"
+)
+
+// TestNewChain verifies that a freshly constructed blockchain starts at the
+// genesis block, creates both level dbs on disk, and has no unsafe hashes.
+func TestNewChain(t *testing.T) {
+	defer cleanUp()
+	chain := blockchain.New(blockchain.DefaultConfig())
+	if chain.Length != 1 {
+		t.Errorf("Expected chain length: %v\n Actual chain length: %v", 1, chain.Length)
+	}
+	if _, err := os.Stat("./blockinfodata"); os.IsNotExist(err) {
+		t.Errorf("Did not create leveldb blockinfodata")
+	}
+	if _, err := os.Stat("./coindata"); os.IsNotExist(err) {
+		t.Errorf("Did not create leveldb coindata")
+	}
+	if len(chain.UnsafeHashes) != 0 {
+		t.Errorf("unsafe hashes not initialized properly")
+	}
+	if chain.LastBlock == nil {
+		t.Errorf("Did not initialize last block")
+	}
+}
+
+// TestHandleAppendingBlock appends a single valid block and checks that the
+// chain grows by one and that the tip (hash and block) moves to it.
+func TestHandleAppendingBlock(t *testing.T) {
+	defer cleanUp()
+	chain := blockchain.New(blockchain.DefaultConfig())
+	appended := MakeBlockFromPrev(chain.LastBlock)
+	chain.HandleBlock(appended)
+	if chain.Length != 2 {
+		t.Errorf("Expected chain length: %v\n Actual chain length: %v", 2, chain.Length)
+	}
+	if chain.LastHash != appended.Hash() {
+		t.Errorf("Expected last hash: %v\nActual last hash: %v", appended.Hash(), chain.LastHash)
+	}
+	if chain.LastBlock != appended {
+		t.Errorf("Expected block: %v\n Actual block: %v", appended, chain.LastBlock)
+	}
+}
+
+// TestHandleForkingBlock grows a main chain and a competing fork in
+// lockstep. On the final iteration only the forking block is handled, so
+// the fork ends up one block longer and the chain is expected to end on it.
+func TestHandleForkingBlock(t *testing.T) {
+ defer cleanUp()
+ bc := blockchain.New(blockchain.DefaultConfig())
+ currBlock := bc.LastBlock
+ currForkingBlock := bc.LastBlock
+
+ for i := 0; i < 4; i++ {
+ newBlock := MakeBlockFromPrev(currBlock)
+ newForkingBlock := MakeBlockFromPrev(currForkingBlock)
+ // A different header version makes the fork block hash differently.
+ newForkingBlock.Header.Version = 2
+ if newBlock.Hash() == newForkingBlock.Hash() {
+ t.Errorf("Hashes should not be the same")
+ }
+ // Skip the main-chain block on the last iteration so the fork
+ // becomes the longer branch.
+ if i < 3 {
+ bc.HandleBlock(newBlock)
+ }
+ bc.HandleBlock(newForkingBlock)
+ currBlock = newBlock
+ currForkingBlock = newForkingBlock
+ }
+ // Genesis plus the 4 fork blocks.
+ if bc.Length != 5 {
+ t.Errorf("Expected chain length: %v\n Actual chain length: %v", 5, bc.Length)
+ }
+ if bc.LastHash != currForkingBlock.Hash() {
+ t.Errorf("Expected last hash: %v\nActual last hash: %v", currForkingBlock.Hash(), bc.LastHash)
+ }
+ if bc.LastBlock != currForkingBlock {
+ t.Errorf("Expected block: %v\n Actual block: %v", currForkingBlock, bc.LastBlock)
+ }
+}
+
+// TestHandleInvalidBlock submits a block whose parent was never handled by
+// the chain and asserts that the chain is left completely untouched.
+func TestHandleInvalidBlock(t *testing.T) {
+	defer cleanUp()
+	chain := blockchain.New(blockchain.DefaultConfig())
+	genesis := chain.LastBlock
+	// The grandchild's parent (the child) is never given to the chain.
+	orphan := MakeBlockFromPrev(MakeBlockFromPrev(genesis))
+	chain.HandleBlock(orphan)
+	if chain.Length != 1 {
+		t.Errorf("Expected chain length: %v\n Actual chain length: %v", 1, chain.Length)
+	}
+	if chain.LastHash != genesis.Hash() {
+		t.Errorf("Expected last hash: %v\nActual last hash: %v", genesis.Hash(), chain.LastHash)
+	}
+	if chain.LastBlock != genesis {
+		t.Errorf("Expected block: %v\n Actual block: %v", genesis, chain.LastBlock)
+	}
+}
+
+// TestHandle50Blocks appends fifty consecutive blocks and confirms the
+// final chain length and tip.
+func TestHandle50Blocks(t *testing.T) {
+	defer cleanUp()
+	chain := blockchain.New(blockchain.DefaultConfig())
+	tip := chain.LastBlock
+	for i := 0; i < 50; i++ {
+		chain.HandleBlock(MakeBlockFromPrev(tip))
+		tip = chain.LastBlock
+		utils.Debug.Printf("iteration: %v/49", i)
+	}
+	if chain.Length != 51 {
+		t.Errorf("Expected chain length: %v\n Actual chain length: %v", 51, chain.Length)
+	}
+	if chain.LastHash != tip.Hash() {
+		t.Errorf("Expected last hash: %v\nActual last hash: %v", tip.Hash(), chain.LastHash)
+	}
+	if chain.LastBlock != tip {
+		t.Errorf("Expected block: %v\n Actual block: %v", tip, chain.LastBlock)
+	}
+}
+
+// TestHandle2Forks grows two competing branches, alternating which branch's
+// block is handled first on iteration 1, and expects the chain to end on
+// the main (non-forking) branch.
+func TestHandle2Forks(t *testing.T) {
+ defer cleanUp()
+ bc := blockchain.New(blockchain.DefaultConfig())
+ currBlock := bc.LastBlock
+ currForkingBlock := bc.LastBlock
+
+ for i := 0; i < 3; i++ {
+ newBlock := MakeBlockFromPrev(currBlock)
+ newForkingBlock := MakeBlockFromPrev(currForkingBlock)
+ // A different header version makes the fork block hash differently.
+ newForkingBlock.Header.Version = 2
+ if newBlock.Hash() == newForkingBlock.Hash() {
+ t.Errorf("Hashes should not be the same")
+ }
+ // Flip the handling order on the middle iteration to exercise
+ // switching between equal-length branches.
+ if i != 1 {
+ bc.HandleBlock(newBlock)
+ bc.HandleBlock(newForkingBlock)
+ } else {
+ bc.HandleBlock(newForkingBlock)
+ bc.HandleBlock(newBlock)
+ }
+ currBlock = newBlock
+ currForkingBlock = newForkingBlock
+ }
+ // Genesis plus 3 blocks on the winning branch.
+ if bc.Length != 4 {
+ t.Errorf("Expected chain length: %v\n Actual chain length: %v", 4, bc.Length)
+ }
+ if bc.LastHash != currBlock.Hash() {
+ t.Errorf("Expected last hash: %v\nActual last hash: %v", currBlock.Hash(), bc.LastHash)
+ }
+ if bc.LastBlock != currBlock {
+ t.Errorf("Expected block: %v\n Actual block: %v", currBlock, bc.LastBlock)
+ }
+}
diff --git a/test/blockinfodatabase_test.go b/test/blockinfodatabase_test.go
new file mode 100644
index 0000000..5205c99
--- /dev/null
+++ b/test/blockinfodatabase_test.go
@@ -0,0 +1,40 @@
+package test
+
+import (
+ "Chain/pkg/blockchain/blockinfodatabase"
+ "reflect"
+ "testing"
+)
+
+// TestStoreBlockRecord ensures a block record can be persisted without
+// panicking or corrupting the database.
+func TestStoreBlockRecord(t *testing.T) {
+	defer cleanUp()
+	db := blockinfodatabase.New(blockinfodatabase.DefaultConfig())
+	db.StoreBlockRecord("hash", MockedBlockRecord())
+}
+
+// TestGetSameRecord stores a record and verifies the retrieved copy is
+// deeply equal to what was stored.
+func TestGetSameRecord(t *testing.T) {
+	defer cleanUp()
+	db := blockinfodatabase.New(blockinfodatabase.DefaultConfig())
+	stored := MockedBlockRecord()
+	db.StoreBlockRecord("hash", stored)
+	if loaded := db.GetBlockRecord("hash"); !reflect.DeepEqual(stored, loaded) {
+		t.Errorf("Block records not equal")
+	}
+}
+
+// TestGetDifferentRecords stores two distinct records under different keys
+// and verifies that the database does not conflate them.
+func TestGetDifferentRecords(t *testing.T) {
+	defer cleanUp()
+	db := blockinfodatabase.New(blockinfodatabase.DefaultConfig())
+	first := MockedBlockRecord()
+	second := MockedBlockRecord()
+	second.UndoEndOffset = 20
+	db.StoreBlockRecord("hash", first)
+	db.StoreBlockRecord("hash2", second)
+	if reflect.DeepEqual(db.GetBlockRecord("hash"), db.GetBlockRecord("hash2")) {
+		t.Errorf("Block records should not be equal")
+	}
+}
diff --git a/test/chainwriter_test.go b/test/chainwriter_test.go
new file mode 100644
index 0000000..4ea22ac
--- /dev/null
+++ b/test/chainwriter_test.go
@@ -0,0 +1,129 @@
+package test
+
+import (
+ "Chain/pkg/block"
+ "Chain/pkg/blockchain/chainwriter"
+ "google.golang.org/protobuf/proto"
+ "reflect"
+ "testing"
+)
+
+// TestStoreOrphanBlock stores a block with an empty undo block and checks
+// that a block file is recorded while the undo file stays empty.
+func TestStoreOrphanBlock(t *testing.T) {
+	defer cleanUp()
+	writer := chainwriter.New(chainwriter.DefaultConfig())
+	record := writer.StoreBlock(MockedBlock(), &chainwriter.UndoBlock{}, 0)
+	if record.BlockFile != "data/block_0.txt" {
+		t.Errorf("Expected file name: %v Actual file name: %v", "data/block_0.txt", record.BlockFile)
+	}
+	if record.UndoFile != "" {
+		t.Errorf("Expected file name: %v Actual file name: %v", "", record.UndoFile)
+	}
+}
+
+// TestStoreBlock stores a block together with a populated undo block and
+// checks that both file names are recorded in the block record.
+func TestStoreBlock(t *testing.T) {
+	defer cleanUp()
+	cw := chainwriter.New(chainwriter.DefaultConfig())
+	bl := MockedBlock()
+	ub := MockedUndoBlock()
+	br := cw.StoreBlock(bl, ub, 0)
+	if br.BlockFile != "data/block_0.txt" {
+		// Report the same name the check compares against (was "data/block_0").
+		t.Errorf("Expected file name: %v Actual file name: %v", "data/block_0.txt", br.BlockFile)
+	}
+	if br.UndoFile != "data/undo_0.txt" {
+		// Report the expected undo file name (was the empty string).
+		t.Errorf("Expected file name: %v Actual file name: %v", "data/undo_0.txt", br.UndoFile)
+	}
+}
+
+// TestWriteBlock serializes a block, writes it through the chain writer,
+// and validates the returned FileInfo offsets and file name.
+func TestWriteBlock(t *testing.T) {
+	defer cleanUp()
+	cw := chainwriter.New(chainwriter.DefaultConfig())
+	b := MockedBlock()
+	pb := block.EncodeBlock(b)
+	serializedBlock, err := proto.Marshal(pb)
+	if err != nil {
+		// Previously ignored; a marshal failure would invalidate the test.
+		t.Fatalf("could not marshal block: %v", err)
+	}
+	fi := cw.WriteBlock(serializedBlock)
+	if fi.StartOffset != 0 {
+		t.Errorf("Expected start offset: %v\nActual start offset: %v", 0, fi.StartOffset)
+	}
+	if int(fi.EndOffset) != len(serializedBlock) {
+		// Report the real expected value rather than a hard-coded 0.
+		t.Errorf("Expected end offset: %v\nActual end offset: %v", len(serializedBlock), fi.EndOffset)
+	}
+	if fi.FileName != "data/block_0.txt" {
+		t.Errorf("Expected file name: %v Actual file name: %v", "data/block_0.txt", fi.FileName)
+	}
+}
+
+// TestWriteUndoBlock serializes an undo block, writes it through the chain
+// writer, and validates the returned FileInfo offsets and file name.
+func TestWriteUndoBlock(t *testing.T) {
+	defer cleanUp()
+	cw := chainwriter.New(chainwriter.DefaultConfig())
+	ub := MockedUndoBlock()
+	pub := chainwriter.EncodeUndoBlock(ub)
+	serializedUndoBlock, err := proto.Marshal(pub)
+	if err != nil {
+		// Previously ignored; a marshal failure would invalidate the test.
+		t.Fatalf("could not marshal undo block: %v", err)
+	}
+	ufi := cw.WriteUndoBlock(serializedUndoBlock)
+	if ufi.StartOffset != 0 {
+		t.Errorf("Expected start offset: %v\nActual start offset: %v", 0, ufi.StartOffset)
+	}
+	if int(ufi.EndOffset) != len(serializedUndoBlock) {
+		// Report the real expected value rather than a hard-coded 0.
+		t.Errorf("Expected end offset: %v\nActual end offset: %v", len(serializedUndoBlock), ufi.EndOffset)
+	}
+	if ufi.FileName != "data/undo_0.txt" {
+		// The message previously claimed "data/block_0"; report the undo name.
+		t.Errorf("Expected file name: %v Actual file name: %v", "data/undo_0.txt", ufi.FileName)
+	}
+}
+
+// TestReadBlock round-trips a block through the chain writer and checks
+// that the block read back equals the one written.
+func TestReadBlock(t *testing.T) {
+	defer cleanUp()
+	writer := chainwriter.New(chainwriter.DefaultConfig())
+	original := MockedBlock()
+	serialized, _ := proto.Marshal(block.EncodeBlock(original))
+	info := writer.WriteBlock(serialized)
+	if restored := writer.ReadBlock(info); !reflect.DeepEqual(original, restored) {
+		t.Errorf("Expected block: %v\nActual block: %v", original, restored)
+	}
+}
+
+// TestReadUndoBlock round-trips an undo block through the chain writer and
+// checks that the undo block read back equals the one written.
+func TestReadUndoBlock(t *testing.T) {
+	defer cleanUp()
+	writer := chainwriter.New(chainwriter.DefaultConfig())
+	original := MockedUndoBlock()
+	serialized, _ := proto.Marshal(chainwriter.EncodeUndoBlock(original))
+	info := writer.WriteUndoBlock(serialized)
+	if restored := writer.ReadUndoBlock(info); !reflect.DeepEqual(original, restored) {
+		t.Errorf("Expected block: %v\nActual block: %v", original, restored)
+	}
+}
+
+// TestRead100Blocks writes 100 distinct blocks with a small max block file
+// size, then reads each one back by its FileInfo and compares.
+func TestRead100Blocks(t *testing.T) {
+ defer cleanUp()
+ config := chainwriter.DefaultConfig()
+ // Small cap so writes presumably roll over into multiple block files
+ // — TODO confirm against chainwriter's file-rotation logic.
+ config.MaxBlockFileSize = 100
+ cw := chainwriter.New(config)
+
+ var blocks []*block.Block
+ var fileInfos []*chainwriter.FileInfo
+
+ // write blocks
+ for i := 0; i < 100; i++ {
+ b := MockedBlock()
+ // A unique nonce makes every block distinct.
+ b.Header.Nonce = uint32(i)
+ blocks = append(blocks, b)
+ pb := block.EncodeBlock(b)
+ serializedBlock, _ := proto.Marshal(pb)
+ fi := cw.WriteBlock(serializedBlock)
+ fileInfos = append(fileInfos, fi)
+ }
+
+ // read blocks
+ for i := 0; i < 100; i++ {
+ b := cw.ReadBlock(fileInfos[i])
+ if !reflect.DeepEqual(blocks[i], b) {
+ t.Errorf("Block: %v/99\nExpected block: %v\nActual block: %v", i, blocks[i], b)
+ }
+ }
+
+}
diff --git a/test/coindatabase_test.go b/test/coindatabase_test.go
new file mode 100644
index 0000000..8391fba
--- /dev/null
+++ b/test/coindatabase_test.go
@@ -0,0 +1,96 @@
+package test
+
+import (
+ "Chain/pkg/block"
+ "Chain/pkg/blockchain/chainwriter"
+ "Chain/pkg/blockchain/coindatabase"
+ "reflect"
+ "testing"
+)
+
+// TestValidateValidBlock stores the genesis block's coins and verifies
+// that a direct child block validates against them.
+func TestValidateValidBlock(t *testing.T) {
+	defer cleanUp()
+	genesis := GenesisBlock()
+	db := coindatabase.New(coindatabase.DefaultConfig())
+	db.StoreBlock(genesis.Transactions, true)
+	child := MakeBlockFromPrev(genesis)
+	if !db.ValidateBlock(child.Transactions) {
+		t.Errorf("block1 should have validated")
+	}
+}
+
+// TestValidateInvalidBlock builds a grandchild block whose parent's coins
+// were never stored and verifies that it fails validation.
+func TestValidateInvalidBlock(t *testing.T) {
+	defer cleanUp()
+	genesis := GenesisBlock()
+	db := coindatabase.New(coindatabase.DefaultConfig())
+	db.StoreBlock(genesis.Transactions, true)
+	grandchild := MakeBlockFromPrev(MakeBlockFromPrev(genesis))
+	if db.ValidateBlock(grandchild.Transactions) {
+		t.Errorf("block2 should not have validated")
+	}
+}
+
+// TestUndoCoins stores three generations of blocks, undoes the newest one,
+// and verifies that the coins it spent are restored while the coins it
+// created are deleted.
+func TestUndoCoins(t *testing.T) {
+ defer cleanUp()
+ genBlock := GenesisBlock()
+ coinDB := coindatabase.New(coindatabase.DefaultConfig())
+ coinDB.StoreBlock(genBlock.Transactions, true)
+ block1 := MakeBlockFromPrev(genBlock)
+ coinDB.StoreBlock(block1.Transactions, true)
+ block2 := MakeBlockFromPrev(block1)
+ // Build the undo block before storing block2 so it captures the
+ // inputs block2 spends.
+ ub2 := UndoBlockFromBlock(block2)
+ coinDB.StoreBlock(block2.Transactions, true)
+ coinDB.UndoCoins([]*block.Block{block2}, []*chainwriter.UndoBlock{ub2})
+ // make sure coins from undo block are put back
+ for i := 0; i < len(ub2.TransactionInputHashes); i++ {
+ cl := coindatabase.CoinLocator{
+ ReferenceTransactionHash: ub2.TransactionInputHashes[i],
+ OutputIndex: ub2.OutputIndexes[i],
+ }
+ coin := coinDB.GetCoin(cl)
+ if coin == nil {
+ t.Errorf("coin should exist")
+ } else {
+ if coin.IsSpent {
+ t.Errorf("coin should not be spent")
+ }
+ }
+ }
+ // make sure coins from block are deleted
+ for _, tx := range block2.Transactions {
+ txHash := tx.Hash()
+ for i := 0; i < len(tx.Outputs); i++ {
+ cl := coindatabase.CoinLocator{
+ ReferenceTransactionHash: txHash,
+ OutputIndex: uint32(i),
+ }
+ if coin := coinDB.GetCoin(cl); coin != nil {
+ t.Errorf("Coin should not exist")
+ }
+ }
+ }
+}
+
+// TestGetCoin stores the genesis block and fetches the first coin it
+// produced, checking both the spent flag and the underlying output.
+func TestGetCoin(t *testing.T) {
+	defer cleanUp()
+	genesis := GenesisBlock()
+	db := coindatabase.New(coindatabase.DefaultConfig())
+	db.StoreBlock(genesis.Transactions, true)
+	locator := coindatabase.CoinLocator{
+		ReferenceTransactionHash: genesis.Transactions[0].Hash(),
+		OutputIndex:              0,
+	}
+	coin := db.GetCoin(locator)
+	if coin.IsSpent {
+		t.Errorf("Expected coin.IsSpent: %v\nActual coin.IsSpent:%v", false, coin.IsSpent)
+	}
+	if !reflect.DeepEqual(coin.TransactionOutput, genesis.Transactions[0].Outputs[0]) {
+		t.Errorf(
+			"Expected transaction output: %v\nActual transactionoutput:%v",
+			genesis.Transactions[0].Outputs[0],
+			coin.TransactionOutput,
+		)
+	}
+}
diff --git a/test/utils.go b/test/utils.go
new file mode 100644
index 0000000..cd0c63e
--- /dev/null
+++ b/test/utils.go
@@ -0,0 +1,207 @@
+package test
+
+import (
+ "Chain/pkg/block"
+ "Chain/pkg/blockchain/blockinfodatabase"
+ "Chain/pkg/blockchain/chainwriter"
+ "fmt"
+ "os"
+)
+
+// cleanUp removes any directories created by a test.
+// It is meant to be deferred at the start of every test so the two level
+// dbs and the chain writer's data directory do not leak between tests.
+func cleanUp() {
+ removeBlockInfoDB()
+ removeCoinDB()
+ removeDataDB()
+}
+
+// removeCoinDB removes the coin database's level db, if present.
+func removeCoinDB() {
+	if _, err := os.Stat("./coindata"); !os.IsNotExist(err) {
+		if err2 := os.RemoveAll("./coindata"); err2 != nil {
+			// fmt.Errorf only constructs an error; the original discarded
+			// it, so the failure was silent. Print it instead.
+			fmt.Printf("could not remove leveldb coindata: %v\n", err2)
+		}
+	}
+}
+
+// removeBlockInfoDB removes the block info database's level db, if present.
+func removeBlockInfoDB() {
+	if _, err := os.Stat("./blockinfodata"); !os.IsNotExist(err) {
+		if err2 := os.RemoveAll("./blockinfodata"); err2 != nil {
+			// fmt.Errorf only constructs an error; the original discarded
+			// it, so the failure was silent. Print it instead.
+			fmt.Printf("could not remove leveldb blockinfodata: %v\n", err2)
+		}
+	}
+}
+
+// removeDataDB removes the chain writer's data directory, if present.
+func removeDataDB() {
+	if _, err := os.Stat("./data"); !os.IsNotExist(err) {
+		if err2 := os.RemoveAll("./data"); err2 != nil {
+			// fmt.Errorf only constructs an error; the original discarded
+			// it, so the failure was silent. Print it instead.
+			fmt.Printf("could not remove directory data: %v\n", err2)
+		}
+	}
+}
+
+// MockedHeader returns a mocked Header. Every field is its zero value, so
+// an empty composite literal is equivalent to listing each field as 0/"".
+func MockedHeader() *block.Header {
+	return &block.Header{}
+}
+
+// MockedBlockRecord returns a mocked BlockRecord backed by MockedHeader.
+// Only the fields with non-zero values are assigned explicitly; all other
+// fields keep their zero values, matching the original literal.
+func MockedBlockRecord() *blockinfodatabase.BlockRecord {
+	record := &blockinfodatabase.BlockRecord{Header: MockedHeader()}
+	record.BlockFile = "./blockinfodata/block_0"
+	record.BlockEndOffset = 10
+	return record
+}
+
+// MockedTransactionInput returns a mocked TransactionInput. Every field is
+// its zero value, so an empty composite literal suffices.
+func MockedTransactionInput() *block.TransactionInput {
+	return &block.TransactionInput{}
+}
+
+// MockedTransactionOutput returns a mocked TransactionOutput. Every field
+// is its zero value, so an empty composite literal suffices.
+func MockedTransactionOutput() *block.TransactionOutput {
+	return &block.TransactionOutput{}
+}
+
+// MockedTransaction returns a mocked Transaction with one mocked input
+// and one mocked output. (The comment previously misnamed this function
+// as MockedBlock.)
+func MockedTransaction() *block.Transaction {
+ return &block.Transaction{
+ Version: 0,
+ Inputs: []*block.TransactionInput{MockedTransactionInput()},
+ Outputs: []*block.TransactionOutput{MockedTransactionOutput()},
+ LockTime: 0,
+ }
+}
+
+// MockedBlock returns a mocked Block holding one mocked Transaction.
+func MockedBlock() *block.Block {
+	header := MockedHeader()
+	txs := []*block.Transaction{MockedTransaction()}
+	return &block.Block{Header: header, Transactions: txs}
+}
+
+// MockedUndoBlock returns a mocked UndoBlock containing a single entry in
+// each of its parallel slices.
+func MockedUndoBlock() *chainwriter.UndoBlock {
+	hashes := []string{""}
+	indexes := []uint32{1}
+	amounts := []uint32{1}
+	scripts := []string{""}
+	return &chainwriter.UndoBlock{
+		TransactionInputHashes: hashes,
+		OutputIndexes:          indexes,
+		Amounts:                amounts,
+		LockingScripts:         scripts,
+	}
+}
+
+// GenesisBlock returns the genesis block for testing purposes.
+// It holds a single coinbase-style transaction (no inputs) paying
+// 1,000,000,000 to "pubkey".
+func GenesisBlock() *block.Block {
+ txo := &block.TransactionOutput{
+ Amount: 1_000_000_000,
+ LockingScript: "pubkey",
+ }
+ genTx := &block.Transaction{
+ Version: 0,
+ Inputs: nil,
+ Outputs: []*block.TransactionOutput{txo},
+ LockTime: 0,
+ }
+ return &block.Block{
+ Header: &block.Header{
+ Version: 0,
+ PreviousHash: "",
+ MerkleRoot: "",
+ DifficultyTarget: "",
+ Nonce: 0,
+ Timestamp: 0,
+ },
+ Transactions: []*block.Transaction{genTx},
+ }
+}
+
+// MakeBlockFromPrev creates a new Block from an existing Block,
+// using the old Block's TransactionOutputs as TransactionInputs
+// for the new Transaction.
+// Each output of each previous transaction becomes its own new
+// single-input, single-output transaction whose output amount is half of
+// the consumed output's amount.
+func MakeBlockFromPrev(b *block.Block) *block.Block {
+ newHeader := &block.Header{
+ Version: 0,
+ PreviousHash: b.Hash(),
+ MerkleRoot: "",
+ DifficultyTarget: "",
+ Nonce: 0,
+ Timestamp: 0,
+ }
+
+ var transactions []*block.Transaction
+
+ for _, tx := range b.Transactions {
+ txHash := tx.Hash()
+ for i, txo := range tx.Outputs {
+ // Spend output i of the previous transaction.
+ txi := &block.TransactionInput{
+ ReferenceTransactionHash: txHash,
+ OutputIndex: uint32(i),
+ UnlockingScript: "",
+ }
+ // Halve the amount (integer division) for the new output.
+ txo1 := &block.TransactionOutput{
+ Amount: txo.Amount / 2,
+ LockingScript: "",
+ }
+ // Version is set to the output index, which keeps the new
+ // transactions distinct from one another.
+ tx1 := &block.Transaction{
+ Version: uint32(i),
+ Inputs: []*block.TransactionInput{txi},
+ Outputs: []*block.TransactionOutput{txo1},
+ LockTime: 0,
+ }
+ transactions = append(transactions, tx1)
+ }
+ }
+ return &block.Block{
+ Header: newHeader,
+ Transactions: transactions,
+ }
+}
+
+// UndoBlockFromBlock creates an UndoBlock from a Block.
+// This function only works because we're not using inputs from
+// other Blocks. It also does not actually take care of amounts
+// or public keys.
+func UndoBlockFromBlock(b *block.Block) *chainwriter.UndoBlock {
+ var transactionHashes []string
+ var outputIndexes []uint32
+ var amounts []uint32
+ var lockingScripts []string
+ for _, tx := range b.Transactions {
+ for _, txi := range tx.Inputs {
+ transactionHashes = append(transactionHashes, txi.ReferenceTransactionHash)
+ outputIndexes = append(outputIndexes, txi.OutputIndex)
+ amounts = append(amounts, 0)
+ lockingScripts = append(lockingScripts, "")
+ }
+ }
+ return &chainwriter.UndoBlock{
+ TransactionInputHashes: transactionHashes,
+ OutputIndexes: outputIndexes,
+ Amounts: amounts,
+ LockingScripts: lockingScripts,
+ }
+}