Merge branch 'wallet_tests_stm'

Darko Brdareski 2021-12-15 12:28:06 +01:00
commit be1123b5df
52 changed files with 459 additions and 35 deletions
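
The changed files all follow the same annotation pattern: a suite-level tag (`//stm: #unit` or `//stm: #integration`) is added as the first line of each test file, and scenario tags (`//stm: @SCENARIO_ID_NNN`, one or more IDs per comma-separated line) are placed directly above the test function or API call they cover. The short Go sketch below only illustrates that pattern; the package name, test name, and scenario IDs in it are placeholders, not taken from this commit.

//stm: #unit
package example_test

import "testing"

// TestAnnotated shows where the tags sit: the file-level tag above marks the
// kind of suite, and the //stm: line inside the function names the scenario(s)
// the following calls exercise. The IDs here are illustrative placeholders.
func TestAnnotated(t *testing.T) {
	//stm: @EXAMPLE_SCENARIO_001, @EXAMPLE_SCENARIO_002
	if got := 1 + 1; got != 2 {
		t.Fatalf("unexpected sum: %d", got)
	}
}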

View File

@@ -1,3 +1,4 @@
//stm: #unit
package messagesigner
import (
@@ -60,6 +61,7 @@ func TestMessageSignerSignMessage(t *testing.T) {
to2, err := w.WalletNew(ctx, types.KTSecp256k1)
require.NoError(t, err)
//stm: @CHAIN_MESSAGE_SIGNER_NEW_SIGNER_001, @CHAIN_MESSAGE_SIGNER_SIGN_MESSAGE_001, @CHAIN_MESSAGE_SIGNER_SIGN_MESSAGE_005
type msgSpec struct {
msg *types.Message
mpoolNonce [1]uint64

View File

@@ -1,3 +1,4 @@
//stm:#unit
package rand_test
import (
@@ -55,11 +56,13 @@ func TestNullRandomnessV1(t *testing.T) {
randEpoch := ts.TipSet.TipSet().Height() - 2
//stm: @BLOCKCHAIN_RAND_GET_BEACON_RANDOMNESS_V1_01, @BLOCKCHAIN_RAND_EXTRACT_BEACON_ENTRY_FOR_EPOCH_01, @BLOCKCHAIN_RAND_GET_BEACON_RANDOMNESS_TIPSET_02
rand1, err := cg.StateManager().GetRandomnessFromBeacon(ctx, pers, randEpoch, entropy, ts.TipSet.TipSet().Key())
if err != nil {
t.Fatal(err)
}
//stm: @BLOCKCHAIN_BEACON_GET_BEACON_FOR_EPOCH_01
bch := cg.BeaconSchedule().BeaconForEpoch(randEpoch).Entry(ctx, uint64(beforeNullHeight)+offset)
select {
@@ -68,6 +71,7 @@ func TestNullRandomnessV1(t *testing.T) {
t.Fatal(resp.Err)
}
//stm: @BLOCKCHAIN_RAND_DRAW_RANDOMNESS_01
rand2, err := rand.DrawRandomness(resp.Entry.Data, pers, randEpoch, entropy)
if err != nil {
t.Fatal(err)
@@ -131,11 +135,13 @@ func TestNullRandomnessV2(t *testing.T) {
randEpoch := ts.TipSet.TipSet().Height() - 2
//stm: @BLOCKCHAIN_RAND_GET_BEACON_RANDOMNESS_V2_01
rand1, err := cg.StateManager().GetRandomnessFromBeacon(ctx, pers, randEpoch, entropy, ts.TipSet.TipSet().Key())
if err != nil {
t.Fatal(err)
}
//stm: @BLOCKCHAIN_BEACON_GET_BEACON_FOR_EPOCH_01
bch := cg.BeaconSchedule().BeaconForEpoch(randEpoch).Entry(ctx, uint64(ts.TipSet.TipSet().Height())+offset)
select {
@@ -144,6 +150,7 @@ func TestNullRandomnessV2(t *testing.T) {
t.Fatal(resp.Err)
}
//stm: @BLOCKCHAIN_RAND_DRAW_RANDOMNESS_01, @BLOCKCHAIN_RAND_EXTRACT_BEACON_ENTRY_FOR_EPOCH_01, @BLOCKCHAIN_RAND_GET_BEACON_RANDOMNESS_TIPSET_03
// note that the randEpoch passed to DrawRandomness is still randEpoch (not the latest ts height)
rand2, err := rand.DrawRandomness(resp.Entry.Data, pers, randEpoch, entropy)
if err != nil {
@@ -212,11 +219,13 @@ func TestNullRandomnessV3(t *testing.T) {
randEpoch := ts.TipSet.TipSet().Height() - 2
//stm: @BLOCKCHAIN_RAND_GET_BEACON_RANDOMNESS_V3_01, @BLOCKCHAIN_RAND_EXTRACT_BEACON_ENTRY_FOR_EPOCH_01
rand1, err := cg.StateManager().GetRandomnessFromBeacon(ctx, pers, randEpoch, entropy, ts.TipSet.TipSet().Key())
if err != nil {
t.Fatal(err)
}
//stm: @BLOCKCHAIN_BEACON_GET_BEACON_FOR_EPOCH_01
bch := cg.BeaconSchedule().BeaconForEpoch(randEpoch).Entry(ctx, uint64(randEpoch)+offset)
select {
@@ -225,6 +234,7 @@ func TestNullRandomnessV3(t *testing.T) {
t.Fatal(resp.Err)
}
//stm: @BLOCKCHAIN_RAND_DRAW_RANDOMNESS_01
rand2, err := rand.DrawRandomness(resp.Entry.Data, pers, randEpoch, entropy)
if err != nil {
t.Fatal(err)

View File

@@ -1,3 +1,4 @@
//stm: #unit
package sub
import (
@@ -49,6 +50,7 @@ func TestFetchCidsWithDedup(t *testing.T) {
}
g := &getter{msgs}
//stm: @CHAIN_INCOMING_FETCH_MESSAGES_BY_CID_001
// the cids have a duplicate
res, err := FetchMessagesByCids(context.TODO(), g, append(cids, cids[0]))

View File

@@ -1,3 +1,4 @@
//stm: #unit
package chain_test
import (
@@ -460,6 +461,8 @@ func (tu *syncTestUtil) waitUntilSyncTarget(to int, target *types.TipSet) {
}
func TestSyncSimple(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 50
tu := prepSyncTest(t, H)
@@ -476,6 +479,8 @@ func TestSyncSimple(t *testing.T) {
}
func TestSyncMining(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 50
tu := prepSyncTest(t, H)
@@ -498,6 +503,8 @@ func TestSyncMining(t *testing.T) {
}
func TestSyncBadTimestamp(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 50
tu := prepSyncTest(t, H)
@@ -552,6 +559,8 @@ func (wpp badWpp) ComputeProof(context.Context, []proof2.SectorInfo, abi.PoStRan
}
func TestSyncBadWinningPoSt(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 15
tu := prepSyncTest(t, H)
@@ -581,6 +590,9 @@ func (tu *syncTestUtil) loadChainToNode(to int) {
}
func TestSyncFork(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -648,6 +660,9 @@ func TestSyncFork(t *testing.T) {
// A and B both include _different_ messages from sender X with nonce N (where N is the correct nonce for X).
// We can confirm that the state can be correctly computed, and that `MessagesForTipset` behaves as expected.
func TestDuplicateNonce(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -702,6 +717,7 @@ func TestDuplicateNonce(t *testing.T) {
var includedMsg cid.Cid
var skippedMsg cid.Cid
//stm: @CHAIN_STATE_SEARCH_MSG_001
r0, err0 := tu.nds[0].StateSearchMsg(context.TODO(), ts2.TipSet().Key(), msgs[0][0].Cid(), api.LookbackNoLimit, true)
r1, err1 := tu.nds[0].StateSearchMsg(context.TODO(), ts2.TipSet().Key(), msgs[1][0].Cid(), api.LookbackNoLimit, true)
@@ -743,6 +759,9 @@ func TestDuplicateNonce(t *testing.T) {
// This test asserts that a block that includes a message with bad nonce can't be synced. A nonce is "bad" if it can't
// be applied on the parent state.
func TestBadNonce(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -790,6 +809,9 @@ func TestBadNonce(t *testing.T) {
// One of the messages uses the sender's robust address, the other uses the ID address.
// Such a block is invalid and should not sync.
func TestMismatchedNoncesRobustID(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001, @CHAIN_SYNCER_STOP_001
v5h := abi.ChainEpoch(4)
tu := prepSyncTestWithV5Height(t, int(v5h+5), v5h)
@@ -802,6 +824,7 @@ func TestMismatchedNoncesRobustID(t *testing.T) {
require.NoError(t, err)
// Produce a message from the banker
//stm: @CHAIN_STATE_LOOKUP_ID_001
makeMsg := func(id bool) *types.SignedMessage {
sender := tu.g.Banker()
if id {
@@ -844,6 +867,9 @@ func TestMismatchedNoncesRobustID(t *testing.T) {
// One of the messages uses the sender's robust address, the other uses the ID address.
// Such a block is valid and should sync.
func TestMatchedNoncesRobustID(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001, @CHAIN_SYNCER_STOP_001
v5h := abi.ChainEpoch(4)
tu := prepSyncTestWithV5Height(t, int(v5h+5), v5h)
@@ -856,6 +882,7 @@ func TestMatchedNoncesRobustID(t *testing.T) {
require.NoError(t, err)
// Produce a message from the banker with specified nonce
//stm: @CHAIN_STATE_LOOKUP_ID_001
makeMsg := func(n uint64, id bool) *types.SignedMessage {
sender := tu.g.Banker()
if id {
@@ -915,6 +942,8 @@ func runSyncBenchLength(b *testing.B, l int) {
}
func TestSyncInputs(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_VALIDATE_BLOCK_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -942,6 +971,9 @@ func TestSyncInputs(t *testing.T) {
}
func TestSyncCheckpointHead(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -961,6 +993,7 @@ func TestSyncCheckpointHead(t *testing.T) {
a = tu.mineOnBlock(a, p1, []int{0}, true, false, nil, 0, true)
tu.waitUntilSyncTarget(p1, a.TipSet())
//stm: @CHAIN_SYNCER_CHECKPOINT_001
tu.checkpointTs(p1, a.TipSet().Key())
require.NoError(t, tu.g.ResyncBankerNonce(a1.TipSet()))
@@ -980,15 +1013,20 @@ func TestSyncCheckpointHead(t *testing.T) {
tu.waitUntilNodeHasTs(p1, b.TipSet().Key())
p1Head := tu.getHead(p1)
require.True(tu.t, p1Head.Equals(a.TipSet()))
//stm: @CHAIN_SYNCER_CHECK_BAD_001
tu.assertBad(p1, b.TipSet())
// Should be able to switch forks.
//stm: @CHAIN_SYNCER_CHECKPOINT_001
tu.checkpointTs(p1, b.TipSet().Key())
p1Head = tu.getHead(p1)
require.True(tu.t, p1Head.Equals(b.TipSet()))
}
func TestSyncCheckpointEarlierThanHead(t *testing.T) {
//stm: @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01, @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_SYNC_001, @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001, @CHAIN_SYNCER_STOP_001
H := 10
tu := prepSyncTest(t, H)
@@ -1008,6 +1046,7 @@ func TestSyncCheckpointEarlierThanHead(t *testing.T) {
a = tu.mineOnBlock(a, p1, []int{0}, true, false, nil, 0, true)
tu.waitUntilSyncTarget(p1, a.TipSet())
//stm: @CHAIN_SYNCER_CHECKPOINT_001
tu.checkpointTs(p1, a1.TipSet().Key())
require.NoError(t, tu.g.ResyncBankerNonce(a1.TipSet()))
@@ -1027,15 +1066,19 @@ func TestSyncCheckpointEarlierThanHead(t *testing.T) {
tu.waitUntilNodeHasTs(p1, b.TipSet().Key())
p1Head := tu.getHead(p1)
require.True(tu.t, p1Head.Equals(a.TipSet()))
//stm: @CHAIN_SYNCER_CHECK_BAD_001
tu.assertBad(p1, b.TipSet())
// Should be able to switch forks.
//stm: @CHAIN_SYNCER_CHECKPOINT_001
tu.checkpointTs(p1, b.TipSet().Key())
p1Head = tu.getHead(p1)
require.True(tu.t, p1Head.Equals(b.TipSet()))
}
func TestInvalidHeight(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001, @CHAIN_SYNCER_START_001
//stm: @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
H := 50
tu := prepSyncTest(t, H)

View File

@@ -1,3 +1,4 @@
//stm: #unit
package sealing_test
import (
@@ -28,6 +29,7 @@ import (
)
func TestCommitBatcher(t *testing.T) {
//stm: @CHAIN_STATE_MINER_PRE_COM_INFO_001, @CHAIN_STATE_MINER_INFO_001, @CHAIN_STATE_NETWORK_VERSION_001
t0123, err := address.NewFromString("t0123")
require.NoError(t, err)
@@ -147,6 +149,7 @@ func TestCommitBatcher(t *testing.T) {
}
}
//stm: @CHAIN_STATE_MINER_INFO_001, @CHAIN_STATE_NETWORK_VERSION_001, @CHAIN_STATE_MINER_GET_COLLATERAL_001
expectSend := func(expect []abi.SectorNumber, aboveBalancer, failOnePCI bool) action {
return func(t *testing.T, s *mocks.MockCommitBatcherApi, pcb *sealing.CommitBatcher) promise {
s.EXPECT().StateMinerInfo(gomock.Any(), gomock.Any(), gomock.Any()).Return(miner.MinerInfo{Owner: t0123, Worker: t0123}, nil)

View File

@@ -1,3 +1,4 @@
//stm: #unit
package sealing_test
import (
@@ -38,6 +39,7 @@ var fc = config.MinerFeeConfig{
}
func TestPrecommitBatcher(t *testing.T) {
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
t0123, err := address.NewFromString("t0123")
require.NoError(t, err)
@@ -151,6 +153,7 @@ func TestPrecommitBatcher(t *testing.T) {
}
}
//stm: @CHAIN_STATE_MINER_INFO_001, @CHAIN_STATE_NETWORK_VERSION_001
expectSend := func(expect []abi.SectorNumber) action {
return func(t *testing.T, s *mocks.MockPreCommitBatcherApi, pcb *sealing.PreCommitBatcher) promise {
s.EXPECT().ChainHead(gomock.Any()).Return(nil, abi.ChainEpoch(1), nil)
@@ -171,6 +174,7 @@ func TestPrecommitBatcher(t *testing.T) {
}
}
//stm: @CHAIN_STATE_MINER_INFO_001, @CHAIN_STATE_NETWORK_VERSION_001
expectSendsSingle := func(expect []abi.SectorNumber) action {
return func(t *testing.T, s *mocks.MockPreCommitBatcherApi, pcb *sealing.PreCommitBatcher) promise {
s.EXPECT().ChainHead(gomock.Any()).Return(nil, abi.ChainEpoch(1), nil)

View File

@@ -1,3 +1,4 @@
//stm: #unit
package sealing_test
import (
@@ -47,6 +48,7 @@ func TestStateRecoverDealIDs(t *testing.T) {
PieceCID: idCid("newPieceCID"),
}
//stm: @CHAIN_STATE_MARKET_STORAGE_DEAL_001, @CHAIN_STATE_NETWORK_VERSION_001
api.EXPECT().StateMarketStorageDealProposal(ctx, dealId, nil).Return(dealProposal, nil)
pc := idCid("publishCID")

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -19,6 +20,12 @@ import (
)
func TestAPI(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_STATE_MINER_INFO_001
t.Run("direct", func(t *testing.T) {
runAPITest(t)
})
@@ -116,11 +123,13 @@ func (ts *apiSuite) testSearchMsg(t *testing.T) {
sm, err := full.MpoolPushMessage(ctx, msg, nil)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err := full.StateWaitMsg(ctx, sm.Cid(), 1, lapi.LookbackNoLimit, true)
require.NoError(t, err)
require.Equal(t, exitcode.Ok, res.Receipt.ExitCode, "message not successful")
//stm: @CHAIN_STATE_SEARCH_MSG_001
searchRes, err := full.StateSearchMsg(ctx, types.EmptyTSK, sm.Cid(), lapi.LookbackNoLimit, true)
require.NoError(t, err)
require.NotNil(t, searchRes)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -14,6 +15,13 @@ import (
)
func TestCCUpgrade(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_STATE_MINER_GET_INFO_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
for _, height := range []abi.ChainEpoch{
@@ -85,6 +93,7 @@ func runTestCCUpgrade(t *testing.T, upgradeHeight abi.ChainEpoch) {
require.Less(t, 50000, int(exp.OnTime))
}
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
dlInfo, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -11,6 +12,11 @@ import (
// TestClient does a basic test to exercise the client CLI commands.
func TestClient(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
_ = os.Setenv("BELLMAN_NO_GPU", "1")
kit.QuietMiningLogs()

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -52,6 +53,12 @@ import (
// * asserts that miner B loses power
// * asserts that miner D loses power, is inactive
func TestDeadlineToggling(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@@ -108,6 +115,7 @@ func TestDeadlineToggling(t *testing.T) {
{
minerC.PledgeSectors(ctx, sectorsC, 0, nil)
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, maddrC, types.EmptyTSK)
require.NoError(t, err)
@@ -127,6 +135,7 @@ func TestDeadlineToggling(t *testing.T) {
expectedPower := types.NewInt(uint64(ssz) * sectorsC)
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, maddrC, types.EmptyTSK)
require.NoError(t, err)
@@ -147,12 +156,14 @@ func TestDeadlineToggling(t *testing.T) {
}
checkMiner := func(ma address.Address, power abi.StoragePower, active, activeIfCron bool, tsk types.TipSetKey) {
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, ma, tsk)
require.NoError(t, err)
// make sure it has the expected power.
require.Equal(t, p.MinerPower.RawBytePower, power)
//stm: @CHAIN_STATE_GET_ACTOR_001
mact, err := client.StateGetActor(ctx, ma, tsk)
require.NoError(t, err)
@@ -187,6 +198,7 @@ func TestDeadlineToggling(t *testing.T) {
checkMiner(maddrB, types.NewInt(0), true, true, uts.Key())
}
//stm: @CHAIN_STATE_NETWORK_VERSION_001
nv, err := client.StateNetworkVersion(ctx, types.EmptyTSK)
require.NoError(t, err)
require.GreaterOrEqual(t, nv, network.Version12)
@@ -246,6 +258,7 @@ func TestDeadlineToggling(t *testing.T) {
}, nil)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
r, err := client.StateWaitMsg(ctx, m.Cid(), 2, api.LookbackNoLimit, true)
require.NoError(t, err)
require.Equal(t, exitcode.Ok, r.Receipt.ExitCode)
@@ -298,6 +311,7 @@ func TestDeadlineToggling(t *testing.T) {
sectorbit := bitfield.New()
sectorbit.Set(uint64(sectorNum))
//stm: @CHAIN_STATE_SECTOR_PARTITION_001
loca, err := client.StateSectorPartition(ctx, maddrD, sectorNum, types.EmptyTSK)
require.NoError(t, err)
@@ -329,6 +343,7 @@ func TestDeadlineToggling(t *testing.T) {
t.Log("sent termination message:", smsg.Cid())
//stm: @CHAIN_STATE_WAIT_MSG_001
r, err := client.StateWaitMsg(ctx, smsg.Cid(), 2, api.LookbackNoLimit, true)
require.NoError(t, err)
require.Equal(t, exitcode.Ok, r.Receipt.ExitCode)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -12,6 +13,12 @@ import (
)
func TestStorageDealMissingBlock(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
// enable 512MiB proofs so we can conduct larger transfers.

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -71,6 +72,12 @@ func TestDealWithMarketAndMinerNode(t *testing.T) {
}
func TestDealCyclesConcurrent(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
if testing.Short() {
t.Skip("skipping test in short mode")
}

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -12,6 +13,12 @@ import (
)
func TestMaxStagingDeals(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
// enable 512MiB proofs so we can conduct larger transfers.

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -16,7 +17,12 @@ import (
)
func TestOfflineDealFlow(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
runTest := func(t *testing.T, fastRet bool, upscale abi.PaddedPieceSize) {
ctx := context.Background()
client, miner, ens := kit.EnsembleMinimal(t, kit.WithAllSubsystems()) // no mock proofs

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -14,7 +15,12 @@ import (
)
func TestDealPadding(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
var blockTime = 250 * time.Millisecond

View File

@@ -36,7 +36,12 @@ var (
)
func TestPartialRetrieval(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
policy.SetPreCommitChallengeDelay(2)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -9,6 +10,12 @@ import (
)
func TestFirstDealEnablesMining(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
// test making a deal with a fresh miner, and see if it starts to mine.
if testing.Short() {
t.Skip("skipping test in short mode")

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -12,6 +13,12 @@ import (
)
func TestQuotePriceForUnsealedRetrieval(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
var (
ctx = context.Background()
blocktime = 50 * time.Millisecond
@@ -100,6 +107,10 @@ iLoop:
}
func TestZeroPricePerByteRetrieval(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
if testing.Short() {
t.Skip("skipping test in short mode")
}

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -23,6 +24,12 @@ import (
)
func TestPublishDealsBatching(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
var (
ctx = context.Background()
publishPeriod = 10 * time.Second
@@ -103,6 +110,7 @@ func TestPublishDealsBatching(t *testing.T) {
}
// Expect a single PublishStorageDeals message that includes the first two deals
//stm: @CHAIN_STATE_LIST_MESSAGES_001
msgCids, err := client.StateListMessages(ctx, &api.MessageMatch{To: market.Address}, types.EmptyTSK, 1)
require.NoError(t, err)
count := 0

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -26,6 +27,12 @@ var (
)
func TestDealsRetryLackOfFunds(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
oldDelay := policy.GetPreCommitChallengeDelay()
policy.SetPreCommitChallengeDelay(5)
@@ -105,6 +112,10 @@ func TestDealsRetryLackOfFunds(t *testing.T) {
}
func TestDealsRetryLackOfFunds_blockInPublishDeal(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
ctx := context.Background()
oldDelay := policy.GetPreCommitChallengeDelay()
policy.SetPreCommitChallengeDelay(5)
@@ -181,6 +192,10 @@ func TestDealsRetryLackOfFunds_blockInPublishDeal(t *testing.T) {
}
func TestDealsRetryLackOfFunds_belowLimit(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
ctx := context.Background()
oldDelay := policy.GetPreCommitChallengeDelay()
policy.SetPreCommitChallengeDelay(5)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -9,6 +10,12 @@ import (
)
func TestDealsWithSealingAndRPC(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
if testing.Short() {
t.Skip("skipping test in short mode")
}

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -38,6 +39,12 @@ const (
// TestGatewayWalletMsig tests that API calls to wallet and msig can be made on a lite
// node that is connected through a gateway to a full API node
func TestGatewayWalletMsig(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
blocktime := 5 * time.Millisecond
@@ -116,6 +123,7 @@ func TestGatewayWalletMsig(t *testing.T) {
addProposal, err := doSend(proto)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err := lite.StateWaitMsg(ctx, addProposal, 1, api.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode)
@@ -127,6 +135,7 @@ func TestGatewayWalletMsig(t *testing.T) {
// Get available balance of msig: should be greater than zero and less
// than initial amount
msig := execReturn.IDAddress
//stm: @CHAIN_STATE_MINER_AVAILABLE_BALANCE_001
msigBalance, err := lite.MsigGetAvailableBalance(ctx, msig, types.EmptyTSK)
require.NoError(t, err)
require.Greater(t, msigBalance.Int64(), int64(0))
@@ -139,6 +148,7 @@ func TestGatewayWalletMsig(t *testing.T) {
addProposal, err = doSend(proto)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err = lite.StateWaitMsg(ctx, addProposal, 1, api.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode)
@@ -156,6 +166,7 @@ func TestGatewayWalletMsig(t *testing.T) {
approval1, err := doSend(proto)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err = lite.StateWaitMsg(ctx, approval1, 1, api.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode)
@@ -169,6 +180,10 @@ func TestGatewayWalletMsig(t *testing.T) {
// TestGatewayMsigCLI tests that msig CLI calls can be made
// on a lite node that is connected through a gateway to a full API node
func TestGatewayMsigCLI(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
kit.QuietMiningLogs()
blocktime := 5 * time.Millisecond
@@ -180,6 +195,10 @@ func TestGatewayMsigCLI(t *testing.T) {
}
func TestGatewayDealFlow(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
kit.QuietMiningLogs()
blocktime := 5 * time.Millisecond
@@ -202,6 +221,10 @@ func TestGatewayDealFlow(t *testing.T) {
}
func TestGatewayCLIDealFlow(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
kit.QuietMiningLogs()
blocktime := 5 * time.Millisecond

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -16,6 +17,12 @@ import (
)
func TestChainGetMessagesInTs(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
kit.QuietMiningLogs()
@@ -84,6 +91,7 @@ func TestChainGetMessagesInTs(t *testing.T) {
}
for _, sm := range sms {
//stm: @CHAIN_STATE_WAIT_MSG_001
msgLookup, err := client.StateWaitMsg(ctx, sm.Cid(), 3, api.LookbackNoLimit, true)
require.NoError(t, err)

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -10,6 +11,12 @@ import (
// TestMultisig does a basic test to exercise the multisig CLI commands
func TestMultisig(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
blockTime := 5 * time.Millisecond

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -13,6 +14,12 @@ import (
)
func TestNonceIncremental(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
ctx := context.Background()
kit.QuietMiningLogs()
@@ -51,6 +58,7 @@ func TestNonceIncremental(t *testing.T) {
}
for _, sm := range sms {
//stm: @CHAIN_STATE_WAIT_MSG_001
_, err := client.StateWaitMsg(ctx, sm.Cid(), 3, api.LookbackNoLimit, true)
require.NoError(t, err)
}

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -27,6 +28,12 @@ import (
)
func TestPaymentChannelsAPI(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
ctx := context.Background()
@@ -107,6 +114,7 @@ func TestPaymentChannelsAPI(t *testing.T) {
require.NoError(t, err)
preds := state.NewStatePredicates(paymentCreator)
finished := make(chan struct{})
//stm: @CHAIN_STATE_GET_ACTOR_001
err = ev.StateChanged(func(ctx context.Context, ts *types.TipSet) (done bool, more bool, err error) {
act, err := paymentCreator.StateGetActor(ctx, channel, ts.Key())
if err != nil {
@@ -182,6 +190,7 @@ func TestPaymentChannelsAPI(t *testing.T) {
collectMsg, err := paymentReceiver.PaychCollect(ctx, channel)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err = paymentReceiver.StateWaitMsg(ctx, collectMsg, 3, api.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode, "unable to collect on payment channel")

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -30,6 +31,12 @@ import (
// TestPaymentChannelsBasic does a basic test to exercise the payment channel CLI
// commands
func TestPaymentChannelsBasic(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
_ = os.Setenv("BELLMAN_NO_GPU", "1")
kit.QuietMiningLogs()
@@ -87,6 +94,10 @@ type voucherSpec struct {
// TestPaymentChannelStatus tests the payment channel status CLI command
func TestPaymentChannelStatus(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
_ = os.Setenv("BELLMAN_NO_GPU", "1")
kit.QuietMiningLogs()
@@ -167,6 +178,12 @@ func TestPaymentChannelStatus(t *testing.T) {
// TestPaymentChannelVouchers does a basic test to exercise some payment
// channel voucher commands
func TestPaymentChannelVouchers(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
_ = os.Setenv("BELLMAN_NO_GPU", "1")
kit.QuietMiningLogs()
@@ -299,6 +316,12 @@ func TestPaymentChannelVouchers(t *testing.T) {
// TestPaymentChannelVoucherCreateShortfall verifies that if a voucher amount
// is greater than what's left in the channel, voucher create fails
func TestPaymentChannelVoucherCreateShortfall(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
_ = os.Setenv("BELLMAN_NO_GPU", "1")
kit.QuietMiningLogs()

View File

@@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@@ -17,6 +18,13 @@ import (
)
func TestSDRUpgrade(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
//stm: @CHAIN_STATE_NETWORK_VERSION_001
kit.QuietMiningLogs()
// oldDelay := policy.GetPreCommitChallengeDelay()


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -18,6 +19,12 @@ import (
)
func TestDealsWithFinalizeEarly(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
if testing.Short() {
t.Skip("skipping test in short mode")
}


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -21,6 +22,12 @@ import (
)
func TestMinerBalanceCollateral(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
blockTime := 5 * time.Millisecond


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -22,6 +23,12 @@ import (
)
func TestPledgeSectors(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
blockTime := 50 * time.Millisecond
@ -110,6 +117,12 @@ func TestPledgeBatching(t *testing.T) {
}
func TestPledgeMaxBatching(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
blockTime := 50 * time.Millisecond
runTest := func(t *testing.T) {
@ -173,6 +186,7 @@ func TestPledgeMaxBatching(t *testing.T) {
}
// Ensure that max aggregate message has propagated to the other node by checking current state
//stm: @CHAIN_STATE_MINER_SECTORS_001
sectorInfosAfter, err := full.StateMinerSectors(ctx, miner.ActorAddr, nil, types.EmptyTSK)
require.NoError(t, err)
assert.Equal(t, miner5.MaxAggregatedSectors+kit.DefaultPresealsPerBootstrapMiner, len(sectorInfosAfter))
@ -182,6 +196,12 @@ func TestPledgeMaxBatching(t *testing.T) {
}
func TestPledgeBeforeNv13(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
blocktime := 50 * time.Millisecond
runTest := func(t *testing.T, nSectors int) {


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -14,6 +15,12 @@ import (
)
func TestTerminate(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -33,6 +40,7 @@ func TestTerminate(t *testing.T) {
ssz, err := miner.ActorSectorSize(ctx, maddr)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
require.Equal(t, p.MinerPower, p.TotalPower)
@ -45,6 +53,7 @@ func TestTerminate(t *testing.T) {
t.Log("wait for power")
{
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
// Wait until proven.
di, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -58,6 +67,7 @@ func TestTerminate(t *testing.T) {
nSectors++
//stm: @CHAIN_STATE_MINER_POWER_001
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
require.Equal(t, p.MinerPower, p.TotalPower)
@ -111,6 +121,7 @@ loop:
// need to wait for message to be mined and applied.
time.Sleep(5 * time.Second)
//stm: @CHAIN_STATE_MINER_POWER_001
// check power decreased
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -119,6 +130,7 @@ loop:
// check in terminated set
{
//stm: @CHAIN_STATE_MINER_GET_PARTITIONS_001
parts, err := client.StateMinerPartitions(ctx, maddr, 1, types.EmptyTSK)
require.NoError(t, err)
require.Greater(t, len(parts), 0)
@ -133,6 +145,7 @@ loop:
require.Equal(t, uint64(0), bflen(parts[0].LiveSectors))
}
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -141,6 +154,7 @@ loop:
ts := client.WaitTillChain(ctx, kit.HeightAtLeast(waitUntil))
t.Logf("Now head.Height = %d", ts.Height())
//stm: @CHAIN_STATE_MINER_POWER_001
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -14,6 +15,12 @@ import (
)
func TestTapeFix(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.QuietMiningLogs()
var blocktime = 2 * time.Millisecond


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -23,6 +24,12 @@ import (
)
func TestVerifiedClientTopUp(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
blockTime := 100 * time.Millisecond
test := func(nv network.Version, shouldWork bool) func(*testing.T) {
@ -51,6 +58,7 @@ func TestVerifiedClientTopUp(t *testing.T) {
defer cancel()
// get VRH
//stm: @CHAIN_STATE_VERIFIED_REGISTRY_ROOT_KEY_001
vrh, err := api.StateVerifiedRegistryRootKey(ctx, types.TipSetKey{})
fmt.Println(vrh.String())
require.NoError(t, err)
@ -81,6 +89,7 @@ func TestVerifiedClientTopUp(t *testing.T) {
sm, err := api.MpoolPushMessage(ctx, msg, nil)
require.NoError(t, err, "AddVerifier failed")
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err := api.StateWaitMsg(ctx, sm.Cid(), 1, lapi.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode)
@ -102,11 +111,13 @@ func TestVerifiedClientTopUp(t *testing.T) {
sm, err = api.MpoolPushMessage(ctx, msg, nil)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
res, err = api.StateWaitMsg(ctx, sm.Cid(), 1, lapi.LookbackNoLimit, true)
require.NoError(t, err)
require.EqualValues(t, 0, res.Receipt.ExitCode)
// check datacap balance
//stm: @CHAIN_STATE_VERIFIED_CLIENT_STATUS_001
dcap, err := api.StateVerifiedClientStatus(ctx, verifiedClientAddr, types.EmptyTSK)
require.NoError(t, err)


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -20,6 +21,12 @@ import (
)
func TestWindowPostDispute(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -61,6 +68,7 @@ func TestWindowPostDispute(t *testing.T) {
evilMinerAddr, err := evilMiner.ActorAddress(ctx)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
@ -72,6 +80,7 @@ func TestWindowPostDispute(t *testing.T) {
ts := client.WaitTillChain(ctx, kit.HeightAtLeast(waitUntil))
t.Logf("Now head.Height = %d", ts.Height())
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
@ -84,6 +93,7 @@ func TestWindowPostDispute(t *testing.T) {
evilSectors, err := evilMiner.SectorsList(ctx)
require.NoError(t, err)
evilSectorNo := evilSectors[0] // only one.
//stm: @CHAIN_STATE_SECTOR_PARTITION_001
evilSectorLoc, err := client.StateSectorPartition(ctx, evilMinerAddr, evilSectorNo, types.EmptyTSK)
require.NoError(t, err)
@ -96,6 +106,7 @@ func TestWindowPostDispute(t *testing.T) {
// Wait until we need to prove our sector.
for {
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
if di.Index == evilSectorLoc.Deadline && di.CurrentEpoch-di.PeriodStart > 1 {
@ -109,6 +120,7 @@ func TestWindowPostDispute(t *testing.T) {
// Wait until after the proving period.
for {
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
if di.Index != evilSectorLoc.Deadline {
@ -119,6 +131,7 @@ func TestWindowPostDispute(t *testing.T) {
t.Log("accepted evil proof")
//stm: @CHAIN_STATE_MINER_POWER_001
// Make sure the evil node didn't lose any power.
p, err = client.StateMinerPower(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
@ -145,11 +158,13 @@ func TestWindowPostDispute(t *testing.T) {
require.NoError(t, err)
t.Log("waiting dispute")
//stm: @CHAIN_STATE_WAIT_MSG_001
rec, err := client.StateWaitMsg(ctx, sm.Cid(), build.MessageConfidence, api.LookbackNoLimit, true)
require.NoError(t, err)
require.Zero(t, rec.Receipt.ExitCode, "dispute not accepted: %s", rec.Receipt.ExitCode.Error())
}
//stm: @CHAIN_STATE_MINER_POWER_001
// Objection SUSTAINED!
// Make sure the evil node lost power.
p, err = client.StateMinerPower(ctx, evilMinerAddr, types.EmptyTSK)
@ -162,6 +177,7 @@ func TestWindowPostDispute(t *testing.T) {
// First, recover the sector.
{
//stm: @CHAIN_STATE_MINER_INFO_001
minerInfo, err := client.StateMinerInfo(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
@ -186,6 +202,7 @@ func TestWindowPostDispute(t *testing.T) {
sm, err := client.MpoolPushMessage(ctx, msg, nil)
require.NoError(t, err)
//stm: @CHAIN_STATE_WAIT_MSG_001
rec, err := client.StateWaitMsg(ctx, sm.Cid(), build.MessageConfidence, api.LookbackNoLimit, true)
require.NoError(t, err)
require.Zero(t, rec.Receipt.ExitCode, "recovery not accepted: %s", rec.Receipt.ExitCode.Error())
@ -193,6 +210,7 @@ func TestWindowPostDispute(t *testing.T) {
// Then wait for the deadline.
for {
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, evilMinerAddr, types.EmptyTSK)
require.NoError(t, err)
if di.Index == evilSectorLoc.Deadline {
@ -210,6 +228,11 @@ func TestWindowPostDispute(t *testing.T) {
}
func TestWindowPostDisputeFails(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_STATE_MINER_GET_DEADLINES_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -232,6 +255,7 @@ func TestWindowPostDisputeFails(t *testing.T) {
miner.PledgeSectors(ctx, 10, 0, nil)
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -246,6 +270,7 @@ func TestWindowPostDisputeFails(t *testing.T) {
require.NoError(t, err)
expectedPower := types.NewInt(uint64(ssz) * (kit.DefaultPresealsPerBootstrapMiner + 10))
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -271,6 +296,7 @@ waitForProof:
}
for {
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
// wait until the deadline finishes.
@ -314,11 +340,13 @@ func submitBadProof(
return err
}
//stm: @CHAIN_STATE_MINER_INFO_001
minerInfo, err := client.StateMinerInfo(ctx, maddr, head.Key())
if err != nil {
return err
}
//stm: @CHAIN_STATE_GET_RANDOMNESS_FROM_TICKETS_001
commEpoch := di.Open
commRand, err := client.StateGetRandomnessFromTickets(
ctx, crypto.DomainSeparationTag_PoStChainCommit,
@ -355,6 +383,7 @@ func submitBadProof(
return err
}
//stm: @CHAIN_STATE_WAIT_MSG_001
rec, err := client.StateWaitMsg(ctx, sm.Cid(), build.MessageConfidence, api.LookbackNoLimit, true)
if err != nil {
return err


@ -1,3 +1,4 @@
//stm: #integration
package itests
import (
@ -23,6 +24,12 @@ import (
)
func TestWindowedPost(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -58,6 +65,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
maddr, err := miner.ActorAddress(ctx)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err := client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -71,6 +79,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
ts := client.WaitTillChain(ctx, kit.HeightAtLeast(waitUntil))
t.Logf("Now head.Height = %d", ts.Height())
//stm: @CHAIN_STATE_MINER_POWER_001
p, err := client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -84,6 +93,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
// Drop 2 sectors from deadline 2 partition 0 (full partition / deadline)
{
//stm: @CHAIN_STATE_MINER_GET_PARTITIONS_001
parts, err := client.StateMinerPartitions(ctx, maddr, 2, types.EmptyTSK)
require.NoError(t, err)
require.Greater(t, len(parts), 0)
@ -109,6 +119,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
// Drop 1 sectors from deadline 3 partition 0
{
//stm: @CHAIN_STATE_MINER_GET_PARTITIONS_001
parts, err := client.StateMinerPartitions(ctx, maddr, 3, types.EmptyTSK)
require.NoError(t, err)
require.Greater(t, len(parts), 0)
@ -137,6 +148,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
require.NoError(t, err)
}
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -147,6 +159,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
ts = client.WaitTillChain(ctx, kit.HeightAtLeast(waitUntil))
t.Logf("Now head.Height = %d", ts.Height())
//stm: @CHAIN_STATE_MINER_POWER_001
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -160,6 +173,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
err = miner.StorageMiner.(*impl.StorageMinerAPI).IStorageMgr.(*mock.SectorMgr).MarkFailed(s, false)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -169,6 +183,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
ts = client.WaitTillChain(ctx, kit.HeightAtLeast(waitUntil))
t.Logf("Now head.Height = %d", ts.Height())
//stm: @CHAIN_STATE_MINER_POWER_001
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -183,6 +198,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
{
// Wait until proven.
//stm: @CHAIN_STATE_MINER_CALCULATE_DEADLINE_001
di, err = client.StateMinerProvingDeadline(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -193,6 +209,7 @@ func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
t.Logf("Now head.Height = %d", ts.Height())
}
//stm: @CHAIN_STATE_MINER_POWER_001
p, err = client.StateMinerPower(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
@ -203,6 +220,12 func testWindowPostUpgrade(t *testing.T, blocktime time.Duration, nSectors int,
}
func TestWindowPostBaseFeeNoBurn(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -225,10 +248,12 @@ func TestWindowPostBaseFeeNoBurn(t *testing.T) {
maddr, err := miner.ActorAddress(ctx)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_INFO_001
mi, err := client.StateMinerInfo(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
miner.PledgeSectors(ctx, nSectors, 0, nil)
//stm: @CHAIN_STATE_GET_ACTOR_001
wact, err := client.StateGetActor(ctx, mi.Worker, types.EmptyTSK)
require.NoError(t, err)
en := wact.Nonce
@ -237,6 +262,7 @@ func TestWindowPostBaseFeeNoBurn(t *testing.T) {
waitForProof:
for {
//stm: @CHAIN_STATE_GET_ACTOR_001
wact, err := client.StateGetActor(ctx, mi.Worker, types.EmptyTSK)
require.NoError(t, err)
if wact.Nonce > en {
@ -246,9 +272,11 @@ waitForProof:
build.Clock.Sleep(blocktime)
}
//stm: @CHAIN_STATE_LIST_MESSAGES_001
slm, err := client.StateListMessages(ctx, &api.MessageMatch{To: maddr}, types.EmptyTSK, 0)
require.NoError(t, err)
//stm: @CHAIN_STATE_REPLAY_001
pmr, err := client.StateReplay(ctx, types.EmptyTSK, slm[0])
require.NoError(t, err)
@ -256,6 +284,12 @@ waitForProof:
}
func TestWindowPostBaseFeeBurn(t *testing.T) {
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
kit.Expensive(t)
kit.QuietMiningLogs()
@ -271,10 +305,12 @@ func TestWindowPostBaseFeeBurn(t *testing.T) {
maddr, err := miner.ActorAddress(ctx)
require.NoError(t, err)
//stm: @CHAIN_STATE_MINER_INFO_001
mi, err := client.StateMinerInfo(ctx, maddr, types.EmptyTSK)
require.NoError(t, err)
miner.PledgeSectors(ctx, 10, 0, nil)
//stm: @CHAIN_STATE_GET_ACTOR_001
wact, err := client.StateGetActor(ctx, mi.Worker, types.EmptyTSK)
require.NoError(t, err)
en := wact.Nonce
@ -283,6 +319,7 @@ func TestWindowPostBaseFeeBurn(t *testing.T) {
waitForProof:
for {
//stm: @CHAIN_STATE_GET_ACTOR_001
wact, err := client.StateGetActor(ctx, mi.Worker, types.EmptyTSK)
require.NoError(t, err)
if wact.Nonce > en {
@ -292,9 +329,11 @@ waitForProof:
build.Clock.Sleep(blocktime)
}
//stm: @CHAIN_STATE_LIST_MESSAGES_001
slm, err := client.StateListMessages(ctx, &api.MessageMatch{To: maddr}, types.EmptyTSK, 0)
require.NoError(t, err)
//stm: @CHAIN_STATE_REPLAY_001
pmr, err := client.StateReplay(ctx, types.EmptyTSK, slm[0])
require.NoError(t, err)


@ -1,3 +1,4 @@
//stm: #unit
package retrievaladapter
import (
@ -18,6 +19,7 @@ import (
)
func TestGetPricingInput(t *testing.T) {
//stm: @CHAIN_STATE_MARKET_STORAGE_DEAL_001
ctx := context.Background()
tsk := &types.TipSet{}
key := tsk.Key()


@ -1,3 +1,4 @@
//stm: #unit
package storageadapter
import (
@ -27,6 +28,7 @@ import (
)
func TestDealStateMatcher(t *testing.T) {
//stm: @CHAIN_STATE_GET_ACTOR_001
ctx := context.Background()
bs := bstore.NewMemorySync()
store := adt2.WrapStore(ctx, cbornode.NewCborStore(bs))


@ -24,7 +24,6 @@ func TestDefaultFullNodeRoundtrip(t *testing.T) {
s = buf.String()
}
//stm: @NODE_CONFIG_003
c2, err := FromReader(strings.NewReader(s), DefaultFullNode())
require.NoError(t, err)
@ -46,7 +45,6 @@ func TestDefaultMinerRoundtrip(t *testing.T) {
s = buf.String()
}
//stm: @NODE_CONFIG_004
c2, err := FromReader(strings.NewReader(s), DefaultStorageMiner())
require.NoError(t, err)


@ -15,7 +15,6 @@ func TestDecodeNothing(t *testing.T) {
assert := assert.New(t)
{
//stm: @NODE_CONFIG_001
cfg, err := FromFile(os.DevNull, DefaultFullNode())
assert.Nil(err, "error should be nil")
assert.Equal(DefaultFullNode(), cfg,
@ -23,7 +22,6 @@ func TestDecodeNothing(t *testing.T) {
}
{
//stm: @NODE_CONFIG_002
cfg, err := FromFile("./does-not-exist.toml", DefaultFullNode())
assert.Nil(err, "error should be nil")
assert.Equal(DefaultFullNode(), cfg,


@ -45,12 +45,10 @@ func TestImportLocal(t *testing.T) {
b, err := testdata.ReadFile("testdata/payload.txt")
require.NoError(t, err)
//stm: @CLIENT_IMPORT_003
root, err := a.ClientImportLocal(ctx, bytes.NewReader(b))
require.NoError(t, err)
require.NotEqual(t, cid.Undef, root)
//stm: @CLIENT_IMPORT_004
list, err := a.ClientListImports(ctx)
require.NoError(t, err)
require.Len(t, list, 1)
@ -71,7 +69,6 @@ func TestImportLocal(t *testing.T) {
// retrieve as UnixFS.
out1 := filepath.Join(dir, "retrieval1.data") // as unixfs
out2 := filepath.Join(dir, "retrieval2.data") // as car
//stm: @CLIENT_IMPORT_005
err = a.ClientRetrieve(ctx, order, &api.FileRef{
Path: out1,
})
@ -88,7 +85,6 @@ func TestImportLocal(t *testing.T) {
require.NoError(t, err)
// open the CARv2 being custodied by the import manager
//stm: @CLIENT_IMPORT_006
orig, err := carv2.OpenReader(it.CARPath)
require.NoError(t, err)
@ -99,7 +95,6 @@ func TestImportLocal(t *testing.T) {
require.EqualValues(t, 1, exported.Version)
require.EqualValues(t, 2, orig.Version)
//stm: @CLIENT_IMPORT_007
origRoots, err := orig.Roots()
require.NoError(t, err)
require.Len(t, origRoots, 1)


@ -36,13 +36,11 @@ func TestRoundtripUnixFS_Dense(t *testing.T) {
defer os.Remove(carv2File) //nolint:errcheck
// import a file to a Unixfs DAG using a CARv2 read/write blockstore.
//stm: @CLIENT_IMPORT_001, @CLIENT_BLOCKSTORE_001
bs, err := blockstore.OpenReadWrite(carv2File, nil,
carv2.ZeroLengthSectionAsEOF(true),
blockstore.UseWholeCIDs(true))
require.NoError(t, err)
//stm: @CLIENT_IMPORT_001
root, err := buildUnixFS(ctx, bytes.NewBuffer(inputContents), bs, false)
require.NoError(t, err)
require.NotEqual(t, cid.Undef, root)
@ -88,13 +86,11 @@ func TestRoundtripUnixFS_Filestore(t *testing.T) {
dst := newTmpFile(t)
defer os.Remove(dst) //nolint:errcheck
//stm: @CLIENT_FS_001
root, err := a.createUnixFSFilestore(ctx, inputPath, dst)
require.NoError(t, err)
require.NotEqual(t, cid.Undef, root)
// convert the CARv2 to a normal file again and ensure the contents match
//stm: @CLIENT_FS_002
fs, err := stores.ReadOnlyFilestore(dst)
require.NoError(t, err)
defer fs.Close() //nolint:errcheck
@ -121,7 +117,6 @@ func TestRoundtripUnixFS_Filestore(t *testing.T) {
}
func newTmpFile(t *testing.T) string {
//stm: @CLIENT_FS_003
f, err := os.CreateTemp("", "")
require.NoError(t, err)
require.NoError(t, f.Close())
@ -129,7 +124,6 @@ func newTmpFile(t *testing.T) string {
}
func genInputFile(t *testing.T) (filepath string, contents []byte) {
//stm: @CLIENT_FS_004
s := strings.Repeat("abcde", 100)
tmp, err := os.CreateTemp("", "")
require.NoError(t, err)


@ -13,7 +13,7 @@ import (
)
func TestMedian(t *testing.T) {
//stm: @REPO_GAS_001 GAS_001
require.Equal(t, types.NewInt(5), medianGasPremium([]GasMeta{
{big.NewInt(5), build.BlockGasTarget},
}, 1))


@ -13,13 +13,13 @@ func genFsRepo(t *testing.T) (*FsRepo, func()) {
t.Fatal(err)
}
//stm: @REPO_FS_001 FS_001
repo, err := NewFS(path)
if err != nil {
t.Fatal(err)
}
//stm: @REPO_FS_002 FS_002
err = repo.Init(FullNode)
if err != ErrRepoExists && err != nil {
t.Fatal(err)
@ -32,6 +32,6 @@ func genFsRepo(t *testing.T) (*FsRepo, func()) {
func TestFsBasic(t *testing.T) {
repo, closer := genFsRepo(t)
defer closer()
//stm: @REPO_FS_003 FS_003
basicTest(t, repo)
}


@ -7,6 +7,6 @@ import (
func TestMemBasic(t *testing.T) {
repo := NewMemory(nil)
//stm: @REPO_MEM_001 MEM_001
basicTest(t, repo)
}


@ -15,20 +15,20 @@ import (
)
func basicTest(t *testing.T, repo Repo) {
//stm: @REPO_NET_001 NET_001
apima, err := repo.APIEndpoint()
if assert.Error(t, err) {
assert.Equal(t, ErrNoAPIEndpoint, err)
}
assert.Nil(t, apima, "with no api endpoint, return should be nil")
//stm: @REPO_MUT_001 MUT_001
lrepo, err := repo.Lock(FullNode)
assert.NoError(t, err, "should be able to lock once")
assert.NotNil(t, lrepo, "locked repo shouldn't be nil")
{
//stm: @REPO_MUT_002 MUT_002
lrepo2, err := repo.Lock(FullNode)
if assert.Error(t, err) {
assert.Equal(t, ErrRepoAlreadyLocked, err)
@ -36,7 +36,7 @@ func basicTest(t *testing.T, repo Repo) {
assert.Nil(t, lrepo2, "with locked repo errors, nil should be returned")
}
//stm: @REPO_MUT_003 MUT_003
err = lrepo.Close()
assert.NoError(t, err, "should be able to unlock")
@ -47,7 +47,7 @@ func basicTest(t *testing.T, repo Repo) {
ma, err := multiaddr.NewMultiaddr("/ip4/127.0.0.1/tcp/43244")
assert.NoError(t, err, "creating multiaddr shouldn't error")
//stm: @REPO_NET_002 NET_002
err = lrepo.SetAPIEndpoint(ma)
assert.NoError(t, err, "setting multiaddr shouldn't error")
@ -75,7 +75,7 @@ func basicTest(t *testing.T, repo Repo) {
err = lrepo.Close()
assert.NoError(t, err, "should be able to close")
//stm: @REPO_NET_003 NET_003
apima, err = repo.APIEndpoint()
if assert.Error(t, err) {
@ -90,27 +90,27 @@ func basicTest(t *testing.T, repo Repo) {
assert.NoError(t, err, "should be able to relock")
assert.NotNil(t, lrepo, "locked repo shouldn't be nil")
//stm: @REPO_KEYSTR_001 KEYSTR_001
kstr, err := lrepo.KeyStore()
assert.NoError(t, err, "should be able to get keystore")
assert.NotNil(t, lrepo, "keystore shouldn't be nil")
//stm: @REPO_KEYSTR_002 KEYSTR_002
list, err := kstr.List()
assert.NoError(t, err, "should be able to list key")
assert.Empty(t, list, "there should be no keys")
//stm: @REPO_KEYSTR_003 KEYSTR_003
err = kstr.Put("k1", k1)
assert.NoError(t, err, "should be able to put k1")
//stm: @REPO_KEYSTR_004 KEYSTR_004
err = kstr.Put("k1", k1)
if assert.Error(t, err, "putting key under the same name should error") {
assert.True(t, xerrors.Is(err, types.ErrKeyExists), "returned error is ErrKeyExists")
}
//stm: @REPO_KEYSTR_005 KEYSTR_005
k1prim, err := kstr.Get("k1")
assert.NoError(t, err, "should be able to get k1")
assert.Equal(t, k1, k1prim, "returned key should be the same")
@ -128,7 +128,7 @@ func basicTest(t *testing.T, repo Repo) {
assert.NoError(t, err, "should be able to list keys")
assert.ElementsMatch(t, []string{"k1", "k2"}, list, "returned elements match")
//stm: @REPO_KEYSTR_006 KEYSTR_006
err = kstr.Delete("k2")
assert.NoError(t, err, "should be able to delete key")


@ -15,7 +15,6 @@ func TestMonitorShutdown(t *testing.T) {
// Three shutdown handlers.
var wg sync.WaitGroup
wg.Add(3)
//stm: @NODE_SHUTDOWN_001
h := ShutdownHandler{
Component: "handler",
StopFunc: func(_ context.Context) error {
@ -24,7 +23,6 @@ func TestMonitorShutdown(t *testing.T) {
},
}
//stm: @NODE_SHUTDOWN_002
finishCh := MonitorShutdown(signalCh, h, h, h)
// Nothing here after 10ms.


@ -44,6 +44,9 @@ func TestCheckVoucherValid(t *testing.T) {
mock.setAccountAddress(fromAcct, from)
mock.setAccountAddress(toAcct, to)
//stm: @TOKEN_PAYCH_VOUCHER_VALID_001, @TOKEN_PAYCH_VOUCHER_VALID_002, @TOKEN_PAYCH_VOUCHER_VALID_003
//stm: @TOKEN_PAYCH_VOUCHER_VALID_004, @TOKEN_PAYCH_VOUCHER_VALID_005, @TOKEN_PAYCH_VOUCHER_VALID_006, @TOKEN_PAYCH_VOUCHER_VALID_007
//stm: @TOKEN_PAYCH_VOUCHER_VALID_009, @TOKEN_PAYCH_VOUCHER_VALID_010
tcases := []struct {
name string
expectError bool
@ -197,7 +200,6 @@ func TestCheckVoucherValid(t *testing.T) {
for _, tcase := range tcases {
tcase := tcase
//stm: @PAYMENT_CHANNEL_VOUCHER_001, PAYMENT_CHANNEL_VOUCHER_002, PAYMENT_CHANNEL_VOUCHER_003, PAYMENT_CHANNEL_VOUCHER_004
t.Run(tcase.name, func(t *testing.T) {
// Create an actor for the channel with the test case balance
act := &types.Actor{
@ -244,6 +246,7 @@ func TestCreateVoucher(t *testing.T) {
Lane: 1,
Amount: voucherLane1Amt,
}
//stm: @TOKEN_PAYCH_VOUCHER_CREATE_001
res, err := s.mgr.CreateVoucher(ctx, s.ch, voucher)
require.NoError(t, err)
require.NotNil(t, res.Voucher)
@ -288,6 +291,7 @@ func TestCreateVoucher(t *testing.T) {
Lane: 2,
Amount: voucherLane2Amt,
}
//stm: @TOKEN_PAYCH_VOUCHER_CREATE_004
res, err = s.mgr.CreateVoucher(ctx, s.ch, voucher)
require.NoError(t, err)
@ -298,6 +302,7 @@ func TestCreateVoucher(t *testing.T) {
}
func TestAddVoucherDelta(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_VOUCHERS_001
ctx := context.Background()
// Set up a manager with a single payment channel
@ -359,6 +364,7 @@ func TestAddVoucherNextLane(t *testing.T) {
require.NoError(t, err)
require.EqualValues(t, ci.NextLane, 3)
//stm: @TOKEN_PAYCH_ALLOCATE_LANE_001
// Allocate a lane (should be lane 3)
lane, err := s.mgr.AllocateLane(s.ch)
require.NoError(t, err)
@ -393,6 +399,12 @@ func TestAllocateLane(t *testing.T) {
// Set up a manager with a single payment channel
s := testSetupMgrWithChannel(t)
//stm: @TOKEN_PAYCH_ALLOCATE_LANE_001
// First lane should be 0
lane, err := s.mgr.AllocateLane(s.ch)
require.NoError(t, err)
@ -447,6 +454,7 @@ func TestAllocateLaneWithExistingLaneState(t *testing.T) {
_, err = mgr.AddVoucherInbound(ctx, ch, sv, nil, minDelta)
require.NoError(t, err)
//stm: @TOKEN_PAYCH_ALLOCATE_LANE_001
// Allocate lane should return the next lane (lane 3)
lane, err := mgr.AllocateLane(ch)
require.NoError(t, err)
@ -509,6 +517,7 @@ func TestAddVoucherInboundWalletKey(t *testing.T) {
}
func TestBestSpendable(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_VOUCHERS_001
ctx := context.Background()
// Set up a manager with a single payment channel
@ -551,6 +560,7 @@ func TestBestSpendable(t *testing.T) {
},
})
//stm: @TOKEN_PAYCH_BEST_SPENDABLE_001
// Verify best spendable vouchers on each lane
vouchers, err := BestSpendableByLane(ctx, bsapi, s.ch)
require.NoError(t, err)
@ -691,6 +701,7 @@ func TestSubmitVoucher(t *testing.T) {
err = p3.UnmarshalCBOR(bytes.NewReader(msg.Message.Params))
require.NoError(t, err)
//stm: @TOKEN_PAYCH_LIST_VOUCHERS_001
// Verify that vouchers are marked as submitted
vis, err := s.mgr.ListVouchers(ctx, s.ch)
require.NoError(t, err)


@ -68,6 +68,7 @@ func TestPaychGetCreateChannelMsg(t *testing.T) {
// TestPaychGetCreateChannelThenAddFunds tests creating a channel and then
// adding funds to it
func TestPaychGetCreateChannelThenAddFunds(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -158,6 +159,7 @@ func TestPaychGetCreateChannelThenAddFunds(t *testing.T) {
// operation is queued up behind a create channel operation, and the create
// channel fails, then the waiting operation can succeed.
func TestPaychGetCreateChannelWithErrorThenCreateAgain(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -222,6 +224,7 @@ func TestPaychGetCreateChannelWithErrorThenCreateAgain(t *testing.T) {
// TestPaychGetRecoverAfterError tests that after a create channel fails, the
// next attempt to create channel can succeed.
func TestPaychGetRecoverAfterError(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -274,6 +277,7 @@ func TestPaychGetRecoverAfterError(t *testing.T) {
// TestPaychGetRecoverAfterAddFundsError tests that after an add funds fails, the
// next attempt to add funds can succeed.
func TestPaychGetRecoverAfterAddFundsError(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -356,6 +360,7 @@ func TestPaychGetRecoverAfterAddFundsError(t *testing.T) {
// right after the create channel message is sent, the channel will be
// created when the system restarts.
func TestPaychGetRestartAfterCreateChannelMsg(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -435,6 +440,7 @@ func TestPaychGetRestartAfterCreateChannelMsg(t *testing.T) {
// right after the add funds message is sent, the add funds will be
// processed when the system restarts.
func TestPaychGetRestartAfterAddFundsMsg(t *testing.T) {
//stm: @TOKEN_PAYCH_LIST_CHANNELS_001, @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -498,6 +504,7 @@ func TestPaychGetRestartAfterAddFundsMsg(t *testing.T) {
// TestPaychGetWait tests that GetPaychWaitReady correctly waits for the
// channel to be created or funds to be added
func TestPaychGetWait(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -555,6 +562,7 @@ func TestPaychGetWait(t *testing.T) {
// TestPaychGetWaitErr tests that GetPaychWaitReady correctly handles errors
func TestPaychGetWaitErr(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -602,6 +610,7 @@ func TestPaychGetWaitErr(t *testing.T) {
// TestPaychGetWaitCtx tests that GetPaychWaitReady returns early if the context
// is cancelled
func TestPaychGetWaitCtx(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx, cancel := context.WithCancel(context.Background())
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -631,6 +640,7 @@ func TestPaychGetWaitCtx(t *testing.T) {
// progress and two add funds are queued up behind it, the two add funds
// will be merged
func TestPaychGetMergeAddFunds(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -729,6 +739,7 @@ func TestPaychGetMergeAddFunds(t *testing.T) {
// TestPaychGetMergeAddFundsCtxCancelOne tests that when a queued add funds
// request is cancelled, its amount is removed from the total merged add funds
func TestPaychGetMergeAddFundsCtxCancelOne(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -826,6 +837,7 @@ func TestPaychGetMergeAddFundsCtxCancelOne(t *testing.T) {
// TestPaychGetMergeAddFundsCtxCancelAll tests that when all queued add funds
// requests are cancelled, no add funds message is sent
func TestPaychGetMergeAddFundsCtxCancelAll(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@ -900,6 +912,7 @@ func TestPaychGetMergeAddFundsCtxCancelAll(t *testing.T) {
// TestPaychAvailableFunds tests that PaychAvailableFunds returns the correct
// channel state
func TestPaychAvailableFunds(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001, @TOKEN_PAYCH_AVAILABLE_FUNDS_001, @TOKEN_PAYCH_AVAILABLE_FUNDS_002, @TOKEN_PAYCH_AVAILABLE_FUNDS_003
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))


@ -23,6 +23,7 @@ import (
// insufficient funds, then adding funds to the channel, then adding the
// voucher again
func TestPaychAddVoucherAfterAddFunds(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))


@ -14,6 +14,7 @@ import (
)
func TestPaychSettle(t *testing.T) {
//stm: @TOKEN_PAYCH_WAIT_READY_001, @TOKEN_PAYCH_SETTLE_001, @TOKEN_PAYCH_LIST_CHANNELS_001
ctx := context.Background()
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))