feat: Add additional test annotations (#8272)
* Annotate api,proxy_util,blockstore_badger, policy tests
* Annotate splitstore: bsbadger / markset
* Annotate splitstore feature
* Annotate union/timed blockstore tests
* Annotate openrpc, diff_adt tests
* Annotate error,drand,events tests
* Annotate predicates_test
* Fix annotations
* Annotate tscache, gen tests
* Annotate fundmanager test
* Annotate repub and selection tests
* Annotate statetree_test
* Annotate forks_test
* Annotate searchwait_test.go
* Fix duplicated @@ symbols
* Annotate chain stmgr/store tests
* Annotate more (types) tests
* More tests annotated
* Annotate conformance chaos actor tests
* Annotate more integration tests
* Annotate journal system tests
* Annotate more tests.
* Annotate gas,head buffer behaviors
* Fix markset annotations
* doc: test annotations for the markets dagstore wrapper
* Annotate miner_api test in dagstore
* Annotate more test files
* Remove bad annotations from fsrepo
* Annotate wdpost system
* Remove bad annotations
* Rename "conformance" to "chaos_actor" tests
* doc: stm annotations for blockheader & election proof tests
* Annotate remaining "A" tests
* annotate: stm for error_test
* memrepo_test.go
* Annotate "b" file tests
* message_test.go
* doc: stm annotate for fsrepo_test
* Annotate "c" file tests
* Annotate "D" test files
* message_test.go
* doc: stm annotate for chain, node/config & client
* docs: stm annotate node_test
* Annotate u,v,wl tests
* doc: stm annotations for various test files
* Annotate "T" test files
* doc: stm annotate for proxy_util_test & policy_test
* doc: stm annotate for various tests
* doc: final few stm annotations
* Add mempool unit tests
* Add two more memPool Add tests
* Update submodules
* Add check function tests
* Add stm annotations, refactor test helper
* Annotate api,proxy_util,blockstore_badger, policy tests
* Annotate splitstore: bsbadger / markset solving merge conflicts
* Annotate splitstore feature
* Annotate union/timed blockstore tests
* Annotate openrpc, diff_adt tests
* Annotate error,drand,events tests
* Annotate predicates_test
* Fix annotations
* Annotate tscache, gen tests
* Annotate fundmanager test
* Annotate statetree_test
* Annotate forks_test
* Annotate searchwait_test.go
* Fix duplicated @@ symbols
* Annotate chain stmgr/store tests
* Annotate more (types) tests
* More tests annotated
* Annotate conformance chaos actor tests
* Annotate more integration tests
* Annotate journal system tests
* Annotate more tests.
* Annotate gas,head buffer behaviors solve merge conflict
* Fix markset annotations
* Annotate miner_api test in dagstore
* Annotate more test files
* doc: test annotations for the markets dagstore wrapper
* Annotate wdpost system
* Rename "conformance" to "chaos_actor" tests
* Annotate remaining "A" tests
* doc: stm annotations for blockheader & election proof tests
* annotate: stm for error_test
* Annotate "b" file tests
* memrepo_test.go
* Annotate "c" file tests
* message_test.go
* Annotate "D" test files
* doc: stm annotate for fsrepo_test
* Annotate u,v,wl tests
* message_test.go
* doc: stm annotate for chain, node/config & client
* docs: stm annotate node_test
* Annotate "T" test files
* doc: stm annotations for various test files
* Add mempool unit tests solve merge conflict
* doc: stm annotate for proxy_util_test & policy_test
* doc: stm annotate for various tests
* doc: final few stm annotations
* Add two more memPool Add tests
* Update submodules
* Add check function tests solve conflict
* Add stm annotations, refactor test helper solve merge conflict
* Change CLI test kinds to "unit"
* Fix double merged test
* Fix ccupgrade_test merge
* Fix lint issues
* Add stm annotation to types_Test
* Test vectors submodule
* Add file annotation to burn_test

Co-authored-by: Nikola Divic <divicnikola@gmail.com>
Co-authored-by: TheMenko <themenkoprojects@gmail.com>
parent 159da73fcd
commit 79453663b3
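For reference, the stm annotations added throughout the diff below follow one pattern: a file-level //stm: comment declaring the test kind, plus per-test //stm: comments tagging the behavior IDs a test covers. A minimal illustrative Go sketch (package name, test name, and behavior ID here are hypothetical, not taken from this commit):

//stm: #unit
package example

import "testing"

// TestExample shows the annotation style used in this commit: the file is tagged
// with a test kind above, and the test body is tagged with the behavior ID it covers.
func TestExample(t *testing.T) {
	//stm: @EXAMPLE_BEHAVIOR_001
	if 2+2 != 4 {
		t.Fatal("unexpected arithmetic")
	}
}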
@@ -1,3 +1,4 @@
//stm: #unit
package api

import (
@@ -26,6 +27,7 @@ func goCmd() string {
}

func TestDoesntDependOnFFI(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_FFI_DEPENDENCE_001
deps, err := exec.Command(goCmd(), "list", "-deps", "github.com/filecoin-project/lotus/api").Output()
if err != nil {
t.Fatal(err)
@@ -38,6 +40,7 @@ func TestDoesntDependOnFFI(t *testing.T) {
}

func TestDoesntDependOnBuild(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_FFI_DEPENDENCE_002
deps, err := exec.Command(goCmd(), "list", "-deps", "github.com/filecoin-project/lotus/api").Output()
if err != nil {
t.Fatal(err)
@@ -50,6 +53,7 @@ func TestDoesntDependOnBuild(t *testing.T) {
}

func TestReturnTypes(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_001
errType := reflect.TypeOf(new(error)).Elem()
bareIface := reflect.TypeOf(new(interface{})).Elem()
jmarsh := reflect.TypeOf(new(json.Marshaler)).Elem()
@@ -115,6 +119,7 @@ func TestReturnTypes(t *testing.T) {
}

func TestPermTags(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_PERM_TAGS_001
_ = PermissionedFullAPI(&FullNodeStruct{})
_ = PermissionedStorMinerAPI(&StorageMinerStruct{})
_ = PermissionedWorkerAPI(&WorkerStruct{})

@@ -1,3 +1,4 @@
//stm: #unit
package api

import (
@@ -29,6 +30,7 @@ type StrC struct {
}

func TestGetInternalStructs(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_API_STRUCTS_001
var proxy StrA

sts := GetInternalStructs(&proxy)
@@ -44,6 +46,7 @@ func TestGetInternalStructs(t *testing.T) {
}

func TestNestedInternalStructs(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_API_STRUCTS_001
var proxy StrC

// check that only the top-level internal struct gets picked up

@@ -1,3 +1,4 @@
//stm: #unit
package badgerbs

import (
@@ -20,6 +21,8 @@ import (
)

func TestBadgerBlockstore(t *testing.T) {
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
(&Suite{
NewBlockstore: newBlockstore(DefaultOptions),
OpenBlockstore: openBlockstore(DefaultOptions),
@@ -38,6 +41,8 @@ func TestBadgerBlockstore(t *testing.T) {
}

func TestStorageKey(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_STORAGE_KEY_001
bs, _ := newBlockstore(DefaultOptions)(t)
bbs := bs.(*Blockstore)
defer bbs.Close() //nolint:errcheck
@@ -265,10 +270,16 @@ func testMove(t *testing.T, optsF func(string) Options) {
}

func TestMoveNoPrefix(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_COLLECT_GARBAGE_001
testMove(t, DefaultOptions)
}

func TestMoveWithPrefix(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_COLLECT_GARBAGE_001
testMove(t, func(path string) Options {
opts := DefaultOptions(path)
opts.Prefix = "/prefixed/"

@@ -1,3 +1,4 @@
//stm: #unit
package badgerbs

import (
@@ -44,6 +45,8 @@ func (s *Suite) RunTests(t *testing.T, prefix string) {
}

func (s *Suite) TestGetWhenKeyNotPresent(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_GET_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -57,6 +60,8 @@ func (s *Suite) TestGetWhenKeyNotPresent(t *testing.T) {
}

func (s *Suite) TestGetWhenKeyIsNil(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -68,6 +73,9 @@ func (s *Suite) TestGetWhenKeyIsNil(t *testing.T) {
}

func (s *Suite) TestPutThenGetBlock(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -85,6 +93,8 @@ func (s *Suite) TestPutThenGetBlock(t *testing.T) {
}

func (s *Suite) TestHas(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_HAS_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -106,6 +116,9 @@ func (s *Suite) TestHas(t *testing.T) {
}

func (s *Suite) TestCidv0v1(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -123,6 +136,9 @@ func (s *Suite) TestCidv0v1(t *testing.T) {
}

func (s *Suite) TestPutThenGetSizeBlock(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_GET_SIZE_001
ctx := context.Background()

bs, _ := s.NewBlockstore(t)
@@ -154,6 +170,8 @@ func (s *Suite) TestPutThenGetSizeBlock(t *testing.T) {
}

func (s *Suite) TestAllKeysSimple(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
defer func() { require.NoError(t, c.Close()) }()
@@ -170,6 +188,9 @@ func (s *Suite) TestAllKeysSimple(t *testing.T) {
}

func (s *Suite) TestAllKeysRespectsContext(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
defer func() { require.NoError(t, c.Close()) }()
@@ -200,6 +221,7 @@ func (s *Suite) TestAllKeysRespectsContext(t *testing.T) {
}

func (s *Suite) TestDoubleClose(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
bs, _ := s.NewBlockstore(t)
c, ok := bs.(io.Closer)
if !ok {
@@ -210,6 +232,9 @@ func (s *Suite) TestDoubleClose(t *testing.T) {
}

func (s *Suite) TestReopenPutGet(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, path := s.NewBlockstore(t)
c, ok := bs.(io.Closer)
@@ -236,6 +261,10 @@ func (s *Suite) TestReopenPutGet(t *testing.T) {
}

func (s *Suite) TestPutMany(t *testing.T) {
//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
//stm: @SPLITSTORE_BADGER_HAS_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_GET_001, @SPLITSTORE_BADGER_PUT_MANY_001
//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -268,6 +297,11 @@ func (s *Suite) TestPutMany(t *testing.T) {
}

func (s *Suite) TestDelete(t *testing.T) {
//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_POOLED_STORAGE_HAS_001
//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001, @SPLITSTORE_BADGER_HAS_001
//stm: @SPLITSTORE_BADGER_PUT_MANY_001

ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {

@@ -1,3 +1,4 @@
//stm: #unit
package splitstore

import (
@@ -10,6 +11,8 @@ import (
)

func TestMapMarkSet(t *testing.T) {
//stm: @SPLITSTORE_MARKSET_CREATE_001, @SPLITSTORE_MARKSET_HAS_001, @@SPLITSTORE_MARKSET_MARK_001
//stm: @SPLITSTORE_MARKSET_CLOSE_001, @SPLITSTORE_MARKSET_CREATE_VISITOR_001
testMarkSet(t, "map")
testMarkSetRecovery(t, "map")
testMarkSetMarkMany(t, "map")
@@ -18,6 +21,8 @@ func TestMapMarkSet(t *testing.T) {
}

func TestBadgerMarkSet(t *testing.T) {
//stm: @SPLITSTORE_MARKSET_CREATE_001, @SPLITSTORE_MARKSET_HAS_001, @@SPLITSTORE_MARKSET_MARK_001
//stm: @SPLITSTORE_MARKSET_CLOSE_001, @SPLITSTORE_MARKSET_CREATE_VISITOR_001
bs := badgerMarkSetBatchSize
badgerMarkSetBatchSize = 1
t.Cleanup(func() {
@@ -46,6 +51,7 @@ func testMarkSet(t *testing.T, lsType string) {
}
defer env.Close() //nolint:errcheck

// stm: @SPLITSTORE_MARKSET_CREATE_001
hotSet, err := env.New("hot", 0)
if err != nil {
t.Fatal(err)
@@ -65,6 +71,7 @@ func testMarkSet(t *testing.T, lsType string) {
return cid.NewCidV1(cid.Raw, h)
}

// stm: @SPLITSTORE_MARKSET_HAS_001
mustHave := func(s MarkSet, cid cid.Cid) {
t.Helper()
has, err := s.Has(cid)
@@ -94,6 +101,7 @@ func testMarkSet(t *testing.T, lsType string) {
k3 := makeCid("c")
k4 := makeCid("d")

// stm: @SPLITSTORE_MARKSET_MARK_001
hotSet.Mark(k1) //nolint
hotSet.Mark(k2) //nolint
coldSet.Mark(k3) //nolint
@@ -144,6 +152,7 @@ func testMarkSet(t *testing.T, lsType string) {
mustNotHave(coldSet, k3)
mustNotHave(coldSet, k4)

//stm: @SPLITSTORE_MARKSET_CLOSE_001
err = hotSet.Close()
if err != nil {
t.Fatal(err)
@@ -171,6 +180,7 @@ func testMarkSetVisitor(t *testing.T, lsType string) {
}
defer env.Close() //nolint:errcheck

//stm: @SPLITSTORE_MARKSET_CREATE_VISITOR_001
visitor, err := env.New("test", 0)
if err != nil {
t.Fatal(err)

@@ -1,3 +1,4 @@
//stm: #unit
package splitstore

import (
@@ -228,10 +229,16 @@ func testSplitStore(t *testing.T, cfg *Config) {
}

func TestSplitStoreCompaction(t *testing.T) {
//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
testSplitStore(t, &Config{MarkSetType: "map"})
}

func TestSplitStoreCompactionWithBadger(t *testing.T) {
//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
bs := badgerMarkSetBatchSize
badgerMarkSetBatchSize = 1
t.Cleanup(func() {
@@ -241,6 +248,9 @@ func TestSplitStoreCompactionWithBadger(t *testing.T) {
}

func TestSplitStoreSuppressCompactionNearUpgrade(t *testing.T) {
//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
ctx := context.Background()
chain := &mockChain{t: t}

@@ -1,3 +1,4 @@
//stm: #unit
package blockstore

import (
@@ -13,6 +14,9 @@ import (
)

func TestTimedCacheBlockstoreSimple(t *testing.T) {
//stm: @SPLITSTORE_TIMED_BLOCKSTORE_START_001
//stm: @SPLITSTORE_TIMED_BLOCKSTORE_PUT_001, @SPLITSTORE_TIMED_BLOCKSTORE_HAS_001, @SPLITSTORE_TIMED_BLOCKSTORE_GET_001
//stm: @SPLITSTORE_TIMED_BLOCKSTORE_ALL_KEYS_CHAN_001
tc := NewTimedCacheBlockstore(10 * time.Millisecond)
mClock := clock.NewMock()
mClock.Set(time.Now())

@@ -1,3 +1,4 @@
//stm: #unit
package blockstore

import (
@@ -15,6 +16,7 @@ var (
)

func TestUnionBlockstore_Get(t *testing.T) {
//stm: @SPLITSTORE_UNION_BLOCKSTORE_GET_001
ctx := context.Background()
m1 := NewMemory()
m2 := NewMemory()
@@ -34,6 +36,9 @@ func TestUnionBlockstore_Get(t *testing.T) {
}

func TestUnionBlockstore_Put_PutMany_Delete_AllKeysChan(t *testing.T) {
//stm: @SPLITSTORE_UNION_BLOCKSTORE_PUT_001, @SPLITSTORE_UNION_BLOCKSTORE_HAS_001
//stm: @SPLITSTORE_UNION_BLOCKSTORE_PUT_MANY_001, @SPLITSTORE_UNION_BLOCKSTORE_DELETE_001
//stm: @SPLITSTORE_UNION_BLOCKSTORE_ALL_KEYS_CHAN_001
ctx := context.Background()
m1 := NewMemory()
m2 := NewMemory()

@@ -1,3 +1,4 @@
//stm: #unit
package build

import (
@@ -7,6 +8,7 @@ import (
)

func TestOpenRPCDiscoverJSON_Version(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_OPENRPC_VERSION_001
// openRPCDocVersion is the current OpenRPC version of the API docs.
openRPCDocVersion := "1.2.6"

@@ -1,3 +1,4 @@
//stm: #unit
package adt

import (
@@ -44,6 +45,7 @@ func TestDiffAdtArray(t *testing.T) {

changes := new(TestDiffArray)

//stm: @CHAIN_ADT_ARRAY_DIFF_001
assert.NoError(t, DiffAdtArray(arrA, arrB, changes))
assert.NotNil(t, changes)

@@ -98,6 +100,7 @@ func TestDiffAdtMap(t *testing.T) {

changes := new(TestDiffMap)

//stm: @CHAIN_ADT_MAP_DIFF_001
assert.NoError(t, DiffAdtMap(mapA, mapB, changes))
assert.NotNil(t, changes)

@@ -1,3 +1,4 @@
//stm: #unit
package aerrors_test

import (
@@ -11,6 +12,7 @@ import (
)

func TestFatalError(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_ACTOR_ERRORS_001
e1 := xerrors.New("out of disk space")
e2 := xerrors.Errorf("could not put node: %w", e1)
e3 := xerrors.Errorf("could not save head: %w", e2)
@@ -24,6 +26,7 @@ func TestFatalError(t *testing.T) {
assert.True(t, IsFatal(aw4), "should be fatal")
}
func TestAbsorbeError(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_ACTOR_ERRORS_001
e1 := xerrors.New("EOF")
e2 := xerrors.Errorf("could not decode: %w", e1)
ae := Absorb(e2, 35, "failed to decode CBOR")

@@ -1,3 +1,4 @@
//stm: #unit
package policy

import (
@@ -22,6 +23,7 @@ func TestSupportedProofTypes(t *testing.T) {
for t := range miner0.SupportedProofTypes {
oldTypes = append(oldTypes, t)
}
//stm: @BLOCKCHAIN_POLICY_SET_MAX_SUPPORTED_PROOF_TYPES_001
t.Cleanup(func() {
SetSupportedProofTypes(oldTypes...)
})
@@ -33,6 +35,7 @@ func TestSupportedProofTypes(t *testing.T) {
abi.RegisteredSealProof_StackedDrg2KiBV1: {},
},
)
//stm: @BLOCKCHAIN_POLICY_ADD_MAX_SUPPORTED_PROOF_TYPES_001
AddSupportedProofTypes(abi.RegisteredSealProof_StackedDrg8MiBV1)
require.EqualValues(t,
miner0.SupportedProofTypes,
@@ -45,6 +48,7 @@ func TestSupportedProofTypes(t *testing.T) {

// Tests assumptions about policies being the same between actor versions.
func TestAssumptions(t *testing.T) {
//stm: @BLOCKCHAIN_POLICY_ASSUMPTIONS_001
require.EqualValues(t, miner0.SupportedProofTypes, miner2.PreCommitSealProofTypesV0)
require.Equal(t, miner0.PreCommitChallengeDelay, miner2.PreCommitChallengeDelay)
require.Equal(t, miner0.MaxSectorExpirationExtension, miner2.MaxSectorExpirationExtension)
@@ -58,6 +62,7 @@ func TestAssumptions(t *testing.T) {
}

func TestPartitionSizes(t *testing.T) {
//stm: @CHAIN_ACTOR_PARTITION_SIZES_001
for _, p := range abi.SealProofInfos {
sizeNew, err := builtin2.PoStProofWindowPoStPartitionSectors(p.WindowPoStProof)
require.NoError(t, err)
@@ -71,6 +76,7 @@ func TestPartitionSizes(t *testing.T) {
}

func TestPoStSize(t *testing.T) {
//stm: @BLOCKCHAIN_POLICY_GET_MAX_POST_PARTITIONS_001
v12PoStSize, err := GetMaxPoStPartitions(network.Version12, abi.RegisteredPoStProof_StackedDrgWindow64GiBV1)
require.Equal(t, 4, v12PoStSize)
require.NoError(t, err)

@@ -1,3 +1,5 @@
//stm: ignore
//Only tests external library behavior, therefore it should not be annotated
package drand

import (

@@ -1,3 +1,4 @@
//stm: #unit
package events

import (
@@ -358,6 +359,7 @@ func (fcs *fakeCS) advance(rev, app, drop int, msgs map[int]cid.Cid, nulls ...in
var _ EventAPI = &fakeCS{}

func TestAt(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)
events, err := NewEvents(context.Background(), fcs)
require.NoError(t, err)
@@ -418,6 +420,7 @@ func TestAt(t *testing.T) {
}

func TestAtNullTrigger(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)
events, err := NewEvents(context.Background(), fcs)
require.NoError(t, err)
@@ -447,6 +450,7 @@ func TestAtNullTrigger(t *testing.T) {
}

func TestAtNullConf(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001, @EVENTS_HEIGHT_REVERT_001
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

@@ -485,6 +489,7 @@ func TestAtNullConf(t *testing.T) {
}

func TestAtStart(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -515,6 +520,7 @@ func TestAtStart(t *testing.T) {
}

func TestAtStartConfidence(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -541,6 +547,7 @@ func TestAtStartConfidence(t *testing.T) {
}

func TestAtChained(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -571,6 +578,7 @@ func TestAtChained(t *testing.T) {
}

func TestAtChainedConfidence(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -601,6 +609,7 @@ func TestAtChainedConfidence(t *testing.T) {
}

func TestAtChainedConfidenceNull(t *testing.T) {
//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -632,6 +641,7 @@ func matchAddrMethod(to address.Address, m abi.MethodNum) func(msg *types.Messag
}

func TestCalled(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -837,6 +847,7 @@ func TestCalled(t *testing.T) {
}

func TestCalledTimeout(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -897,6 +908,7 @@ func TestCalledTimeout(t *testing.T) {
}

func TestCalledOrder(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -953,6 +965,7 @@ func TestCalledOrder(t *testing.T) {
}

func TestCalledNull(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1011,6 +1024,7 @@ func TestCalledNull(t *testing.T) {
}

func TestRemoveTriggersOnMessage(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1094,6 +1108,7 @@ type testStateChange struct {
}

func TestStateChanged(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1179,6 +1194,7 @@ func TestStateChanged(t *testing.T) {
}

func TestStateChangedRevert(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1255,6 +1271,7 @@ func TestStateChangedRevert(t *testing.T) {
}

func TestStateChangedTimeout(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
timeoutHeight := abi.ChainEpoch(20)
confidence := 3

@@ -1332,6 +1349,7 @@ func TestStateChangedTimeout(t *testing.T) {
}

func TestCalledMultiplePerEpoch(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1384,6 +1402,7 @@ func TestCalledMultiplePerEpoch(t *testing.T) {
}

func TestCachedSameBlock(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

_, err := NewEvents(context.Background(), fcs)
@@ -1418,6 +1437,7 @@ func (t *testObserver) Revert(_ context.Context, from, to *types.TipSet) error {
}

func TestReconnect(t *testing.T) {
//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

@@ -1,3 +1,4 @@
//stm: #unit
package state

import (
@@ -35,6 +36,12 @@ func init() {
}

func TestMarketPredicates(t *testing.T) {
//stm: @EVENTS_PREDICATES_ON_ACTOR_STATE_CHANGED_001, @EVENTS_PREDICATES_DEAL_STATE_CHANGED_001
//stm: @EVENTS_PREDICATES_DEAL_CHANGED_FOR_IDS

//stm: @EVENTS_PREDICATES_ON_BALANCE_CHANGED_001, @EVENTS_PREDICATES_BALANCE_CHANGED_FOR_ADDRESS_001
//stm: @EVENTS_PREDICATES_ON_DEAL_PROPOSAL_CHANGED_001, @EVENTS_PREDICATES_PROPOSAL_AMT_CHANGED_001
//stm: @EVENTS_PREDICATES_DEAL_STATE_CHANGED_001, @EVENTS_PREDICATES_DEAL_AMT_CHANGED_001
ctx := context.Background()
bs := bstore.NewMemorySync()
store := adt2.WrapStore(ctx, cbornode.NewCborStore(bs))
@@ -333,6 +340,8 @@ func TestMarketPredicates(t *testing.T) {
}

func TestMinerSectorChange(t *testing.T) {
//stm: @EVENTS_PREDICATES_ON_ACTOR_STATE_CHANGED_001, @EVENTS_PREDICATES_MINER_ACTOR_CHANGE_001
//stm: @EVENTS_PREDICATES_MINER_SECTOR_CHANGE_001
ctx := context.Background()
bs := bstore.NewMemorySync()
store := adt2.WrapStore(ctx, cbornode.NewCborStore(bs))

@@ -1,3 +1,4 @@
//stm: #unit
package events

import (
@@ -92,6 +93,7 @@ func (h *cacheHarness) skip(n abi.ChainEpoch) {
}

func TestTsCache(t *testing.T) {
//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
h := newCacheharness(t)

for i := 0; i < 9000; i++ {
@@ -104,6 +106,8 @@ func TestTsCache(t *testing.T) {
}

func TestTsCacheNulls(t *testing.T) {
//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_CHAIN_TIPSET_BEFORE_001, @EVENTS_CACHE_GET_CHAIN_TIPSET_AFTER_001
//stm: @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
ctx := context.Background()
h := newCacheharness(t)

@@ -182,6 +186,7 @@ func (tc *tsCacheAPIStorageCallCounter) ChainGetTipSet(ctx context.Context, tsk
}

func TestTsCacheEmpty(t *testing.T) {
//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001
// Calling best on an empty cache should just call out to the chain API
callCounter := &tsCacheAPIStorageCallCounter{t: t}
tsc := newTSCache(callCounter, 50)
@@ -191,6 +196,7 @@ func TestTsCacheEmpty(t *testing.T) {
}

func TestTsCacheSkip(t *testing.T) {
//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
h := newCacheharness(t)

ts, err := types.NewTipSet([]*types.BlockHeader{{

@@ -1,3 +1,4 @@
//stm: #unit
package gen

import (
@@ -34,6 +35,7 @@ func testGeneration(t testing.TB, n int, msgs int, sectors int) {
}

func TestChainGeneration(t *testing.T) {
//stm: @CHAIN_GEN_NEW_GEN_WITH_SECTORS_001, @CHAIN_GEN_NEXT_TIPSET_001
t.Run("10-20-1", func(t *testing.T) { testGeneration(t, 10, 20, 1) })
t.Run("10-20-25", func(t *testing.T) { testGeneration(t, 10, 20, 25) })
}

@@ -1,3 +1,4 @@
//stm: #unit
package market

import (
@@ -22,6 +23,7 @@ import (

// TestFundManagerBasic verifies that the basic fund manager operations work
func TestFundManagerBasic(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -106,6 +108,7 @@ func TestFundManagerBasic(t *testing.T) {

// TestFundManagerParallel verifies that operations can be run in parallel
func TestFundManagerParallel(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -197,6 +200,7 @@ func TestFundManagerParallel(t *testing.T) {

// TestFundManagerReserveByWallet verifies that reserve requests are grouped by wallet
func TestFundManagerReserveByWallet(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -290,6 +294,7 @@ func TestFundManagerReserveByWallet(t *testing.T) {
// TestFundManagerWithdrawal verifies that as many withdraw operations as
// possible are processed
func TestFundManagerWithdrawalLimit(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -384,6 +389,7 @@ func TestFundManagerWithdrawalLimit(t *testing.T) {

// TestFundManagerWithdrawByWallet verifies that withdraw requests are grouped by wallet
func TestFundManagerWithdrawByWallet(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -493,6 +499,7 @@ func TestFundManagerWithdrawByWallet(t *testing.T) {
// TestFundManagerRestart verifies that waiting for incomplete requests resumes
// on restart
func TestFundManagerRestart(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -559,6 +566,7 @@ func TestFundManagerRestart(t *testing.T) {
// 3. Deal B completes, reducing addr1 by 7: reserved 12 available 12 -> 5
// 4. Deal A releases 5 from addr1: reserved 12 -> 7 available 5
func TestFundManagerReleaseAfterPublish(t *testing.T) {
//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -1,3 +1,4 @@
//stm: #unit
package messagepool

import (
@@ -8,6 +9,7 @@ import (
)

func TestBlockProbability(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_BLOCK_PROB_001
mp := &MessagePool{}
bp := mp.blockProbabilities(1 - 0.15)
t.Logf("%+v\n", bp)
@@ -20,6 +22,7 @@ func TestBlockProbability(t *testing.T) {
}

func TestWinnerProba(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_BLOCK_PROB_002
rand.Seed(time.Now().UnixNano())
const N = 1000000
winnerProba := noWinnersProb()

@@ -854,7 +854,6 @@ func TestMessageValueTooHigh(t *testing.T) {
Message: *msg,
Signature: *sig,
}

err = mp.Add(context.TODO(), sm)
assert.Error(t, err)
}
@@ -901,8 +900,7 @@ func TestMessageSignatureInvalid(t *testing.T) {
}
err = mp.Add(context.TODO(), sm)
assert.Error(t, err)
// assert.Contains(t, err.Error(), "invalid signature length")
assert.Error(t, err)
assert.Contains(t, err.Error(), "invalid signature length")
}
}

@@ -926,14 +924,29 @@ func TestAddMessageTwice(t *testing.T) {
to := mock.Address(1001)

{
// create a valid messages
sm := makeTestMessage(w, from, to, 0, 50_000_000, minimumBaseFee.Uint64())
msg := &types.Message{
To: to,
From: from,
Value: types.NewInt(1),
Nonce: 0,
GasLimit: 50000000,
GasFeeCap: types.NewInt(minimumBaseFee.Uint64()),
GasPremium: types.NewInt(1),
Params: make([]byte, 32<<10),
}

sig, err := w.WalletSign(context.TODO(), from, msg.Cid().Bytes(), api.MsgMeta{})
if err != nil {
panic(err)
}
sm := &types.SignedMessage{
Message: *msg,
Signature: *sig,
}
mustAdd(t, mp, sm)

// try to add it twice
err = mp.Add(context.TODO(), sm)
// assert.Contains(t, err.Error(), "with nonce 0 already in mpool")
assert.Error(t, err)
assert.Contains(t, err.Error(), "with nonce 0 already in mpool")
}
}

@@ -963,8 +976,7 @@ func TestAddMessageTwiceNonceGap(t *testing.T) {

// then try to add message again
err = mp.Add(context.TODO(), sm)
// assert.Contains(t, err.Error(), "unfulfilled nonce gap")
assert.Error(t, err)
assert.Contains(t, err.Error(), "unfulfilled nonce gap")
}
}

@@ -1,3 +1,4 @@
//stm: #unit
package state

import (
@@ -18,6 +19,7 @@ import (
)

func BenchmarkStateTreeSet(b *testing.B) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001
cst := cbor.NewMemCborStore()
st, err := NewStateTree(cst, types.StateTreeVersion1)
if err != nil {
@@ -45,6 +47,7 @@ func BenchmarkStateTreeSet(b *testing.B) {
}

func BenchmarkStateTreeSetFlush(b *testing.B) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -80,6 +83,8 @@ func BenchmarkStateTreeSetFlush(b *testing.B) {
}

func TestResolveCache(t *testing.T) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
//stm: @CHAIN_STATETREE_SNAPSHOT_001, @CHAIN_STATETREE_SNAPSHOT_CLEAR_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -182,6 +187,8 @@ func TestResolveCache(t *testing.T) {
}

func BenchmarkStateTree10kGetActor(b *testing.B) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
//stm: @CHAIN_STATETREE_FLUSH_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -229,6 +236,7 @@ func BenchmarkStateTree10kGetActor(b *testing.B) {
}

func TestSetCache(t *testing.T) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -270,6 +278,8 @@ func TestSetCache(t *testing.T) {
}

func TestSnapshots(t *testing.T) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
//stm: @CHAIN_STATETREE_FLUSH_001, @CHAIN_STATETREE_SNAPSHOT_REVERT_001, CHAIN_STATETREE_SNAPSHOT_CLEAR_001
ctx := context.Background()
cst := cbor.NewMemCborStore()

@@ -360,6 +370,7 @@ func assertNotHas(t *testing.T, st *StateTree, addr address.Address) {
}

func TestStateTreeConsistency(t *testing.T) {
//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001, @CHAIN_STATETREE_FLUSH_001
cst := cbor.NewMemCborStore()

// TODO: ActorUpgrade: this test tests pre actors v2

@@ -1,3 +1,4 @@
//stm: #integration
package stmgr_test

import (
@@ -106,6 +107,9 @@ func (ta *testActor) TestMethod(rt rt2.Runtime, params *abi.EmptyValue) *abi.Emp
}

func TestForkHeightTriggers(t *testing.T) {
//stm: @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_FLUSH_001, @TOKEN_WALLET_SIGN_001
//stm: @CHAIN_GEN_NEXT_TIPSET_001
//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001
logging.SetAllLoggers(logging.LevelInfo)

ctx := context.TODO()
@@ -241,6 +245,8 @@ func TestForkHeightTriggers(t *testing.T) {
}

func TestForkRefuseCall(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001, @CHAIN_GEN_NEXT_TIPSET_FROM_MINERS_001
//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001, @CHAIN_STATE_CALL_001
logging.SetAllLoggers(logging.LevelInfo)

for after := 0; after < 3; after++ {
@@ -360,6 +366,8 @@ func testForkRefuseCall(t *testing.T, nullsBefore, nullsAfter int) {
}

func TestForkPreMigration(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001,
//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001
logging.SetAllLoggers(logging.LevelInfo)

cg, err := gen.NewGenerator()

@@ -1,3 +1,4 @@
//stm: #unit
package stmgr_test

import (
@@ -12,6 +13,8 @@ import (
)

func TestSearchForMessageReplacements(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001
//stm: @CHAIN_STATE_SEARCH_MSG_001
ctx := context.Background()
cg, err := gen.NewGenerator()
if err != nil {

@@ -1,3 +1,5 @@
//stm: #unit

package store

import (
@@ -10,6 +12,7 @@ import (
)

func TestBaseFee(t *testing.T) {
//stm: @CHAIN_STORE_COMPUTE_NEXT_BASE_FEE_001
tests := []struct {
basefee uint64
limitUsed int64

@@ -1,3 +1,4 @@
//stm: #unit
package store_test

import (
@@ -10,6 +11,9 @@ import (
)

func TestChainCheckpoint(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_FROM_MINERS_001
//stm: @CHAIN_STORE_GET_TIPSET_FROM_KEY_001, @CHAIN_STORE_SET_HEAD_001, @CHAIN_STORE_GET_HEAVIEST_TIPSET_001
//stm: @CHAIN_STORE_SET_CHECKPOINT_001, @CHAIN_STORE_MAYBE_TAKE_HEAVIER_TIPSET_001, @CHAIN_STORE_REMOVE_CHECKPOINT_001
ctx := context.Background()

cg, err := gen.NewGenerator()

@@ -1,3 +1,4 @@
//stm: #unit
package store

import (
@@ -9,6 +10,7 @@ import (
)

func TestHeadChangeCoalescer(t *testing.T) {
//stm: @CHAIN_STORE_COALESCE_HEAD_CHANGE_001
notif := make(chan headChange, 1)
c := NewHeadChangeCoalescer(func(revert, apply []*types.TipSet) error {
notif <- headChange{apply: apply, revert: revert}

@@ -1,3 +1,4 @@
//stm: #unit
package store_test

import (
@@ -17,6 +18,9 @@ import (
)

func TestIndexSeeks(t *testing.T) {
//stm: @CHAIN_STORE_IMPORT_001
//stm: @CHAIN_STORE_GET_TIPSET_BY_HEIGHT_001, @CHAIN_STORE_PUT_TIPSET_001, @CHAIN_STORE_SET_GENESIS_BLOCK_001
//stm: @CHAIN_STORE_CLOSE_001
cg, err := gen.NewGenerator()
if err != nil {
t.Fatal(err)

@@ -1,3 +1,4 @@
//stm: #unit
package store_test

import (
@@ -28,6 +29,8 @@ func init() {
}

func BenchmarkGetRandomness(b *testing.B) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001
//stm: @CHAIN_STATE_GET_RANDOMNESS_FROM_TICKETS_001
cg, err := gen.NewGenerator()
if err != nil {
b.Fatal(err)
@@ -85,6 +88,8 @@ func BenchmarkGetRandomness(b *testing.B) {
}

func TestChainExportImport(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001
//stm: @CHAIN_STORE_IMPORT_001
cg, err := gen.NewGenerator()
if err != nil {
t.Fatal(err)
@@ -120,6 +125,9 @@ func TestChainExportImport(t *testing.T) {
}

func TestChainExportImportFull(t *testing.T) {
//stm: @CHAIN_GEN_NEXT_TIPSET_001
//stm: @CHAIN_STORE_IMPORT_001, @CHAIN_STORE_EXPORT_001, @CHAIN_STORE_SET_HEAD_001
//stm: @CHAIN_STORE_GET_TIPSET_BY_HEIGHT_001
cg, err := gen.NewGenerator()
if err != nil {
t.Fatal(err)

@@ -1,3 +1,4 @@
//stm: #unit
package chain

import (
@@ -78,6 +79,7 @@ func assertGetSyncOp(t *testing.T, c chan *syncOp, ts *types.TipSet) {
}

func TestSyncManagerEdgeCase(t *testing.T) {
//stm: @CHAIN_SYNCER_SET_PEER_HEAD_001
ctx := context.Background()

a := mock.TipSet(mock.MkBlock(genTs, 1, 1))
@@ -161,6 +163,7 @@ func TestSyncManagerEdgeCase(t *testing.T) {
}

func TestSyncManager(t *testing.T) {
//stm: @CHAIN_SYNCER_SET_PEER_HEAD_001
ctx := context.Background()

a := mock.TipSet(mock.MkBlock(genTs, 1, 1))

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -14,6 +15,7 @@ import (
)

func TestBigIntSerializationRoundTrip(t *testing.T) {
//stm: @CHAIN_TYPES_PARSE_BIGINT_001
testValues := []string{
"0", "1", "10", "-10", "9999", "12345678901234567891234567890123456789012345678901234567890",
}
@@ -42,6 +44,7 @@ func TestBigIntSerializationRoundTrip(t *testing.T) {
}

func TestFilRoundTrip(t *testing.T) {
//stm: @TYPES_FIL_PARSE_001
testValues := []string{
"0 FIL", "1 FIL", "1.001 FIL", "100.10001 FIL", "101100 FIL", "5000.01 FIL", "5000 FIL",
}
@@ -59,6 +62,7 @@ func TestFilRoundTrip(t *testing.T) {
}

func TestSizeStr(t *testing.T) {
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
cases := []struct {
in uint64
out string
@@ -79,6 +83,7 @@ func TestSizeStr(t *testing.T) {
}

func TestSizeStrUnitsSymmetry(t *testing.T) {
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
s := rand.NewSource(time.Now().UnixNano())
r := rand.New(s)

@@ -95,6 +100,7 @@ func TestSizeStrUnitsSymmetry(t *testing.T) {
}

func TestSizeStrBig(t *testing.T) {
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
ZiB := big.NewInt(50000)
ZiB = ZiB.Lsh(ZiB, 70)

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -51,6 +52,7 @@ func testBlockHeader(t testing.TB) *BlockHeader {
}

func TestBlockHeaderSerialization(t *testing.T) {
//stm: @CHAIN_TYPES_BLOCK_HEADER_FROM_CBOR_001, @CHAIN_TYPES_BLOCK_HEADER_TO_CBOR_001
bh := testBlockHeader(t)

buf := new(bytes.Buffer)
@@ -71,6 +73,7 @@ func TestBlockHeaderSerialization(t *testing.T) {
}

func TestInteropBH(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_BLOCK_INTEROP_001
newAddr, err := address.NewSecp256k1Address([]byte("address0"))

if err != nil {

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -11,6 +12,7 @@ import (
)

func TestPoissonFunction(t *testing.T) {
//stm: @CHAIN_TYPES_POISSON_001
tests := []struct {
lambdaBase uint64
lambdaShift uint
@@ -47,6 +49,7 @@ func TestPoissonFunction(t *testing.T) {
}

func TestLambdaFunction(t *testing.T) {
//stm: @CHAIN_TYPES_LAMBDA_001
tests := []struct {
power string
totalPower string
@@ -72,6 +75,7 @@ func TestLambdaFunction(t *testing.T) {
}

func TestExpFunction(t *testing.T) {
//stm: @CHAIN_TYPES_NEGATIVE_EXP_001
const N = 256

step := big.NewInt(5)
@@ -100,6 +104,7 @@ func q256ToF(x *big.Int) float64 {
}

func TestElectionLam(t *testing.T) {
//stm: @CHAIN_TYPES_LAMBDA_001
p := big.NewInt(64)
tot := big.NewInt(128)
lam := lambda(p, tot)
@@ -128,6 +133,7 @@ func BenchmarkWinCounts(b *testing.B) {
}

func TestWinCounts(t *testing.T) {
//stm: @TYPES_ELECTION_PROOF_COMPUTE_WIN_COUNT_001
totalPower := NewInt(100)
power := NewInt(20)

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -7,6 +8,7 @@ import (
)

func TestFilShort(t *testing.T) {
//stm: @TYPES_FIL_PARSE_001
for _, s := range []struct {
fil string
expect string

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -71,6 +72,7 @@ func TestEqualCall(t *testing.T) {
Params: []byte("hai"),
}

//stm: @TYPES_MESSAGE_EQUAL_CALL_001
require.True(t, m1.EqualCall(m2))
require.True(t, m1.EqualCall(m3))
require.False(t, m1.EqualCall(m4))
@@ -97,11 +99,13 @@ func TestMessageJson(t *testing.T) {
exp := []byte("{\"Version\":0,\"To\":\"f04\",\"From\":\"f00\",\"Nonce\":34,\"Value\":\"0\",\"GasLimit\":123,\"GasFeeCap\":\"234\",\"GasPremium\":\"234\",\"Method\":6,\"Params\":\"aGFp\",\"CID\":{\"/\":\"bafy2bzaced5rdpz57e64sc7mdwjn3blicglhpialnrph2dlbufhf6iha63dmc\"}}")
fmt.Println(string(b))

//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_001
require.Equal(t, exp, b)

var um Message
require.NoError(t, json.Unmarshal(b, &um))

//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
require.EqualValues(t, *m, um)
}

@@ -131,10 +135,12 @@ func TestSignedMessageJson(t *testing.T) {
exp := []byte("{\"Message\":{\"Version\":0,\"To\":\"f04\",\"From\":\"f00\",\"Nonce\":34,\"Value\":\"0\",\"GasLimit\":123,\"GasFeeCap\":\"234\",\"GasPremium\":\"234\",\"Method\":6,\"Params\":\"aGFp\",\"CID\":{\"/\":\"bafy2bzaced5rdpz57e64sc7mdwjn3blicglhpialnrph2dlbufhf6iha63dmc\"}},\"Signature\":{\"Type\":0,\"Data\":null},\"CID\":{\"/\":\"bafy2bzacea5ainifngxj3rygaw2hppnyz2cw72x5pysqty2x6dxmjs5qg2uus\"}}")
fmt.Println(string(b))

//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_001
require.Equal(t, exp, b)

var um SignedMessage
require.NoError(t, json.Unmarshal(b, &um))

//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
require.EqualValues(t, *sm, um)
}

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -8,6 +9,7 @@ import (
)

func TestSignatureSerializeRoundTrip(t *testing.T) {
//stm: @CHAIN_TYPES_SIGNATURE_SERIALIZATION_001
s := &crypto.Signature{
Data: []byte("foo bar cat dog"),
Type: crypto.SigTypeBLS,

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (
@@ -12,6 +13,7 @@ import (
)

func TestTipSetKey(t *testing.T) {
//stm: @TYPES_TIPSETKEY_FROM_BYTES_001, @TYPES_TIPSETKEY_NEW_001
cb := cid.V1Builder{Codec: cid.DagCBOR, MhType: multihash.BLAKE2B_MIN + 31}
c1, _ := cb.Sum([]byte("a"))
c2, _ := cb.Sum([]byte("b"))

@@ -1,3 +1,4 @@
//stm: #unit
package types

import (

@@ -1,3 +1,4 @@
//stm: #unit
package chain

import (
@@ -12,6 +13,7 @@ import (
)

func TestSignedMessageJsonRoundtrip(t *testing.T) {
//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
to, _ := address.NewIDAddress(5234623)
from, _ := address.NewIDAddress(603911192)
smsg := &types.SignedMessage{
@@ -40,6 +42,7 @@ func TestSignedMessageJsonRoundtrip(t *testing.T) {
}

func TestAddressType(t *testing.T) {
//stm: @CHAIN_TYPES_ADDRESS_PREFIX_001
build.SetAddressNetwork(address.Testnet)
addr, err := makeRandomAddress()
if err != nil {

@@ -1,3 +1,4 @@
//stm: #unit
package vectors

import (
@@ -26,6 +27,7 @@ func LoadVector(t *testing.T, f string, out interface{}) {
}

func TestBlockHeaderVectors(t *testing.T) {
//stm: @CHAIN_TYPES_SERIALIZATION_BLOCK_001
var headers []HeaderVector
LoadVector(t, "block_headers.json", &headers)

@@ -46,6 +48,7 @@ func TestBlockHeaderVectors(t *testing.T) {
}

func TestMessageSigningVectors(t *testing.T) {
//stm: @CHAIN_TYPES_SERIALIZATION_SIGNED_MESSAGE_001
var msvs []MessageSigningVector
LoadVector(t, "message_signing.json", &msvs)

@@ -64,6 +67,7 @@ func TestMessageSigningVectors(t *testing.T) {
}

func TestUnsignedMessageVectors(t *testing.T) {
//stm: @CHAIN_TYPES_SERIALIZATION_MESSAGE_001
var msvs []UnsignedMessageVector
LoadVector(t, "unsigned_messages.json", &msvs)

@@ -1,3 +1,4 @@
//stm: #unit
package vm

import (
@@ -9,6 +10,7 @@ import (
)

func TestGasBurn(t *testing.T) {
//stm: @BURN_ESTIMATE_GAS_OVERESTIMATION_BURN_001
tests := []struct {
used int64
limit int64
@@ -40,6 +42,7 @@ func TestGasBurn(t *testing.T) {
}

func TestGasOutputs(t *testing.T) {
//stm: @BURN_ESTIMATE_GAS_OUTPUTS_001
baseFee := types.NewInt(10)
tests := []struct {
used int64

@@ -1,3 +1,4 @@
//stm: #unit
package vm

import (

@@ -1,3 +1,4 @@
//stm: #unit
package vm

import (
@@ -106,6 +107,7 @@ func (*basicRtMessage) ValueReceived() abi.TokenAmount {
}

func TestInvokerBasic(t *testing.T) {
//stm: @INVOKER_TRANSFORM_001
inv := ActorRegistry{}
code, err := inv.transform(basicContract{})
assert.NoError(t, err)

@@ -1,3 +1,4 @@
//stm: #unit
package vm

import (
@@ -22,6 +23,7 @@ func (*NotAVeryGoodMarshaler) MarshalCBOR(writer io.Writer) error {
var _ cbg.CBORMarshaler = &NotAVeryGoodMarshaler{}

func TestRuntimePutErrors(t *testing.T) {
//stm: @CHAIN_VM_STORE_PUT_002
defer func() {
err := recover()
if err == nil {

@@ -1,4 +1,4 @@
//stm: #cli
//stm: #unit
package cli

import (

@@ -1,4 +1,4 @@
//stm: #cli
//stm: #unit
package cli

import (

@@ -1,3 +1,5 @@
//stm: ignore
//stm: #unit
package cli

import (

@@ -1,3 +1,5 @@
//stm: ignore
//stm: #unit
package cli

import (

@@ -1,3 +1,4 @@
//stm: #unit
package cli

import (

@@ -1,4 +1,4 @@
//stm: #cli
//stm: #unit
package cli

import (

@@ -1,3 +1,4 @@
//stm: #unit
package main

import (

@@ -1,3 +1,4 @@
//stm: #unit
package main

import (
@@ -8,6 +9,7 @@ import (
)

func TestRateLimit(t *testing.T) {
//stm: @CMD_LIMITER_GET_IP_LIMITER_001, @CMD_LIMITER_GET_WALLET_LIMITER_001
limiter := NewLimiter(LimiterConfig{
TotalRate: time.Second,
TotalBurst: 20,

@@ -1,3 +1,4 @@
//stm: #unit
package main

import (
@@ -9,6 +10,7 @@ import (
)

func TestAppendCIDsToWindow(t *testing.T) {
//stm: @CMD_HEALTH_APPEND_CIDS_001
assert := assert.New(t)
var window CidWindow
threshold := 3
@@ -27,6 +29,7 @@ func TestAppendCIDsToWindow(t *testing.T) {
}

func TestCheckWindow(t *testing.T) {
//stm: @CMD_HEALTH_APPEND_CIDS_001, @CMD_HEALTH_CHECK_WINDOW_001
assert := assert.New(t)
threshold := 3

@@ -1,3 +1,4 @@
//stm: #unit
package main

import (
@@ -23,6 +24,7 @@ import (
)

func TestWorkerKeyChange(t *testing.T) {
//stm: @OTHER_WORKER_KEY_CHANGE_001
if testing.Short() {
t.Skip("skipping test in short mode")
}

@@ -1,3 +1,4 @@
//stm: #integration
package main

import (
@@ -49,6 +50,7 @@ func TestMinerAllInfo(t *testing.T) {

t.Run("pre-info-all", run)

//stm: @CLIENT_DATA_IMPORT_001, @CLIENT_STORAGE_DEALS_GET_001
dh := kit.NewDealHarness(t, client, miner, miner)
deal, res, inPath := dh.MakeOnlineDeal(context.Background(), kit.MakeFullDealParams{Rseed: 6})
outPath := dh.PerformRetrieval(context.Background(), deal, res.Root, false)

@@ -1,3 +1,4 @@
//stm: #unit
package stages

import (
@@ -13,6 +14,7 @@ import (
)

func TestCommitQueue(t *testing.T) {
//stm: @CMD_COMMIT_Q_ENQUEUE_COMMIT_001
var q commitQueue
addr1, err := address.NewIDAddress(1000)
require.NoError(t, err)
@@ -46,6 +48,7 @@ func TestCommitQueue(t *testing.T) {
SectorNumber: 6,
}))

//stm: @CMD_COMMIT_Q_ADVANCE_EPOCH_001, @CMD_COMMIT_Q_NEXT_MINER_001
epoch := abi.ChainEpoch(0)
q.advanceEpoch(epoch)
_, _, ok := q.nextMiner()

@@ -1,3 +1,4 @@
//stm: #unit
package main

import (
@@ -10,6 +11,7 @@ import (
)

func TestProtocolCodenames(t *testing.T) {
//stm: @OTHER_IMPLEMENTATION_EPOCH_CODENAMES_001
if height := abi.ChainEpoch(100); GetProtocolCodename(height) != "genesis" {
t.Fatal("expected genesis codename")
}

@@ -1,3 +1,4 @@
//stm: #chaos
package chaos

import (
@@ -15,6 +16,7 @@ import (
)

func TestSingleton(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_BUILDER_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -29,6 +31,7 @@ func TestSingleton(t *testing.T) {
}

func TestCallerValidationNone(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -40,6 +43,7 @@ func TestCallerValidationNone(t *testing.T) {
}

func TestCallerValidationIs(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
caller := atesting2.NewIDAddr(t, 100)
receiver := atesting2.NewIDAddr(t, 101)
builder := mock2.NewBuilder(context.Background(), receiver)
@@ -69,6 +73,7 @@ func TestCallerValidationIs(t *testing.T) {
}

func TestCallerValidationType(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
caller := atesting2.NewIDAddr(t, 100)
receiver := atesting2.NewIDAddr(t, 101)
builder := mock2.NewBuilder(context.Background(), receiver)
@@ -95,6 +100,7 @@ func TestCallerValidationType(t *testing.T) {
}

func TestCallerValidationInvalidBranch(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -108,6 +114,7 @@ func TestCallerValidationInvalidBranch(t *testing.T) {
}

func TestDeleteActor(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CREATE_ACTOR_001
receiver := atesting2.NewIDAddr(t, 100)
beneficiary := atesting2.NewIDAddr(t, 101)
builder := mock2.NewBuilder(context.Background(), receiver)
@@ -122,6 +129,7 @@ func TestDeleteActor(t *testing.T) {
}

func TestMutateStateInTransaction(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -149,6 +157,7 @@ func TestMutateStateInTransaction(t *testing.T) {
}

func TestMutateStateAfterTransaction(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -183,6 +192,7 @@ func TestMutateStateAfterTransaction(t *testing.T) {
}

func TestMutateStateReadonly(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -217,6 +227,7 @@ func TestMutateStateReadonly(t *testing.T) {
}

func TestMutateStateInvalidBranch(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -231,6 +242,7 @@ func TestMutateStateInvalidBranch(t *testing.T) {
}

func TestAbortWith(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_ABORT_WITH_001
receiver := atesting2.NewIDAddr(t, 100)
builder := mock2.NewBuilder(context.Background(), receiver)

@@ -249,6 +261,7 @@ func TestAbortWith(t *testing.T) {
}

func TestAbortWithUncontrolled(t *testing.T) {
//stm: @CHAIN_ACTOR_CHAOS_ABORT_WITH_001
receiver := atesting2.NewIDAddr(t, 100)
|
||||
builder := mock2.NewBuilder(context.Background(), receiver)
|
||||
|
||||
@ -266,6 +279,7 @@ func TestAbortWithUncontrolled(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestInspectRuntime(t *testing.T) {
|
||||
//stm: @CHAIN_ACTOR_CHAOS_INSPECT_RUNTIME_001, @CHAIN_ACTOR_CHAOS_CREATE_STATE_001
|
||||
caller := atesting2.NewIDAddr(t, 100)
|
||||
receiver := atesting2.NewIDAddr(t, 101)
|
||||
builder := mock2.NewBuilder(context.Background(), receiver)
|
||||
|
@ -1,3 +1,6 @@
|
||||
//stm: ignore
|
||||
// This file does not test any behaviors by itself; rather, it runs other test files
|
||||
// Therefore, this file should not be annotated.
|
||||
package conformance
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package gateway
|
||||
|
||||
import (
|
||||
@ -94,6 +95,7 @@ func TestGatewayAPIChainGetTipSetByHeight(t *testing.T) {
|
||||
// Create tipsets from genesis up to tskh and return the highest
|
||||
ts := mock.createTipSets(tt.args.tskh, tt.args.genesisTS)
|
||||
|
||||
//stm: @GATEWAY_NODE_GET_TIPSET_BY_HEIGHT_001
|
||||
got, err := a.ChainGetTipSetByHeight(ctx, tt.args.h, ts.Key())
|
||||
if tt.expErr {
|
||||
require.Error(t, err)
|
||||
@ -241,6 +243,7 @@ func (m *mockGatewayDepsAPI) Version(context.Context) (api.APIVersion, error) {
|
||||
}
|
||||
|
||||
func TestGatewayVersion(t *testing.T) {
|
||||
//stm: @GATEWAY_NODE_GET_VERSION_001
|
||||
ctx := context.Background()
|
||||
mock := &mockGatewayDepsAPI{}
|
||||
a := NewNode(mock, DefaultLookbackCap, DefaultStateWaitLookbackLimit)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #integration
|
||||
package itests
|
||||
|
||||
import (
|
||||
@ -21,6 +22,7 @@ import (
|
||||
)
|
||||
|
||||
func TestBatchDealInput(t *testing.T) {
|
||||
//stm: @MINER_SECTOR_STATUS_001, @MINER_SECTOR_LIST_001
|
||||
kit.QuietMiningLogs()
|
||||
|
||||
var (
|
||||
|
@ -63,6 +63,7 @@ func runTestCCUpgrade(t *testing.T) *kit.TestFullNode {
|
||||
}
|
||||
waitForSectorActive(ctx, t, CCUpgrade, client, maddr)
|
||||
|
||||
//stm: @SECTOR_CC_UPGRADE_001
|
||||
err = miner.SectorMarkForUpgrade(ctx, sl[0], true)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
@ -27,6 +27,10 @@ import (
|
||||
// TestDealWithMarketAndMinerNode is running concurrently a number of storage and retrieval deals towards a miner
|
||||
// architecture where the `mining/sealing/proving` node is a separate process from the `markets` node
|
||||
func TestDealWithMarketAndMinerNode(t *testing.T) {
|
||||
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
|
||||
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
|
||||
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
|
||||
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
|
||||
if testing.Short() {
|
||||
t.Skip("skipping test in short mode")
|
||||
}
|
||||
@ -126,6 +130,10 @@ func TestDealCyclesConcurrent(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestSimultanenousTransferLimit(t *testing.T) {
|
||||
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
|
||||
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
|
||||
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
|
||||
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
|
||||
t.Skip("skipping as flaky #7152")
|
||||
|
||||
if testing.Short() {
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #integration
|
||||
package itests
|
||||
|
||||
import (
|
||||
@ -36,7 +37,13 @@ var (
|
||||
)
|
||||
|
||||
func TestDMLevelPartialRetrieval(t *testing.T) {
|
||||
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
|
||||
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
|
||||
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
|
||||
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
|
||||
|
||||
//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
|
||||
//stm: @CLIENT_RETRIEVAL_RETRIEVE_001, @CLIENT_RETRIEVAL_FIND_001
|
||||
ctx := context.Background()
|
||||
|
||||
policy.SetPreCommitChallengeDelay(2)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #integration
|
||||
package itests
|
||||
|
||||
import (
|
||||
@ -19,6 +20,7 @@ import (
|
||||
// we reordered the checks to make sure that a transaction with too much money in it sent to yourself will fail instead of succeeding as a noop
|
||||
// more info in this PR! https://github.com/filecoin-project/lotus/pull/7637
|
||||
func TestSelfSentTxnV15(t *testing.T) {
|
||||
//stm: @TOKEN_WALLET_SIGN_001, @CHAIN_MEMPOOL_PUSH_001
|
||||
ctx := context.Background()
|
||||
|
||||
kit.QuietMiningLogs()
|
||||
@ -60,6 +62,7 @@ func TestSelfSentTxnV15(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestSelfSentTxnV14(t *testing.T) {
|
||||
//stm: @TOKEN_WALLET_SIGN_001, @CHAIN_MEMPOOL_PUSH_001
|
||||
ctx := context.Background()
|
||||
|
||||
kit.QuietMiningLogs()
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package alerting
|
||||
|
||||
import (
|
||||
@ -12,6 +13,7 @@ import (
|
||||
)
|
||||
|
||||
func TestAlerting(t *testing.T) {
|
||||
//stm: @JOURNAL_ALERTS_ADD_ALERT_TYPE_001, @JOURNAL_ALERTS_RAISE_001, @JOURNAL_ALERTS_GET_ALERTS_001
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
j := mockjournal.NewMockJournal(mockCtrl)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package journal
|
||||
|
||||
import (
|
||||
@ -7,6 +8,7 @@ import (
|
||||
)
|
||||
|
||||
func TestDisabledEvents(t *testing.T) {
|
||||
//stm: @JOURNAL_REGISTRY_NEW_EVENT_TYPE_001, @JOURNAL_REGISTRY_PARSE_DISABLED_001
|
||||
req := require.New(t)
|
||||
|
||||
test := func(dis DisabledEvents) func(*testing.T) {
|
||||
@ -44,6 +46,7 @@ func TestDisabledEvents(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestParseDisableEvents(t *testing.T) {
|
||||
//stm: @JOURNAL_REGISTRY_PARSE_DISABLED_002
|
||||
_, err := ParseDisabledEvents("system1:disabled1:failed,system1:disabled2")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package backupds
|
||||
|
||||
import (
|
||||
@ -37,6 +38,7 @@ func checkVals(t *testing.T, ds datastore.Datastore, start, end int, exist bool)
|
||||
}
|
||||
|
||||
func TestNoLogRestore(t *testing.T) {
|
||||
//stm: @OTHER_DATASTORE_RESTORE_002
|
||||
ds1 := datastore.NewMapDatastore()
|
||||
|
||||
putVals(t, ds1, 0, 10)
|
||||
@ -57,6 +59,7 @@ func TestNoLogRestore(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestLogRestore(t *testing.T) {
|
||||
//stm: @OTHER_DATASTORE_RESTORE_001
|
||||
logdir, err := ioutil.TempDir("", "backupds-test-")
|
||||
require.NoError(t, err)
|
||||
defer os.RemoveAll(logdir) // nolint
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package rpcenc
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,5 @@
|
||||
//stm: ignore
|
||||
// Ignored because implementation relies on external (ffi) lib
|
||||
package bls
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,5 @@
|
||||
//stm: ignore
|
||||
// Ignored because implementation relies on external (ffi) lib
|
||||
package bls_test
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: ignore
|
||||
package stati
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package tablewriter
|
||||
|
||||
import (
|
||||
@ -8,6 +9,7 @@ import (
|
||||
)
|
||||
|
||||
func TestTableWriter(t *testing.T) {
|
||||
//stm: @OTHER_IMPLEMENTATION_TABLE_WRITE_001, @OTHER_IMPLEMENTATION_TABLE_FLUSH_001
|
||||
tw := New(Col("C1"), Col("X"), Col("C333"), NewLineCol("Thing"))
|
||||
tw.Write(map[string]interface{}{
|
||||
"C1": "234",
|
||||
|
@ -1,6 +1,9 @@
|
||||
//stm: ignore
|
||||
//go:build !windows
|
||||
// +build !windows
|
||||
|
||||
// This file tests file descriptor limits; since this is an OS feature, it should not be annotated
|
||||
|
||||
package ulimit
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package dagstore
|
||||
|
||||
import (
|
||||
@ -88,6 +89,7 @@ func TestLotusAccessorFetchUnsealedPiece(t *testing.T) {
|
||||
}
|
||||
|
||||
// Fetch the piece
|
||||
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001
|
||||
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
||||
if tc.expectErr {
|
||||
require.Error(t, err)
|
||||
@ -101,6 +103,7 @@ func TestLotusAccessorFetchUnsealedPiece(t *testing.T) {
|
||||
|
||||
require.Equal(t, tc.fetchedData, string(bz))
|
||||
|
||||
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||
uns, err := api.IsUnsealed(ctx, cid1)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, tc.isUnsealed, uns)
|
||||
@ -126,6 +129,7 @@ func TestLotusAccessorGetUnpaddedCARSize(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
// Check that the data length is correct
|
||||
//stm: @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||
len, err := api.GetUnpaddedCARSize(ctx, cid1)
|
||||
require.NoError(t, err)
|
||||
require.EqualValues(t, 10, len)
|
||||
@ -160,6 +164,7 @@ func TestThrottle(t *testing.T) {
|
||||
errgrp, ctx := errgroup.WithContext(context.Background())
|
||||
for i := 0; i < 10; i++ {
|
||||
errgrp.Go(func() error {
|
||||
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001
|
||||
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
||||
if err == nil {
|
||||
_ = r.Close()
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: @unit
|
||||
package dagstore
|
||||
|
||||
import (
|
||||
@ -17,6 +18,8 @@ import (
|
||||
)
|
||||
|
||||
func TestLotusMount(t *testing.T) {
|
||||
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001, @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||
ctx := context.Background()
|
||||
bgen := blocksutil.NewBlockGenerator()
|
||||
cid := bgen.Next().Cid()
|
||||
@ -88,6 +91,7 @@ func TestLotusMount(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestLotusMountDeserialize(t *testing.T) {
|
||||
//stm: @MARKET_DAGSTORE_DESERIALIZE_CID_001
|
||||
api := &minerAPI{}
|
||||
|
||||
bgen := blocksutil.NewBlockGenerator()
|
||||
@ -115,6 +119,8 @@ func TestLotusMountDeserialize(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestLotusMountRegistration(t *testing.T) {
|
||||
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001, @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||
ctx := context.Background()
|
||||
bgen := blocksutil.NewBlockGenerator()
|
||||
cid := bgen.Next().Cid()
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #integration
|
||||
package dagstore
|
||||
|
||||
import (
|
||||
@ -59,6 +60,7 @@ func TestShardRegistration(t *testing.T) {
|
||||
|
||||
deals := []storagemarket.MinerDeal{{
|
||||
// Should be registered
|
||||
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_001
|
||||
State: storagemarket.StorageDealSealing,
|
||||
SectorNumber: unsealedSector1,
|
||||
ClientDealProposal: market.ClientDealProposal{
|
||||
@ -77,6 +79,7 @@ func TestShardRegistration(t *testing.T) {
|
||||
},
|
||||
}, {
|
||||
// Should be ignored because deal is no longer active
|
||||
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_003
|
||||
State: storagemarket.StorageDealError,
|
||||
SectorNumber: unsealedSector2,
|
||||
ClientDealProposal: market.ClientDealProposal{
|
||||
@ -114,6 +117,7 @@ func TestShardRegistration(t *testing.T) {
|
||||
require.True(t, migrated)
|
||||
require.NoError(t, err)
|
||||
|
||||
//stm: @MARKET_DAGSTORE_GET_ALL_SHARDS_001
|
||||
info := dagst.AllShardsInfo()
|
||||
require.Len(t, info, 2)
|
||||
for _, i := range info {
|
||||
@ -121,6 +125,7 @@ func TestShardRegistration(t *testing.T) {
|
||||
}
|
||||
|
||||
// Run register shard migration again
|
||||
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_002
|
||||
migrated, err = w.MigrateDeals(ctx, deals)
|
||||
require.False(t, migrated)
|
||||
require.NoError(t, err)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package dagstore
|
||||
|
||||
import (
|
||||
@ -56,6 +57,7 @@ func TestWrapperAcquireRecovery(t *testing.T) {
|
||||
}
|
||||
w.dagst = mock
|
||||
|
||||
//stm: @MARKET_DAGSTORE_ACQUIRE_SHARD_002
|
||||
mybs, err := w.LoadShard(ctx, pieceCid)
|
||||
require.NoError(t, err)
|
||||
|
||||
@ -104,10 +106,12 @@ func TestWrapperBackground(t *testing.T) {
|
||||
w.dagst = mock
|
||||
|
||||
// Start up the wrapper
|
||||
//stm: @MARKET_DAGSTORE_START_001
|
||||
err = w.Start(ctx)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Expect GC to be called automatically
|
||||
//stm: @MARKET_DAGSTORE_START_002
|
||||
tctx, cancel := context.WithTimeout(ctx, time.Second)
|
||||
defer cancel()
|
||||
select {
|
||||
@ -118,6 +122,7 @@ func TestWrapperBackground(t *testing.T) {
|
||||
|
||||
// Expect that when the wrapper is closed it will call close on the
|
||||
// DAG store
|
||||
//stm: @MARKET_DAGSTORE_CLOSE_001
|
||||
err = w.Close()
|
||||
require.NoError(t, err)
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package storageadapter
|
||||
|
||||
import (
|
||||
@ -28,6 +29,7 @@ import (
|
||||
)
|
||||
|
||||
func TestDealPublisher(t *testing.T) {
|
||||
//stm: @MARKET_DEAL_PUBLISHER_PUBLISH_001, @MARKET_DEAL_PUBLISHER_GET_PENDING_DEALS_001
|
||||
oldClock := build.Clock
|
||||
t.Cleanup(func() { build.Clock = oldClock })
|
||||
mc := clock.NewMock()
|
||||
@ -188,6 +190,8 @@ func TestDealPublisher(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestForcePublish(t *testing.T) {
|
||||
//stm: @MARKET_DEAL_PUBLISHER_PUBLISH_001, @MARKET_DEAL_PUBLISHER_GET_PENDING_DEALS_001
|
||||
//stm: @MARKET_DEAL_PUBLISHER_FORCE_PUBLISH_ALL_001
|
||||
dpapi := newDPAPI(t)
|
||||
|
||||
// Create a deal publisher
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package storageadapter
|
||||
|
||||
import (
|
||||
@ -228,6 +229,7 @@ func TestOnDealSectorPreCommitted(t *testing.T) {
|
||||
Err2: data.currentDealInfoErr2,
|
||||
}
|
||||
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
||||
//stm: @MARKET_ADAPTER_ON_SECTOR_PRE_COMMIT_001
|
||||
err = scm.OnDealSectorPreCommitted(ctx, provider, proposal, publishCid, cb)
|
||||
if data.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
@ -439,6 +441,7 @@ func TestOnDealSectorCommitted(t *testing.T) {
|
||||
Err2: data.currentDealInfoErr2,
|
||||
}
|
||||
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
||||
//stm: @MARKET_ADAPTER_ON_SECTOR_COMMIT_001
|
||||
err = scm.OnDealSectorCommitted(ctx, provider, sectorNumber, proposal, publishCid, cb)
|
||||
if data.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package config
|
||||
|
||||
import (
|
||||
@ -12,6 +13,7 @@ import (
|
||||
)
|
||||
|
||||
func TestDefaultFullNodeRoundtrip(t *testing.T) {
|
||||
//stm: @OTHER_IMPLEMENTATION_001
|
||||
c := DefaultFullNode()
|
||||
|
||||
var s string
|
||||
@ -51,6 +53,7 @@ func TestDefaultFullNodeCommentRoundtrip(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestDefaultMinerRoundtrip(t *testing.T) {
|
||||
//stm: @OTHER_IMPLEMENTATION_001
|
||||
c := DefaultStorageMiner()
|
||||
|
||||
var s string
|
||||
|
@ -12,6 +12,7 @@ import (
|
||||
)
|
||||
|
||||
func TestDecodeNothing(t *testing.T) {
|
||||
//stm: @NODE_CONFIG_LOAD_FILE_002
|
||||
assert := assert.New(t)
|
||||
|
||||
{
|
||||
@ -30,6 +31,7 @@ func TestDecodeNothing(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestParitalConfig(t *testing.T) {
|
||||
//stm: @NODE_CONFIG_LOAD_FILE_003
|
||||
assert := assert.New(t)
|
||||
cfgString := `
|
||||
[API]
|
||||
|
@ -27,6 +27,7 @@ import (
|
||||
|
||||
// This test uses a full "dense" CARv2, and not a filestore (positional mapping).
|
||||
func TestRoundtripUnixFS_Dense(t *testing.T) {
|
||||
//stm: @CLIENT_DATA_IMPORT_002
|
||||
ctx := context.Background()
|
||||
|
||||
inputPath, inputContents := genInputFile(t)
|
||||
@ -75,6 +76,7 @@ func TestRoundtripUnixFS_Dense(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestRoundtripUnixFS_Filestore(t *testing.T) {
|
||||
//stm: @CLIENT_DATA_IMPORT_001
|
||||
ctx := context.Background()
|
||||
a := &API{
|
||||
Imports: &imports.Manager{},
|
||||
|
@ -13,6 +13,7 @@ import (
|
||||
)
|
||||
|
||||
func TestMedian(t *testing.T) {
|
||||
//stm: @MARKET_GAS_GET_MEDIAN_PREMIUM_001
|
||||
require.Equal(t, types.NewInt(5), medianGasPremium([]GasMeta{
|
||||
{big.NewInt(5), build.BlockGasTarget},
|
||||
}, 1))
|
||||
|
@ -30,5 +30,10 @@ func genFsRepo(t *testing.T) (*FsRepo, func()) {
|
||||
func TestFsBasic(t *testing.T) {
|
||||
repo, closer := genFsRepo(t)
|
||||
defer closer()
|
||||
//stm: @NODE_FS_REPO_LOCK_001,@NODE_FS_REPO_LOCK_002,@NODE_FS_REPO_UNLOCK_001
|
||||
//stm: @NODE_FS_REPO_SET_API_ENDPOINT_001, @NODE_FS_REPO_GET_API_ENDPOINT_001
|
||||
//stm: @NODE_FS_REPO_GET_CONFIG_001, @NODE_FS_REPO_SET_CONFIG_001
|
||||
//stm: @NODE_FS_REPO_LIST_KEYS_001, @NODE_FS_REPO_PUT_KEY_001
|
||||
//stm: @NODE_FS_REPO_GET_KEY_001, NODE_FS_REPO_DELETE_KEY_001
|
||||
basicTest(t, repo)
|
||||
}
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
)
|
||||
|
||||
func TestMemBasic(t *testing.T) {
|
||||
//stm: @REPO_MEM_001
|
||||
repo := NewMemory(nil)
|
||||
basicTest(t, repo)
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package node
|
||||
|
||||
import (
|
||||
@ -10,6 +11,7 @@ import (
|
||||
)
|
||||
|
||||
func TestMonitorShutdown(t *testing.T) {
|
||||
//stm: @NODE_COMMON_SHUTDOWN_001
|
||||
signalCh := make(chan struct{})
|
||||
|
||||
// Three shutdown handlers.
|
||||
|
@ -15,6 +15,7 @@ func testCids() []cid.Cid {
|
||||
}
|
||||
|
||||
func TestMsgListener(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
|
||||
ml := newMsgListeners()
|
||||
|
||||
done := false
|
||||
@ -33,6 +34,7 @@ func TestMsgListener(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestMsgListenerNilErr(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
|
||||
ml := newMsgListeners()
|
||||
|
||||
done := false
|
||||
@ -50,6 +52,7 @@ func TestMsgListenerNilErr(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestMsgListenerUnsub(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
|
||||
ml := newMsgListeners()
|
||||
|
||||
done := false
|
||||
@ -72,6 +75,7 @@ func TestMsgListenerUnsub(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestMsgListenerMulti(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
|
||||
ml := newMsgListeners()
|
||||
|
||||
count := 0
|
||||
|
@ -502,6 +502,7 @@ func TestAddVoucherInboundWalletKey(t *testing.T) {
|
||||
sv := createTestVoucher(t, ch, voucherLane, nonce, voucherAmount, fromKeyPrivate)
|
||||
_, err = mgr.AddVoucherInbound(ctx, ch, sv, nil, minDelta)
|
||||
|
||||
//stm: @TOKEN_PAYCH_VOUCHER_CREATE_006
|
||||
// Should fail because there is no wallet key matching the channel To
|
||||
// address (ie, the channel is not "owned" by this node)
|
||||
require.Error(t, err)
|
||||
@ -513,6 +514,7 @@ func TestAddVoucherInboundWalletKey(t *testing.T) {
|
||||
sv = createTestVoucher(t, ch, voucherLane, nonce, voucherAmount, fromKeyPrivate)
|
||||
_, err = mgr.AddVoucherInbound(ctx, ch, sv, nil, minDelta)
|
||||
|
||||
//stm: @TOKEN_PAYCH_VOUCHER_CREATE_001
|
||||
// Should now pass because there is a wallet key matching the channel To
|
||||
// address
|
||||
require.NoError(t, err)
|
||||
@ -626,6 +628,7 @@ func TestCheckSpendable(t *testing.T) {
|
||||
}
|
||||
s.mock.setCallResponse(successResponse)
|
||||
|
||||
//stm: @TOKEN_PAYCH_CHECK_SPENDABLE_001
|
||||
// Check that spendable is true
|
||||
secret := []byte("secret")
|
||||
spendable, err := s.mgr.CheckVoucherSpendable(ctx, s.ch, voucher, secret, nil)
|
||||
@ -655,6 +658,7 @@ func TestCheckSpendable(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.True(t, spendable)
|
||||
|
||||
//stm: @TOKEN_PAYCH_CHECK_SPENDABLE_002
|
||||
// Check that voucher is no longer spendable once it has been submitted
|
||||
_, err = s.mgr.SubmitVoucher(ctx, s.ch, voucher, nil, nil)
|
||||
require.NoError(t, err)
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package paychmgr
|
||||
|
||||
import (
|
||||
@ -59,6 +60,7 @@ func testChannelResponse(t *testing.T, ch address.Address) types.MessageReceipt
|
||||
// TestPaychGetCreateChannelMsg tests that GetPaych sends a message to create
|
||||
// a new channel with the correct funds
|
||||
func TestPaychGetCreateChannelMsg(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_CREATE_001
|
||||
ctx := context.Background()
|
||||
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package paychmgr
|
||||
|
||||
import (
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package paychmgr
|
||||
|
||||
import (
|
||||
@ -13,6 +14,8 @@ import (
|
||||
)
|
||||
|
||||
func TestStore(t *testing.T) {
|
||||
//stm: @TOKEN_PAYCH_ALLOCATE_LANE_001, @TOKEN_PAYCH_LIST_CHANNELS_001
|
||||
//stm: @TOKEN_PAYCH_TRACK_CHANNEL_002, @TOKEN_PAYCH_TRACK_CHANNEL_001
|
||||
ctx := context.Background()
|
||||
|
||||
store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package storage
|
||||
|
||||
import (
|
||||
@ -200,6 +201,10 @@ func (m *mockAPI) setChangeHandler(ch *changeHandler) {
|
||||
|
||||
// TestChangeHandlerBasic verifies we can generate a proof and submit it
|
||||
func TestChangeHandlerBasic(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -248,6 +253,10 @@ func TestChangeHandlerBasic(t *testing.T) {
|
||||
// chain is already advanced past the confidence interval, we should move from
|
||||
// proving to submitting without a head change in between.
|
||||
func TestChangeHandlerFromProvingToSubmittingNoHeadChange(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005
|
||||
//stm: @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -299,6 +308,10 @@ func TestChangeHandlerFromProvingToSubmittingNoHeadChange(t *testing.T) {
|
||||
// proofs generated we should not submit anything to chain but submit state
|
||||
// should move to completed
|
||||
func TestChangeHandlerFromProvingEmptyProofsToComplete(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_006
|
||||
//stm: @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -349,6 +362,9 @@ func TestChangeHandlerFromProvingEmptyProofsToComplete(t *testing.T) {
|
||||
// TestChangeHandlerDontStartUntilProvingPeriod tests that the handler
|
||||
// ignores updates until the proving period has been reached.
|
||||
func TestChangeHandlerDontStartUntilProvingPeriod(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -387,6 +403,9 @@ func TestChangeHandlerDontStartUntilProvingPeriod(t *testing.T) {
|
||||
// TestChangeHandlerStartProvingNextDeadline verifies that the proof handler
|
||||
// starts proving the next deadline after the current one
|
||||
func TestChangeHandlerStartProvingNextDeadline(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -436,6 +455,10 @@ func TestChangeHandlerStartProvingNextDeadline(t *testing.T) {
|
||||
// TestChangeHandlerProvingRounds verifies we can generate several rounds of
|
||||
// proofs as the chain head advances
|
||||
func TestChangeHandlerProvingRounds(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_003, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -506,6 +529,9 @@ func TestChangeHandlerProvingRounds(t *testing.T) {
|
||||
// TestChangeHandlerProvingErrorRecovery verifies that the proof handler
|
||||
// recovers correctly from an error
|
||||
func TestChangeHandlerProvingErrorRecovery(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -547,6 +573,10 @@ func TestChangeHandlerProvingErrorRecovery(t *testing.T) {
|
||||
// TestChangeHandlerSubmitErrorRecovery verifies that the submit handler
|
||||
// recovers correctly from an error
|
||||
func TestChangeHandlerSubmitErrorRecovery(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -616,6 +646,9 @@ func TestChangeHandlerSubmitErrorRecovery(t *testing.T) {
|
||||
// TestChangeHandlerProveExpiry verifies that the prove handler
|
||||
// behaves correctly on expiry
|
||||
func TestChangeHandlerProveExpiry(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -654,6 +687,9 @@ func TestChangeHandlerProveExpiry(t *testing.T) {
|
||||
// TestChangeHandlerSubmitExpiry verifies that the submit handler
|
||||
// behaves correctly on expiry
|
||||
func TestChangeHandlerSubmitExpiry(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -717,6 +753,9 @@ func TestChangeHandlerSubmitExpiry(t *testing.T) {
|
||||
// TestChangeHandlerProveRevert verifies that the prove handler
|
||||
// behaves correctly on revert
|
||||
func TestChangeHandlerProveRevert(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -753,6 +792,10 @@ func TestChangeHandlerProveRevert(t *testing.T) {
|
||||
// TestChangeHandlerSubmittingRevert verifies that the submit handler
|
||||
// behaves correctly when there's a revert from the submitting state
|
||||
func TestChangeHandlerSubmittingRevert(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -824,6 +867,10 @@ func TestChangeHandlerSubmittingRevert(t *testing.T) {
|
||||
// TestChangeHandlerSubmitCompleteRevert verifies that the submit handler
|
||||
// behaves correctly when there's a revert from the submit complete state
|
||||
func TestChangeHandlerSubmitCompleteRevert(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -885,6 +932,10 @@ func TestChangeHandlerSubmitCompleteRevert(t *testing.T) {
|
||||
// TestChangeHandlerSubmitRevertTwoEpochs verifies that the submit handler
|
||||
// behaves correctly when the revert is two epochs deep
|
||||
func TestChangeHandlerSubmitRevertTwoEpochs(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
@ -986,6 +1037,10 @@ func TestChangeHandlerSubmitRevertTwoEpochs(t *testing.T) {
|
||||
// behaves correctly when the revert is two epochs deep and the advance is
|
||||
// to a lower height than before
|
||||
func TestChangeHandlerSubmitRevertAdvanceLess(t *testing.T) {
|
||||
//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
|
||||
//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
|
||||
s := makeScaffolding(t)
|
||||
mock := s.mock
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package storage
|
||||
|
||||
import (
|
||||
@ -22,6 +23,7 @@ func TestNextDeadline(t *testing.T) {
|
||||
require.EqualValues(t, 60, di.Close)
|
||||
|
||||
for i := 1; i < 1+int(miner.WPoStPeriodDeadlines)*2; i++ {
|
||||
//stm: @WDPOST_NEXT_DEADLINE_001
|
||||
di = nextDeadline(di)
|
||||
deadlineIdx = i % int(miner.WPoStPeriodDeadlines)
|
||||
expPeriodStart := int(miner.WPoStProvingPeriod) * (i / int(miner.WPoStPeriodDeadlines))
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package storage
|
||||
|
||||
import (
|
||||
@ -176,6 +177,10 @@ func (m mockFaultTracker) CheckProvable(ctx context.Context, pp abi.RegisteredPo
|
||||
// TestWDPostDoPost verifies that doPost will send the correct number of window
|
||||
// PoST messages for a given number of partitions
|
||||
func TestWDPostDoPost(t *testing.T) {
|
||||
//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
|
||||
//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
|
||||
//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
|
||||
//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
|
||||
ctx := context.Background()
|
||||
expectedMsgCount := 5
|
||||
|
||||
@ -190,6 +195,7 @@ func TestWDPostDoPost(t *testing.T) {
|
||||
// Work out the number of partitions that can be included in a message
|
||||
// without exceeding the message sector limit
|
||||
|
||||
//stm: @BLOCKCHAIN_POLICY_GET_MAX_POST_PARTITIONS_001
|
||||
partitionsPerMsg, err := policy.GetMaxPoStPartitions(network.Version13, proofType)
|
||||
require.NoError(t, err)
|
||||
if partitionsPerMsg > miner5.AddressedPartitionsMax {
|
||||
|
@ -1,3 +1,4 @@
|
||||
//stm: #unit
|
||||
package headbuffer
|
||||
|
||||
import (
|
||||
@ -8,6 +9,7 @@ import (
|
||||
)
|
||||
|
||||
func TestHeadBuffer(t *testing.T) {
|
||||
//stm: @TOOLS_HEAD_BUFFER_PUSH_001, @TOOLS_HEAD_BUFFER_POP_001
|
||||
t.Run("Straight Push through", func(t *testing.T) {
|
||||
hb := NewHeadChangeStackBuffer(5)
|
||||
require.Nil(t, hb.Push(&api.HeadChange{Type: "1"}))
|
||||
|