feat: Add additional test annotations (#8272)
* Annotate api,proxy_util,blockstore_badger, policy tests
* Annotate splitstore: bsbadger / markset
* Annotate splitstore feature
* Annotate union/timed blockstore tests
* Annotate openrpc, diff_adt tests
* Annotate error,drand,events tests
* Annotate predicates_test
* Fix annotations
* Annotate tscache, gen tests
* Annotate fundmanager test
* Annotate repub and selection tests
* Annotate statetree_test
* Annotate forks_test
* Annotate searchwait_test.go
* Fix duplicated @@ symbols
* Annotate chain stmgr/store tests
* Annotate more (types) tests
* More tests annotated
* Annotate conformance chaos actor tests
* Annotate more integration tests
* Annotate journal system tests
* Annotate more tests.
* Annotate gas,head buffer behaviors
* Fix markset annotations
* doc: test annotations for the markets dagstore wrapper
* Annotate miner_api test in dagstore
* Annotate more test files
* Remove bad annotations from fsrepo
* Annotate wdpost system
* Remove bad annotations
* Rename "conformance" to "chaos_actor" tests
* doc: stm annotations for blockheader & election proof tests
* Annotate remaining "A" tests
* annotate: stm for error_test
* memrepo_test.go
* Annotate "b" file tests
* message_test.go
* doc: stm annotate for fsrepo_test
* Annotate "c" file tests
* Annotate "D" test files
* message_test.go
* doc: stm annotate for chain, node/config & client
* docs: stm annotate node_test
* Annotate u,v,wl tests
* doc: stm annotations for various test files
* Annotate "T" test files
* doc: stm annotate for proxy_util_test & policy_test
* doc: stm annotate for various tests
* doc: final few stm annotations
* Add mempool unit tests
* Add two more memPool Add tests
* Update submodules
* Add check function tests
* Add stm annotations, refactor test helper
* Annotate api,proxy_util,blockstore_badger, policy tests
* Annotate splitstore: bsbadger / markset, solving merge conflicts
* Annotate splitstore feature
* Annotate union/timed blockstore tests
* Annotate openrpc, diff_adt tests
* Annotate error,drand,events tests
* Annotate predicates_test
* Fix annotations
* Annotate tscache, gen tests
* Annotate fundmanager test
* Annotate statetree_test
* Annotate forks_test
* Annotate searchwait_test.go
* Fix duplicated @@ symbols
* Annotate chain stmgr/store tests
* Annotate more (types) tests
* More tests annotated
* Annotate conformance chaos actor tests
* Annotate more integration tests
* Annotate journal system tests
* Annotate more tests.
* Annotate gas,head buffer behaviors, solve merge conflict
* Fix markset annotations
* Annotate miner_api test in dagstore
* Annotate more test files
* doc: test annotations for the markets dagstore wrapper
* Annotate wdpost system
* Rename "conformance" to "chaos_actor" tests
* Annotate remaining "A" tests
* doc: stm annotations for blockheader & election proof tests
* annotate: stm for error_test
* Annotate "b" file tests
* memrepo_test.go
* Annotate "c" file tests
* message_test.go
* Annotate "D" test files
* doc: stm annotate for fsrepo_test
* Annotate u,v,wl tests
* message_test.go
* doc: stm annotate for chain, node/config & client
* docs: stm annotate node_test
* Annotate "T" test files
* doc: stm annotations for various test files
* Add mempool unit tests, solve merge conflict
* doc: stm annotate for proxy_util_test & policy_test
* doc: stm annotate for various tests
* doc: final few stm annotations
* Add two more memPool Add tests
* Update submodules
* Add check function tests, solve conflict
* Add stm annotations, refactor test helper, solve merge conflict
* Change CLI test kinds to "unit"
* Fix double merged test
* Fix ccupgrade_test merge
* Fix lint issues
* Add stm annotation to types_Test
* Test vectors submodule
* Add file annotation to burn_test

Co-authored-by: Nikola Divic <divicnikola@gmail.com>
Co-authored-by: TheMenko <themenkoprojects@gmail.com>
parent 159da73fcd
commit 79453663b3
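For orientation, the annotations introduced throughout this diff follow one pattern: a file-level //stm tag (`#unit`, `#integration`, or `ignore`) above the package clause, plus per-test `//stm: @...` behavior IDs at the top of each test body. A minimal sketch of what an annotated test file looks like after this change — the package name and behavior ID here are illustrative only and do not come from the diff:

//stm: #unit
package example

import "testing"

// TestSomething is a stand-in test; the //stm comments mirror the annotation
// style this commit adds to the real test files below.
func TestSomething(t *testing.T) {
	//stm: @EXAMPLE_BEHAVIOR_001 (illustrative ID, not from this commit)
	if 1+1 != 2 {
		t.Fatal("unexpected result")
	}
}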
@@ -1,3 +1,4 @@
+//stm: #unit
package api

import (
@@ -26,6 +27,7 @@ func goCmd() string {
}

func TestDoesntDependOnFFI(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_FFI_DEPENDENCE_001
deps, err := exec.Command(goCmd(), "list", "-deps", "github.com/filecoin-project/lotus/api").Output()
if err != nil {
t.Fatal(err)
@@ -38,6 +40,7 @@ func TestDoesntDependOnFFI(t *testing.T) {
}

func TestDoesntDependOnBuild(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_FFI_DEPENDENCE_002
deps, err := exec.Command(goCmd(), "list", "-deps", "github.com/filecoin-project/lotus/api").Output()
if err != nil {
t.Fatal(err)
@@ -50,6 +53,7 @@ func TestDoesntDependOnBuild(t *testing.T) {
}

func TestReturnTypes(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_001
errType := reflect.TypeOf(new(error)).Elem()
bareIface := reflect.TypeOf(new(interface{})).Elem()
jmarsh := reflect.TypeOf(new(json.Marshaler)).Elem()
@@ -115,6 +119,7 @@ func TestReturnTypes(t *testing.T) {
}

func TestPermTags(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_PERM_TAGS_001
_ = PermissionedFullAPI(&FullNodeStruct{})
_ = PermissionedStorMinerAPI(&StorageMinerStruct{})
_ = PermissionedWorkerAPI(&WorkerStruct{})
@@ -1,3 +1,4 @@
+//stm: #unit
package api

import (
@@ -29,6 +30,7 @@ type StrC struct {
}

func TestGetInternalStructs(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_API_STRUCTS_001
var proxy StrA

sts := GetInternalStructs(&proxy)
@@ -44,6 +46,7 @@ func TestGetInternalStructs(t *testing.T) {
}

func TestNestedInternalStructs(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_API_STRUCTS_001
var proxy StrC

// check that only the top-level internal struct gets picked up
@@ -1,3 +1,4 @@
+//stm: #unit
package badgerbs

import (
@@ -20,6 +21,8 @@ import (
)

func TestBadgerBlockstore(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
(&Suite{
NewBlockstore: newBlockstore(DefaultOptions),
OpenBlockstore: openBlockstore(DefaultOptions),
@@ -38,6 +41,8 @@ func TestBadgerBlockstore(t *testing.T) {
}

func TestStorageKey(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_STORAGE_KEY_001
bs, _ := newBlockstore(DefaultOptions)(t)
bbs := bs.(*Blockstore)
defer bbs.Close() //nolint:errcheck
@@ -265,10 +270,16 @@ func testMove(t *testing.T, optsF func(string) Options) {
}

func TestMoveNoPrefix(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_COLLECT_GARBAGE_001
testMove(t, DefaultOptions)
}

func TestMoveWithPrefix(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_COLLECT_GARBAGE_001
testMove(t, func(path string) Options {
opts := DefaultOptions(path)
opts.Prefix = "/prefixed/"
@@ -1,3 +1,4 @@
+//stm: #unit
package badgerbs

import (
@@ -44,6 +45,8 @@ func (s *Suite) RunTests(t *testing.T, prefix string) {
}

func (s *Suite) TestGetWhenKeyNotPresent(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_GET_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -57,6 +60,8 @@ func (s *Suite) TestGetWhenKeyNotPresent(t *testing.T) {
}

func (s *Suite) TestGetWhenKeyIsNil(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -68,6 +73,9 @@ func (s *Suite) TestGetWhenKeyIsNil(t *testing.T) {
}

func (s *Suite) TestPutThenGetBlock(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -85,6 +93,8 @@ func (s *Suite) TestPutThenGetBlock(t *testing.T) {
}

func (s *Suite) TestHas(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_HAS_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -106,6 +116,9 @@ func (s *Suite) TestHas(t *testing.T) {
}

func (s *Suite) TestCidv0v1(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -123,6 +136,9 @@ func (s *Suite) TestCidv0v1(t *testing.T) {
}

func (s *Suite) TestPutThenGetSizeBlock(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_GET_SIZE_001
ctx := context.Background()

bs, _ := s.NewBlockstore(t)
@@ -154,6 +170,8 @@ func (s *Suite) TestPutThenGetSizeBlock(t *testing.T) {
}

func (s *Suite) TestAllKeysSimple(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
defer func() { require.NoError(t, c.Close()) }()
@@ -170,6 +188,9 @@ func (s *Suite) TestAllKeysSimple(t *testing.T) {
}

func (s *Suite) TestAllKeysRespectsContext(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
defer func() { require.NoError(t, c.Close()) }()
@@ -200,6 +221,7 @@ func (s *Suite) TestAllKeysRespectsContext(t *testing.T) {
}

func (s *Suite) TestDoubleClose(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
bs, _ := s.NewBlockstore(t)
c, ok := bs.(io.Closer)
if !ok {
@@ -210,6 +232,9 @@ func (s *Suite) TestDoubleClose(t *testing.T) {
}

func (s *Suite) TestReopenPutGet(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_GET_001
ctx := context.Background()
bs, path := s.NewBlockstore(t)
c, ok := bs.(io.Closer)
@@ -236,6 +261,10 @@ func (s *Suite) TestReopenPutGet(t *testing.T) {
}

func (s *Suite) TestPutMany(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_OPEN_001, @SPLITSTORE_BADGER_CLOSE_001
+//stm: @SPLITSTORE_BADGER_HAS_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_GET_001, @SPLITSTORE_BADGER_PUT_MANY_001
+//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -268,6 +297,11 @@ func (s *Suite) TestPutMany(t *testing.T) {
}

func (s *Suite) TestDelete(t *testing.T) {
+//stm: @SPLITSTORE_BADGER_PUT_001, @SPLITSTORE_BADGER_POOLED_STORAGE_KEY_001
+//stm: @SPLITSTORE_BADGER_DELETE_001, @SPLITSTORE_BADGER_POOLED_STORAGE_HAS_001
+//stm: @SPLITSTORE_BADGER_ALL_KEYS_CHAN_001, @SPLITSTORE_BADGER_HAS_001
+//stm: @SPLITSTORE_BADGER_PUT_MANY_001
+
ctx := context.Background()
bs, _ := s.NewBlockstore(t)
if c, ok := bs.(io.Closer); ok {
@@ -1,3 +1,4 @@
+//stm: #unit
package splitstore

import (
@@ -10,6 +11,8 @@ import (
)

func TestMapMarkSet(t *testing.T) {
+//stm: @SPLITSTORE_MARKSET_CREATE_001, @SPLITSTORE_MARKSET_HAS_001, @@SPLITSTORE_MARKSET_MARK_001
+//stm: @SPLITSTORE_MARKSET_CLOSE_001, @SPLITSTORE_MARKSET_CREATE_VISITOR_001
testMarkSet(t, "map")
testMarkSetRecovery(t, "map")
testMarkSetMarkMany(t, "map")
@@ -18,6 +21,8 @@ func TestMapMarkSet(t *testing.T) {
}

func TestBadgerMarkSet(t *testing.T) {
+//stm: @SPLITSTORE_MARKSET_CREATE_001, @SPLITSTORE_MARKSET_HAS_001, @@SPLITSTORE_MARKSET_MARK_001
+//stm: @SPLITSTORE_MARKSET_CLOSE_001, @SPLITSTORE_MARKSET_CREATE_VISITOR_001
bs := badgerMarkSetBatchSize
badgerMarkSetBatchSize = 1
t.Cleanup(func() {
@@ -46,6 +51,7 @@ func testMarkSet(t *testing.T, lsType string) {
}
defer env.Close() //nolint:errcheck

+// stm: @SPLITSTORE_MARKSET_CREATE_001
hotSet, err := env.New("hot", 0)
if err != nil {
t.Fatal(err)
@@ -65,6 +71,7 @@ func testMarkSet(t *testing.T, lsType string) {
return cid.NewCidV1(cid.Raw, h)
}

+// stm: @SPLITSTORE_MARKSET_HAS_001
mustHave := func(s MarkSet, cid cid.Cid) {
t.Helper()
has, err := s.Has(cid)
@@ -94,6 +101,7 @@ func testMarkSet(t *testing.T, lsType string) {
k3 := makeCid("c")
k4 := makeCid("d")

+// stm: @SPLITSTORE_MARKSET_MARK_001
hotSet.Mark(k1) //nolint
hotSet.Mark(k2) //nolint
coldSet.Mark(k3) //nolint
@@ -144,6 +152,7 @@ func testMarkSet(t *testing.T, lsType string) {
mustNotHave(coldSet, k3)
mustNotHave(coldSet, k4)

+//stm: @SPLITSTORE_MARKSET_CLOSE_001
err = hotSet.Close()
if err != nil {
t.Fatal(err)
@@ -171,6 +180,7 @@ func testMarkSetVisitor(t *testing.T, lsType string) {
}
defer env.Close() //nolint:errcheck

+//stm: @SPLITSTORE_MARKSET_CREATE_VISITOR_001
visitor, err := env.New("test", 0)
if err != nil {
t.Fatal(err)
@@ -1,3 +1,4 @@
+//stm: #unit
package splitstore

import (
@@ -228,10 +229,16 @@ func testSplitStore(t *testing.T, cfg *Config) {
}

func TestSplitStoreCompaction(t *testing.T) {
+//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
+//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
+//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
testSplitStore(t, &Config{MarkSetType: "map"})
}

func TestSplitStoreCompactionWithBadger(t *testing.T) {
+//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
+//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
+//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
bs := badgerMarkSetBatchSize
badgerMarkSetBatchSize = 1
t.Cleanup(func() {
@@ -241,6 +248,9 @@ func TestSplitStoreCompactionWithBadger(t *testing.T) {
}

func TestSplitStoreSuppressCompactionNearUpgrade(t *testing.T) {
+//stm: @SPLITSTORE_SPLITSTORE_OPEN_001, @SPLITSTORE_SPLITSTORE_CLOSE_001
+//stm: @SPLITSTORE_SPLITSTORE_PUT_001, @SPLITSTORE_SPLITSTORE_ADD_PROTECTOR_001
+//stm: @SPLITSTORE_SPLITSTORE_CLOSE_001
ctx := context.Background()
chain := &mockChain{t: t}

@@ -1,3 +1,4 @@
+//stm: #unit
package blockstore

import (
@@ -13,6 +14,9 @@ import (
)

func TestTimedCacheBlockstoreSimple(t *testing.T) {
+//stm: @SPLITSTORE_TIMED_BLOCKSTORE_START_001
+//stm: @SPLITSTORE_TIMED_BLOCKSTORE_PUT_001, @SPLITSTORE_TIMED_BLOCKSTORE_HAS_001, @SPLITSTORE_TIMED_BLOCKSTORE_GET_001
+//stm: @SPLITSTORE_TIMED_BLOCKSTORE_ALL_KEYS_CHAN_001
tc := NewTimedCacheBlockstore(10 * time.Millisecond)
mClock := clock.NewMock()
mClock.Set(time.Now())
@@ -1,3 +1,4 @@
+//stm: #unit
package blockstore

import (
@@ -15,6 +16,7 @@ var (
)

func TestUnionBlockstore_Get(t *testing.T) {
+//stm: @SPLITSTORE_UNION_BLOCKSTORE_GET_001
ctx := context.Background()
m1 := NewMemory()
m2 := NewMemory()
@@ -34,6 +36,9 @@ func TestUnionBlockstore_Get(t *testing.T) {
}

func TestUnionBlockstore_Put_PutMany_Delete_AllKeysChan(t *testing.T) {
+//stm: @SPLITSTORE_UNION_BLOCKSTORE_PUT_001, @SPLITSTORE_UNION_BLOCKSTORE_HAS_001
+//stm: @SPLITSTORE_UNION_BLOCKSTORE_PUT_MANY_001, @SPLITSTORE_UNION_BLOCKSTORE_DELETE_001
+//stm: @SPLITSTORE_UNION_BLOCKSTORE_ALL_KEYS_CHAN_001
ctx := context.Background()
m1 := NewMemory()
m2 := NewMemory()
@@ -1,3 +1,4 @@
+//stm: #unit
package build

import (
@@ -7,6 +8,7 @@ import (
)

func TestOpenRPCDiscoverJSON_Version(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_OPENRPC_VERSION_001
// openRPCDocVersion is the current OpenRPC version of the API docs.
openRPCDocVersion := "1.2.6"

@@ -1,3 +1,4 @@
+//stm: #unit
package adt

import (
@@ -44,6 +45,7 @@ func TestDiffAdtArray(t *testing.T) {

changes := new(TestDiffArray)

+//stm: @CHAIN_ADT_ARRAY_DIFF_001
assert.NoError(t, DiffAdtArray(arrA, arrB, changes))
assert.NotNil(t, changes)

@@ -98,6 +100,7 @@ func TestDiffAdtMap(t *testing.T) {

changes := new(TestDiffMap)

+//stm: @CHAIN_ADT_MAP_DIFF_001
assert.NoError(t, DiffAdtMap(mapA, mapB, changes))
assert.NotNil(t, changes)

@@ -1,3 +1,4 @@
+//stm: #unit
package aerrors_test

import (
@@ -11,6 +12,7 @@ import (
)

func TestFatalError(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_ACTOR_ERRORS_001
e1 := xerrors.New("out of disk space")
e2 := xerrors.Errorf("could not put node: %w", e1)
e3 := xerrors.Errorf("could not save head: %w", e2)
@@ -24,6 +26,7 @@ func TestFatalError(t *testing.T) {
assert.True(t, IsFatal(aw4), "should be fatal")
}
func TestAbsorbeError(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_ACTOR_ERRORS_001
e1 := xerrors.New("EOF")
e2 := xerrors.Errorf("could not decode: %w", e1)
ae := Absorb(e2, 35, "failed to decode CBOR")
@@ -1,3 +1,4 @@
+//stm: #unit
package policy

import (
@@ -22,6 +23,7 @@ func TestSupportedProofTypes(t *testing.T) {
for t := range miner0.SupportedProofTypes {
oldTypes = append(oldTypes, t)
}
+//stm: @BLOCKCHAIN_POLICY_SET_MAX_SUPPORTED_PROOF_TYPES_001
t.Cleanup(func() {
SetSupportedProofTypes(oldTypes...)
})
@@ -33,6 +35,7 @@ func TestSupportedProofTypes(t *testing.T) {
abi.RegisteredSealProof_StackedDrg2KiBV1: {},
},
)
+//stm: @BLOCKCHAIN_POLICY_ADD_MAX_SUPPORTED_PROOF_TYPES_001
AddSupportedProofTypes(abi.RegisteredSealProof_StackedDrg8MiBV1)
require.EqualValues(t,
miner0.SupportedProofTypes,
@@ -45,6 +48,7 @@ func TestSupportedProofTypes(t *testing.T) {

// Tests assumptions about policies being the same between actor versions.
func TestAssumptions(t *testing.T) {
+//stm: @BLOCKCHAIN_POLICY_ASSUMPTIONS_001
require.EqualValues(t, miner0.SupportedProofTypes, miner2.PreCommitSealProofTypesV0)
require.Equal(t, miner0.PreCommitChallengeDelay, miner2.PreCommitChallengeDelay)
require.Equal(t, miner0.MaxSectorExpirationExtension, miner2.MaxSectorExpirationExtension)
@@ -58,6 +62,7 @@ func TestAssumptions(t *testing.T) {
}

func TestPartitionSizes(t *testing.T) {
+//stm: @CHAIN_ACTOR_PARTITION_SIZES_001
for _, p := range abi.SealProofInfos {
sizeNew, err := builtin2.PoStProofWindowPoStPartitionSectors(p.WindowPoStProof)
require.NoError(t, err)
@@ -71,6 +76,7 @@ func TestPartitionSizes(t *testing.T) {
}

func TestPoStSize(t *testing.T) {
+//stm: @BLOCKCHAIN_POLICY_GET_MAX_POST_PARTITIONS_001
v12PoStSize, err := GetMaxPoStPartitions(network.Version12, abi.RegisteredPoStProof_StackedDrgWindow64GiBV1)
require.Equal(t, 4, v12PoStSize)
require.NoError(t, err)
@@ -1,3 +1,5 @@
+//stm: ignore
+//Only tests external library behavior, therefore it should not be annotated
package drand

import (
@@ -1,3 +1,4 @@
+//stm: #unit
package events

import (
@@ -358,6 +359,7 @@ func (fcs *fakeCS) advance(rev, app, drop int, msgs map[int]cid.Cid, nulls ...in
var _ EventAPI = &fakeCS{}

func TestAt(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)
events, err := NewEvents(context.Background(), fcs)
require.NoError(t, err)
@@ -418,6 +420,7 @@ func TestAt(t *testing.T) {
}

func TestAtNullTrigger(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)
events, err := NewEvents(context.Background(), fcs)
require.NoError(t, err)
@@ -447,6 +450,7 @@ func TestAtNullTrigger(t *testing.T) {
}

func TestAtNullConf(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001, @EVENTS_HEIGHT_REVERT_001
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

@@ -485,6 +489,7 @@ func TestAtNullConf(t *testing.T) {
}

func TestAtStart(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -515,6 +520,7 @@ func TestAtStart(t *testing.T) {
}

func TestAtStartConfidence(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -541,6 +547,7 @@ func TestAtStartConfidence(t *testing.T) {
}

func TestAtChained(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -571,6 +578,7 @@ func TestAtChained(t *testing.T) {
}

func TestAtChainedConfidence(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -601,6 +609,7 @@ func TestAtChainedConfidence(t *testing.T) {
}

func TestAtChainedConfidenceNull(t *testing.T) {
+//stm: @EVENTS_HEIGHT_CHAIN_AT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -632,6 +641,7 @@ func matchAddrMethod(to address.Address, m abi.MethodNum) func(msg *types.Messag
}

func TestCalled(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -837,6 +847,7 @@ func TestCalled(t *testing.T) {
}

func TestCalledTimeout(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -897,6 +908,7 @@ func TestCalledTimeout(t *testing.T) {
}

func TestCalledOrder(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -953,6 +965,7 @@ func TestCalledOrder(t *testing.T) {
}

func TestCalledNull(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1011,6 +1024,7 @@ func TestCalledNull(t *testing.T) {
}

func TestRemoveTriggersOnMessage(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1094,6 +1108,7 @@ type testStateChange struct {
}

func TestStateChanged(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1179,6 +1194,7 @@ func TestStateChanged(t *testing.T) {
}

func TestStateChangedRevert(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1255,6 +1271,7 @@ func TestStateChangedRevert(t *testing.T) {
}

func TestStateChangedTimeout(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
timeoutHeight := abi.ChainEpoch(20)
confidence := 3

@@ -1332,6 +1349,7 @@ func TestStateChangedTimeout(t *testing.T) {
}

func TestCalledMultiplePerEpoch(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

events, err := NewEvents(context.Background(), fcs)
@@ -1384,6 +1402,7 @@ func TestCalledMultiplePerEpoch(t *testing.T) {
}

func TestCachedSameBlock(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
fcs := newFakeCS(t)

_, err := NewEvents(context.Background(), fcs)
@@ -1418,6 +1437,7 @@ func (t *testObserver) Revert(_ context.Context, from, to *types.TipSet) error {
}

func TestReconnect(t *testing.T) {
+//stm: @EVENTS_EVENTS_CALLED_001, @EVENTS_HEIGHT_REVERT_001
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

@@ -1,3 +1,4 @@
+//stm: #unit
package state

import (
@@ -35,6 +36,12 @@ func init() {
}

func TestMarketPredicates(t *testing.T) {
+//stm: @EVENTS_PREDICATES_ON_ACTOR_STATE_CHANGED_001, @EVENTS_PREDICATES_DEAL_STATE_CHANGED_001
+//stm: @EVENTS_PREDICATES_DEAL_CHANGED_FOR_IDS
+
+//stm: @EVENTS_PREDICATES_ON_BALANCE_CHANGED_001, @EVENTS_PREDICATES_BALANCE_CHANGED_FOR_ADDRESS_001
+//stm: @EVENTS_PREDICATES_ON_DEAL_PROPOSAL_CHANGED_001, @EVENTS_PREDICATES_PROPOSAL_AMT_CHANGED_001
+//stm: @EVENTS_PREDICATES_DEAL_STATE_CHANGED_001, @EVENTS_PREDICATES_DEAL_AMT_CHANGED_001
ctx := context.Background()
bs := bstore.NewMemorySync()
store := adt2.WrapStore(ctx, cbornode.NewCborStore(bs))
@@ -333,6 +340,8 @@ func TestMarketPredicates(t *testing.T) {
}

func TestMinerSectorChange(t *testing.T) {
+//stm: @EVENTS_PREDICATES_ON_ACTOR_STATE_CHANGED_001, @EVENTS_PREDICATES_MINER_ACTOR_CHANGE_001
+//stm: @EVENTS_PREDICATES_MINER_SECTOR_CHANGE_001
ctx := context.Background()
bs := bstore.NewMemorySync()
store := adt2.WrapStore(ctx, cbornode.NewCborStore(bs))
@@ -1,3 +1,4 @@
+//stm: #unit
package events

import (
@@ -92,6 +93,7 @@ func (h *cacheHarness) skip(n abi.ChainEpoch) {
}

func TestTsCache(t *testing.T) {
+//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
h := newCacheharness(t)

for i := 0; i < 9000; i++ {
@@ -104,6 +106,8 @@ func TestTsCache(t *testing.T) {
}

func TestTsCacheNulls(t *testing.T) {
+//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_CHAIN_TIPSET_BEFORE_001, @EVENTS_CACHE_GET_CHAIN_TIPSET_AFTER_001
+//stm: @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
ctx := context.Background()
h := newCacheharness(t)

@@ -182,6 +186,7 @@ func (tc *tsCacheAPIStorageCallCounter) ChainGetTipSet(ctx context.Context, tsk
}

func TestTsCacheEmpty(t *testing.T) {
+//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001
// Calling best on an empty cache should just call out to the chain API
callCounter := &tsCacheAPIStorageCallCounter{t: t}
tsc := newTSCache(callCounter, 50)
@@ -191,6 +196,7 @@ func TestTsCacheEmpty(t *testing.T) {
}

func TestTsCacheSkip(t *testing.T) {
+//stm: @EVENTS_CACHE_GET_CHAIN_HEAD_001, @EVENTS_CACHE_GET_001, @EVENTS_CACHE_ADD_001
h := newCacheharness(t)

ts, err := types.NewTipSet([]*types.BlockHeader{{
@@ -1,3 +1,4 @@
+//stm: #unit
package gen

import (
@@ -34,6 +35,7 @@ func testGeneration(t testing.TB, n int, msgs int, sectors int) {
}

func TestChainGeneration(t *testing.T) {
+//stm: @CHAIN_GEN_NEW_GEN_WITH_SECTORS_001, @CHAIN_GEN_NEXT_TIPSET_001
t.Run("10-20-1", func(t *testing.T) { testGeneration(t, 10, 20, 1) })
t.Run("10-20-25", func(t *testing.T) { testGeneration(t, 10, 20, 25) })
}
@@ -1,3 +1,4 @@
+//stm: #unit
package market

import (
@@ -22,6 +23,7 @@ import (

// TestFundManagerBasic verifies that the basic fund manager operations work
func TestFundManagerBasic(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -106,6 +108,7 @@ func TestFundManagerBasic(t *testing.T) {

// TestFundManagerParallel verifies that operations can be run in parallel
func TestFundManagerParallel(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -197,6 +200,7 @@ func TestFundManagerParallel(t *testing.T) {

// TestFundManagerReserveByWallet verifies that reserve requests are grouped by wallet
func TestFundManagerReserveByWallet(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -290,6 +294,7 @@ func TestFundManagerReserveByWallet(t *testing.T) {
// TestFundManagerWithdrawal verifies that as many withdraw operations as
// possible are processed
func TestFundManagerWithdrawalLimit(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -384,6 +389,7 @@ func TestFundManagerWithdrawalLimit(t *testing.T) {

// TestFundManagerWithdrawByWallet verifies that withdraw requests are grouped by wallet
func TestFundManagerWithdrawByWallet(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001, @MARKET_WITHDRAW_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -493,6 +499,7 @@ func TestFundManagerWithdrawByWallet(t *testing.T) {
// TestFundManagerRestart verifies that waiting for incomplete requests resumes
// on restart
func TestFundManagerRestart(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -559,6 +566,7 @@ func TestFundManagerRestart(t *testing.T) {
// 3. Deal B completes, reducing addr1 by 7: reserved 12 available 12 -> 5
// 4. Deal A releases 5 from addr1: reserved 12 -> 7 available 5
func TestFundManagerReleaseAfterPublish(t *testing.T) {
+//stm: @MARKET_RESERVE_FUNDS_001, @MARKET_RELEASE_FUNDS_001
s := setup(t)
defer s.fm.Stop()

@@ -1,3 +1,4 @@
+//stm: #unit
package messagepool

import (
@@ -8,6 +9,7 @@ import (
)

func TestBlockProbability(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_BLOCK_PROB_001
mp := &MessagePool{}
bp := mp.blockProbabilities(1 - 0.15)
t.Logf("%+v\n", bp)
@@ -20,6 +22,7 @@ func TestBlockProbability(t *testing.T) {
}

func TestWinnerProba(t *testing.T) {
+//stm: @OTHER_IMPLEMENTATION_BLOCK_PROB_002
rand.Seed(time.Now().UnixNano())
const N = 1000000
winnerProba := noWinnersProb()
@@ -854,7 +854,6 @@ func TestMessageValueTooHigh(t *testing.T) {
Message: *msg,
Signature: *sig,
}
-
err = mp.Add(context.TODO(), sm)
assert.Error(t, err)
}
@@ -901,8 +900,7 @@ func TestMessageSignatureInvalid(t *testing.T) {
}
err = mp.Add(context.TODO(), sm)
assert.Error(t, err)
-// assert.Contains(t, err.Error(), "invalid signature length")
-assert.Error(t, err)
+assert.Contains(t, err.Error(), "invalid signature length")
}
}

@@ -926,14 +924,29 @@ func TestAddMessageTwice(t *testing.T) {
to := mock.Address(1001)

{
-// create a valid messages
-sm := makeTestMessage(w, from, to, 0, 50_000_000, minimumBaseFee.Uint64())
+msg := &types.Message{
+To: to,
+From: from,
+Value: types.NewInt(1),
+Nonce: 0,
+GasLimit: 50000000,
+GasFeeCap: types.NewInt(minimumBaseFee.Uint64()),
+GasPremium: types.NewInt(1),
+Params: make([]byte, 32<<10),
+}
+
+sig, err := w.WalletSign(context.TODO(), from, msg.Cid().Bytes(), api.MsgMeta{})
+if err != nil {
+panic(err)
+}
+sm := &types.SignedMessage{
+Message: *msg,
+Signature: *sig,
+}
mustAdd(t, mp, sm)

-// try to add it twice
err = mp.Add(context.TODO(), sm)
-// assert.Contains(t, err.Error(), "with nonce 0 already in mpool")
-assert.Error(t, err)
+assert.Contains(t, err.Error(), "with nonce 0 already in mpool")
}
}

@@ -963,8 +976,7 @@ func TestAddMessageTwiceNonceGap(t *testing.T) {

// then try to add message again
err = mp.Add(context.TODO(), sm)
-// assert.Contains(t, err.Error(), "unfulfilled nonce gap")
-assert.Error(t, err)
+assert.Contains(t, err.Error(), "unfulfilled nonce gap")
}
}

@@ -1,3 +1,4 @@
+//stm: #unit
package state

import (
@@ -18,6 +19,7 @@ import (
)

func BenchmarkStateTreeSet(b *testing.B) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001
cst := cbor.NewMemCborStore()
st, err := NewStateTree(cst, types.StateTreeVersion1)
if err != nil {
@@ -45,6 +47,7 @@ func BenchmarkStateTreeSet(b *testing.B) {
}

func BenchmarkStateTreeSetFlush(b *testing.B) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -80,6 +83,8 @@ func BenchmarkStateTreeSetFlush(b *testing.B) {
}

func TestResolveCache(t *testing.T) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
+//stm: @CHAIN_STATETREE_SNAPSHOT_001, @CHAIN_STATETREE_SNAPSHOT_CLEAR_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -182,6 +187,8 @@ func TestResolveCache(t *testing.T) {
}

func BenchmarkStateTree10kGetActor(b *testing.B) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
+//stm: @CHAIN_STATETREE_FLUSH_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -229,6 +236,7 @@ func BenchmarkStateTree10kGetActor(b *testing.B) {
}

func TestSetCache(t *testing.T) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
cst := cbor.NewMemCborStore()
sv, err := VersionForNetwork(build.NewestNetworkVersion)
if err != nil {
@@ -270,6 +278,8 @@ func TestSetCache(t *testing.T) {
}

func TestSnapshots(t *testing.T) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001
+//stm: @CHAIN_STATETREE_FLUSH_001, @CHAIN_STATETREE_SNAPSHOT_REVERT_001, CHAIN_STATETREE_SNAPSHOT_CLEAR_001
ctx := context.Background()
cst := cbor.NewMemCborStore()

@@ -360,6 +370,7 @@ func assertNotHas(t *testing.T, st *StateTree, addr address.Address) {
}

func TestStateTreeConsistency(t *testing.T) {
+//stm: @CHAIN_STATETREE_SET_ACTOR_001, @CHAIN_STATETREE_VERSION_FOR_NETWORK_001, @CHAIN_STATETREE_FLUSH_001
cst := cbor.NewMemCborStore()

// TODO: ActorUpgrade: this test tests pre actors v2
@@ -1,3 +1,4 @@
+//stm: #integration
package stmgr_test

import (
@@ -106,6 +107,9 @@ func (ta *testActor) TestMethod(rt rt2.Runtime, params *abi.EmptyValue) *abi.Emp
}

func TestForkHeightTriggers(t *testing.T) {
+//stm: @CHAIN_STATETREE_GET_ACTOR_001, @CHAIN_STATETREE_FLUSH_001, @TOKEN_WALLET_SIGN_001
+//stm: @CHAIN_GEN_NEXT_TIPSET_001
+//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001
logging.SetAllLoggers(logging.LevelInfo)

ctx := context.TODO()
@@ -241,6 +245,8 @@ func TestForkHeightTriggers(t *testing.T) {
}

func TestForkRefuseCall(t *testing.T) {
+//stm: @CHAIN_GEN_NEXT_TIPSET_001, @CHAIN_GEN_NEXT_TIPSET_FROM_MINERS_001
+//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001, @CHAIN_STATE_CALL_001
logging.SetAllLoggers(logging.LevelInfo)

for after := 0; after < 3; after++ {
@@ -360,6 +366,8 @@ func testForkRefuseCall(t *testing.T, nullsBefore, nullsAfter int) {
}

func TestForkPreMigration(t *testing.T) {
+//stm: @CHAIN_GEN_NEXT_TIPSET_001,
+//stm: @CHAIN_STATE_RESOLVE_TO_KEY_ADDR_001, @CHAIN_STATE_SET_VM_CONSTRUCTOR_001
logging.SetAllLoggers(logging.LevelInfo)

cg, err := gen.NewGenerator()
@@ -1,3 +1,4 @@
+//stm: #unit
package stmgr_test

import (
@@ -12,6 +13,8 @@ import (
)

func TestSearchForMessageReplacements(t *testing.T) {
+//stm: @CHAIN_GEN_NEXT_TIPSET_001
+//stm: @CHAIN_STATE_SEARCH_MSG_001
ctx := context.Background()
cg, err := gen.NewGenerator()
if err != nil {
@@ -1,3 +1,5 @@
+//stm: #unit
+
package store

import (
@@ -10,6 +12,7 @@ import (
)

func TestBaseFee(t *testing.T) {
+//stm: @CHAIN_STORE_COMPUTE_NEXT_BASE_FEE_001
tests := []struct {
basefee uint64
limitUsed int64
@@ -1,3 +1,4 @@
+//stm: #unit
package store_test

import (
@@ -10,6 +11,9 @@ import (
)

func TestChainCheckpoint(t *testing.T) {
+//stm: @CHAIN_GEN_NEXT_TIPSET_FROM_MINERS_001
+//stm: @CHAIN_STORE_GET_TIPSET_FROM_KEY_001, @CHAIN_STORE_SET_HEAD_001, @CHAIN_STORE_GET_HEAVIEST_TIPSET_001
+//stm: @CHAIN_STORE_SET_CHECKPOINT_001, @CHAIN_STORE_MAYBE_TAKE_HEAVIER_TIPSET_001, @CHAIN_STORE_REMOVE_CHECKPOINT_001
ctx := context.Background()

cg, err := gen.NewGenerator()
@@ -1,3 +1,4 @@
+//stm: #unit
package store

import (
@@ -9,6 +10,7 @@ import (
)

func TestHeadChangeCoalescer(t *testing.T) {
+//stm: @CHAIN_STORE_COALESCE_HEAD_CHANGE_001
notif := make(chan headChange, 1)
c := NewHeadChangeCoalescer(func(revert, apply []*types.TipSet) error {
notif <- headChange{apply: apply, revert: revert}
@@ -1,3 +1,4 @@
+//stm: #unit
package store_test

import (
@@ -17,6 +18,9 @@ import (
)

func TestIndexSeeks(t *testing.T) {
+//stm: @CHAIN_STORE_IMPORT_001
+//stm: @CHAIN_STORE_GET_TIPSET_BY_HEIGHT_001, @CHAIN_STORE_PUT_TIPSET_001, @CHAIN_STORE_SET_GENESIS_BLOCK_001
+//stm: @CHAIN_STORE_CLOSE_001
cg, err := gen.NewGenerator()
if err != nil {
t.Fatal(err)
@@ -1,3 +1,4 @@
+//stm: #unit
package store_test

import (
@@ -28,6 +29,8 @@ func init() {
}

func BenchmarkGetRandomness(b *testing.B) {
+//stm: @CHAIN_GEN_NEXT_TIPSET_001
+//stm: @CHAIN_STATE_GET_RANDOMNESS_FROM_TICKETS_001
cg, err := gen.NewGenerator()
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
@ -85,6 +88,8 @@ func BenchmarkGetRandomness(b *testing.B) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestChainExportImport(t *testing.T) {
|
func TestChainExportImport(t *testing.T) {
|
||||||
|
//stm: @CHAIN_GEN_NEXT_TIPSET_001
|
||||||
|
//stm: @CHAIN_STORE_IMPORT_001
|
||||||
cg, err := gen.NewGenerator()
|
cg, err := gen.NewGenerator()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -120,6 +125,9 @@ func TestChainExportImport(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestChainExportImportFull(t *testing.T) {
|
func TestChainExportImportFull(t *testing.T) {
|
||||||
|
//stm: @CHAIN_GEN_NEXT_TIPSET_001
|
||||||
|
//stm: @CHAIN_STORE_IMPORT_001, @CHAIN_STORE_EXPORT_001, @CHAIN_STORE_SET_HEAD_001
|
||||||
|
//stm: @CHAIN_STORE_GET_TIPSET_BY_HEIGHT_001
|
||||||
cg, err := gen.NewGenerator()
|
cg, err := gen.NewGenerator()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package chain
|
package chain
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -78,6 +79,7 @@ func assertGetSyncOp(t *testing.T, c chan *syncOp, ts *types.TipSet) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestSyncManagerEdgeCase(t *testing.T) {
|
func TestSyncManagerEdgeCase(t *testing.T) {
|
||||||
|
//stm: @CHAIN_SYNCER_SET_PEER_HEAD_001
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
a := mock.TipSet(mock.MkBlock(genTs, 1, 1))
|
a := mock.TipSet(mock.MkBlock(genTs, 1, 1))
|
||||||
@ -161,6 +163,7 @@ func TestSyncManagerEdgeCase(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestSyncManager(t *testing.T) {
|
func TestSyncManager(t *testing.T) {
|
||||||
|
//stm: @CHAIN_SYNCER_SET_PEER_HEAD_001
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
a := mock.TipSet(mock.MkBlock(genTs, 1, 1))
|
a := mock.TipSet(mock.MkBlock(genTs, 1, 1))
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package types
|
package types
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -14,6 +15,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestBigIntSerializationRoundTrip(t *testing.T) {
|
func TestBigIntSerializationRoundTrip(t *testing.T) {
|
||||||
|
//stm: @CHAIN_TYPES_PARSE_BIGINT_001
|
||||||
testValues := []string{
|
testValues := []string{
|
||||||
"0", "1", "10", "-10", "9999", "12345678901234567891234567890123456789012345678901234567890",
|
"0", "1", "10", "-10", "9999", "12345678901234567891234567890123456789012345678901234567890",
|
||||||
}
|
}
|
||||||
@ -42,6 +44,7 @@ func TestBigIntSerializationRoundTrip(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestFilRoundTrip(t *testing.T) {
|
func TestFilRoundTrip(t *testing.T) {
|
||||||
|
//stm: @TYPES_FIL_PARSE_001
|
||||||
testValues := []string{
|
testValues := []string{
|
||||||
"0 FIL", "1 FIL", "1.001 FIL", "100.10001 FIL", "101100 FIL", "5000.01 FIL", "5000 FIL",
|
"0 FIL", "1 FIL", "1.001 FIL", "100.10001 FIL", "101100 FIL", "5000.01 FIL", "5000 FIL",
|
||||||
}
|
}
|
||||||
@ -59,6 +62,7 @@ func TestFilRoundTrip(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestSizeStr(t *testing.T) {
|
func TestSizeStr(t *testing.T) {
|
||||||
|
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
|
||||||
cases := []struct {
|
cases := []struct {
|
||||||
in uint64
|
in uint64
|
||||||
out string
|
out string
|
||||||
@ -79,6 +83,7 @@ func TestSizeStr(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestSizeStrUnitsSymmetry(t *testing.T) {
|
func TestSizeStrUnitsSymmetry(t *testing.T) {
|
||||||
|
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
|
||||||
s := rand.NewSource(time.Now().UnixNano())
|
s := rand.NewSource(time.Now().UnixNano())
|
||||||
r := rand.New(s)
|
r := rand.New(s)
|
||||||
|
|
||||||
@ -95,6 +100,7 @@ func TestSizeStrUnitsSymmetry(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestSizeStrBig(t *testing.T) {
|
func TestSizeStrBig(t *testing.T) {
|
||||||
|
//stm: @CHAIN_TYPES_SIZE_BIGINT_001
|
||||||
ZiB := big.NewInt(50000)
|
ZiB := big.NewInt(50000)
|
||||||
ZiB = ZiB.Lsh(ZiB, 70)
|
ZiB = ZiB.Lsh(ZiB, 70)
|
||||||
|
|
||||||
|
@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -51,6 +52,7 @@ func testBlockHeader(t testing.TB) *BlockHeader {
 }

 func TestBlockHeaderSerialization(t *testing.T) {
+	//stm: @CHAIN_TYPES_BLOCK_HEADER_FROM_CBOR_001, @CHAIN_TYPES_BLOCK_HEADER_TO_CBOR_001
 	bh := testBlockHeader(t)

 	buf := new(bytes.Buffer)
@@ -71,6 +73,7 @@ func TestBlockHeaderSerialization(t *testing.T) {
 }

 func TestInteropBH(t *testing.T) {
+	//stm: @OTHER_IMPLEMENTATION_BLOCK_INTEROP_001
 	newAddr, err := address.NewSecp256k1Address([]byte("address0"))

 	if err != nil {
@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -11,6 +12,7 @@ import (
 )

 func TestPoissonFunction(t *testing.T) {
+	//stm: @CHAIN_TYPES_POISSON_001
 	tests := []struct {
 		lambdaBase uint64
 		lambdaShift uint
@@ -47,6 +49,7 @@ func TestPoissonFunction(t *testing.T) {
 }

 func TestLambdaFunction(t *testing.T) {
+	//stm: @CHAIN_TYPES_LAMBDA_001
 	tests := []struct {
 		power string
 		totalPower string
@@ -72,6 +75,7 @@ func TestLambdaFunction(t *testing.T) {
 }

 func TestExpFunction(t *testing.T) {
+	//stm: @CHAIN_TYPES_NEGATIVE_EXP_001
 	const N = 256

 	step := big.NewInt(5)
@@ -100,6 +104,7 @@ func q256ToF(x *big.Int) float64 {
 }

 func TestElectionLam(t *testing.T) {
+	//stm: @CHAIN_TYPES_LAMBDA_001
 	p := big.NewInt(64)
 	tot := big.NewInt(128)
 	lam := lambda(p, tot)
@@ -128,6 +133,7 @@ func BenchmarkWinCounts(b *testing.B) {
 }

 func TestWinCounts(t *testing.T) {
+	//stm: @TYPES_ELECTION_PROOF_COMPUTE_WIN_COUNT_001
 	totalPower := NewInt(100)
 	power := NewInt(20)

@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -7,6 +8,7 @@ import (
 )

 func TestFilShort(t *testing.T) {
+	//stm: @TYPES_FIL_PARSE_001
 	for _, s := range []struct {
 		fil string
 		expect string
@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -71,6 +72,7 @@ func TestEqualCall(t *testing.T) {
 		Params: []byte("hai"),
 	}

+	//stm: @TYPES_MESSAGE_EQUAL_CALL_001
 	require.True(t, m1.EqualCall(m2))
 	require.True(t, m1.EqualCall(m3))
 	require.False(t, m1.EqualCall(m4))
@@ -97,11 +99,13 @@ func TestMessageJson(t *testing.T) {
 	exp := []byte("{\"Version\":0,\"To\":\"f04\",\"From\":\"f00\",\"Nonce\":34,\"Value\":\"0\",\"GasLimit\":123,\"GasFeeCap\":\"234\",\"GasPremium\":\"234\",\"Method\":6,\"Params\":\"aGFp\",\"CID\":{\"/\":\"bafy2bzaced5rdpz57e64sc7mdwjn3blicglhpialnrph2dlbufhf6iha63dmc\"}}")
 	fmt.Println(string(b))

+	//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_001
 	require.Equal(t, exp, b)

 	var um Message
 	require.NoError(t, json.Unmarshal(b, &um))

+	//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
 	require.EqualValues(t, *m, um)
 }

@@ -131,10 +135,12 @@ func TestSignedMessageJson(t *testing.T) {
 	exp := []byte("{\"Message\":{\"Version\":0,\"To\":\"f04\",\"From\":\"f00\",\"Nonce\":34,\"Value\":\"0\",\"GasLimit\":123,\"GasFeeCap\":\"234\",\"GasPremium\":\"234\",\"Method\":6,\"Params\":\"aGFp\",\"CID\":{\"/\":\"bafy2bzaced5rdpz57e64sc7mdwjn3blicglhpialnrph2dlbufhf6iha63dmc\"}},\"Signature\":{\"Type\":0,\"Data\":null},\"CID\":{\"/\":\"bafy2bzacea5ainifngxj3rygaw2hppnyz2cw72x5pysqty2x6dxmjs5qg2uus\"}}")
 	fmt.Println(string(b))

+	//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_001
 	require.Equal(t, exp, b)

 	var um SignedMessage
 	require.NoError(t, json.Unmarshal(b, &um))

+	//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
 	require.EqualValues(t, *sm, um)
 }

@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -8,6 +9,7 @@ import (
 )

 func TestSignatureSerializeRoundTrip(t *testing.T) {
+	//stm: @CHAIN_TYPES_SIGNATURE_SERIALIZATION_001
 	s := &crypto.Signature{
 		Data: []byte("foo bar cat dog"),
 		Type: crypto.SigTypeBLS,
@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -12,6 +13,7 @@ import (
 )

 func TestTipSetKey(t *testing.T) {
+	//stm: @TYPES_TIPSETKEY_FROM_BYTES_001, @TYPES_TIPSETKEY_NEW_001
 	cb := cid.V1Builder{Codec: cid.DagCBOR, MhType: multihash.BLAKE2B_MIN + 31}
 	c1, _ := cb.Sum([]byte("a"))
 	c2, _ := cb.Sum([]byte("b"))
@@ -1,3 +1,4 @@
+//stm: #unit
 package types

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package chain

 import (
@@ -12,6 +13,7 @@ import (
 )

 func TestSignedMessageJsonRoundtrip(t *testing.T) {
+	//stm: @TYPES_MESSAGE_JSON_EQUAL_CALL_002
 	to, _ := address.NewIDAddress(5234623)
 	from, _ := address.NewIDAddress(603911192)
 	smsg := &types.SignedMessage{
@@ -40,6 +42,7 @@ func TestSignedMessageJsonRoundtrip(t *testing.T) {
 }

 func TestAddressType(t *testing.T) {
+	//stm: @CHAIN_TYPES_ADDRESS_PREFIX_001
 	build.SetAddressNetwork(address.Testnet)
 	addr, err := makeRandomAddress()
 	if err != nil {
@@ -1,3 +1,4 @@
+//stm: #unit
 package vectors

 import (
@@ -26,6 +27,7 @@ func LoadVector(t *testing.T, f string, out interface{}) {
 }

 func TestBlockHeaderVectors(t *testing.T) {
+	//stm: @CHAIN_TYPES_SERIALIZATION_BLOCK_001
 	var headers []HeaderVector
 	LoadVector(t, "block_headers.json", &headers)

@@ -46,6 +48,7 @@ func TestBlockHeaderVectors(t *testing.T) {
 }

 func TestMessageSigningVectors(t *testing.T) {
+	//stm: @CHAIN_TYPES_SERIALIZATION_SIGNED_MESSAGE_001
 	var msvs []MessageSigningVector
 	LoadVector(t, "message_signing.json", &msvs)

@@ -64,6 +67,7 @@ func TestMessageSigningVectors(t *testing.T) {
 }

 func TestUnsignedMessageVectors(t *testing.T) {
+	//stm: @CHAIN_TYPES_SERIALIZATION_MESSAGE_001
 	var msvs []UnsignedMessageVector
 	LoadVector(t, "unsigned_messages.json", &msvs)

@@ -1,3 +1,4 @@
+//stm: #unit
 package vm

 import (
@@ -9,6 +10,7 @@ import (
 )

 func TestGasBurn(t *testing.T) {
+	//stm: @BURN_ESTIMATE_GAS_OVERESTIMATION_BURN_001
 	tests := []struct {
 		used int64
 		limit int64
@@ -40,6 +42,7 @@ func TestGasBurn(t *testing.T) {
 }

 func TestGasOutputs(t *testing.T) {
+	//stm: @BURN_ESTIMATE_GAS_OUTPUTS_001
 	baseFee := types.NewInt(10)
 	tests := []struct {
 		used int64
@@ -1,3 +1,4 @@
+//stm: #unit
 package vm

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package vm

 import (
@@ -106,6 +107,7 @@ func (*basicRtMessage) ValueReceived() abi.TokenAmount {
 }

 func TestInvokerBasic(t *testing.T) {
+	//stm: @INVOKER_TRANSFORM_001
 	inv := ActorRegistry{}
 	code, err := inv.transform(basicContract{})
 	assert.NoError(t, err)
@@ -1,3 +1,4 @@
+//stm: #unit
 package vm

 import (
@@ -22,6 +23,7 @@ func (*NotAVeryGoodMarshaler) MarshalCBOR(writer io.Writer) error {
 var _ cbg.CBORMarshaler = &NotAVeryGoodMarshaler{}

 func TestRuntimePutErrors(t *testing.T) {
+	//stm: @CHAIN_VM_STORE_PUT_002
 	defer func() {
 		err := recover()
 		if err == nil {
@@ -1,4 +1,4 @@
-//stm: #cli
+//stm: #unit
 package cli

 import (
@@ -1,4 +1,4 @@
-//stm: #cli
+//stm: #unit
 package cli

 import (
@@ -1,3 +1,5 @@
+//stm: ignore
+//stm: #unit
 package cli

 import (
@@ -1,3 +1,5 @@
+//stm: ignore
+//stm: #unit
 package cli

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package cli

 import (
@@ -1,4 +1,4 @@
-//stm: #cli
+//stm: #unit
 package cli

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package main

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package main

 import (
@@ -8,6 +9,7 @@ import (
 )

 func TestRateLimit(t *testing.T) {
+	//stm: @CMD_LIMITER_GET_IP_LIMITER_001, @CMD_LIMITER_GET_WALLET_LIMITER_001
 	limiter := NewLimiter(LimiterConfig{
 		TotalRate: time.Second,
 		TotalBurst: 20,
@@ -1,3 +1,4 @@
+//stm: #unit
 package main

 import (
@@ -9,6 +10,7 @@ import (
 )

 func TestAppendCIDsToWindow(t *testing.T) {
+	//stm: @CMD_HEALTH_APPEND_CIDS_001
 	assert := assert.New(t)
 	var window CidWindow
 	threshold := 3
@@ -27,6 +29,7 @@ func TestAppendCIDsToWindow(t *testing.T) {
 }

 func TestCheckWindow(t *testing.T) {
+	//stm: @CMD_HEALTH_APPEND_CIDS_001, @CMD_HEALTH_CHECK_WINDOW_001
 	assert := assert.New(t)
 	threshold := 3

@@ -1,3 +1,4 @@
+//stm: #unit
 package main

 import (
@@ -23,6 +24,7 @@ import (
 )

 func TestWorkerKeyChange(t *testing.T) {
+	//stm: @OTHER_WORKER_KEY_CHANGE_001
 	if testing.Short() {
 		t.Skip("skipping test in short mode")
 	}
@@ -1,3 +1,4 @@
+//stm: #integration
 package main

 import (
@@ -49,6 +50,7 @@ func TestMinerAllInfo(t *testing.T) {

 	t.Run("pre-info-all", run)

+	//stm: @CLIENT_DATA_IMPORT_001, @CLIENT_STORAGE_DEALS_GET_001
 	dh := kit.NewDealHarness(t, client, miner, miner)
 	deal, res, inPath := dh.MakeOnlineDeal(context.Background(), kit.MakeFullDealParams{Rseed: 6})
 	outPath := dh.PerformRetrieval(context.Background(), deal, res.Root, false)
@@ -1,3 +1,4 @@
+//stm: #unit
 package stages

 import (
@@ -13,6 +14,7 @@ import (
 )

 func TestCommitQueue(t *testing.T) {
+	//stm: @CMD_COMMIT_Q_ENQUEUE_COMMIT_001
 	var q commitQueue
 	addr1, err := address.NewIDAddress(1000)
 	require.NoError(t, err)
@@ -46,6 +48,7 @@ func TestCommitQueue(t *testing.T) {
 		SectorNumber: 6,
 	}))

+	//stm: @CMD_COMMIT_Q_ADVANCE_EPOCH_001, @CMD_COMMIT_Q_NEXT_MINER_001
 	epoch := abi.ChainEpoch(0)
 	q.advanceEpoch(epoch)
 	_, _, ok := q.nextMiner()
@@ -1,3 +1,4 @@
+//stm: #unit
 package main

 import (
@@ -10,6 +11,7 @@ import (
 )

 func TestProtocolCodenames(t *testing.T) {
+	//stm: @OTHER_IMPLEMENTATION_EPOCH_CODENAMES_001
 	if height := abi.ChainEpoch(100); GetProtocolCodename(height) != "genesis" {
 		t.Fatal("expected genesis codename")
 	}
@@ -1,3 +1,4 @@
+//stm: #chaos
 package chaos

 import (
@@ -15,6 +16,7 @@ import (
 )

 func TestSingleton(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_BUILDER_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -29,6 +31,7 @@ func TestSingleton(t *testing.T) {
 }

 func TestCallerValidationNone(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -40,6 +43,7 @@ func TestCallerValidationNone(t *testing.T) {
 }

 func TestCallerValidationIs(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
 	caller := atesting2.NewIDAddr(t, 100)
 	receiver := atesting2.NewIDAddr(t, 101)
 	builder := mock2.NewBuilder(context.Background(), receiver)
@@ -69,6 +73,7 @@ func TestCallerValidationIs(t *testing.T) {
 }

 func TestCallerValidationType(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
 	caller := atesting2.NewIDAddr(t, 100)
 	receiver := atesting2.NewIDAddr(t, 101)
 	builder := mock2.NewBuilder(context.Background(), receiver)
@@ -95,6 +100,7 @@ func TestCallerValidationType(t *testing.T) {
 }

 func TestCallerValidationInvalidBranch(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CALLER_VALIDATION_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -108,6 +114,7 @@ func TestCallerValidationInvalidBranch(t *testing.T) {
 }

 func TestDeleteActor(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CREATE_ACTOR_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	beneficiary := atesting2.NewIDAddr(t, 101)
 	builder := mock2.NewBuilder(context.Background(), receiver)
@@ -122,6 +129,7 @@ func TestDeleteActor(t *testing.T) {
 }

 func TestMutateStateInTransaction(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -149,6 +157,7 @@ func TestMutateStateInTransaction(t *testing.T) {
 }

 func TestMutateStateAfterTransaction(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -183,6 +192,7 @@ func TestMutateStateAfterTransaction(t *testing.T) {
 }

 func TestMutateStateReadonly(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_CREATE_STATE_001, @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -217,6 +227,7 @@ func TestMutateStateReadonly(t *testing.T) {
 }

 func TestMutateStateInvalidBranch(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_MUTATE_STATE_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -231,6 +242,7 @@ func TestMutateStateInvalidBranch(t *testing.T) {
 }

 func TestAbortWith(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_ABORT_WITH_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -249,6 +261,7 @@ func TestAbortWith(t *testing.T) {
 }

 func TestAbortWithUncontrolled(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_ABORT_WITH_001
 	receiver := atesting2.NewIDAddr(t, 100)
 	builder := mock2.NewBuilder(context.Background(), receiver)

@@ -266,6 +279,7 @@ func TestAbortWithUncontrolled(t *testing.T) {
 }

 func TestInspectRuntime(t *testing.T) {
+	//stm: @CHAIN_ACTOR_CHAOS_INSPECT_RUNTIME_001, @CHAIN_ACTOR_CHAOS_CREATE_STATE_001
 	caller := atesting2.NewIDAddr(t, 100)
 	receiver := atesting2.NewIDAddr(t, 101)
 	builder := mock2.NewBuilder(context.Background(), receiver)
@@ -1,3 +1,6 @@
+//stm: ignore
+// This file does not test any behaviors by itself; rather, it runs other test files
+// Therefore, this file should not be annotated.
 package conformance

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package gateway

 import (
@@ -94,6 +95,7 @@ func TestGatewayAPIChainGetTipSetByHeight(t *testing.T) {
 			// Create tipsets from genesis up to tskh and return the highest
 			ts := mock.createTipSets(tt.args.tskh, tt.args.genesisTS)

+			//stm: @GATEWAY_NODE_GET_TIPSET_BY_HEIGHT_001
 			got, err := a.ChainGetTipSetByHeight(ctx, tt.args.h, ts.Key())
 			if tt.expErr {
 				require.Error(t, err)
@@ -241,6 +243,7 @@ func (m *mockGatewayDepsAPI) Version(context.Context) (api.APIVersion, error) {
 }

 func TestGatewayVersion(t *testing.T) {
+	//stm: @GATEWAY_NODE_GET_VERSION_001
 	ctx := context.Background()
 	mock := &mockGatewayDepsAPI{}
 	a := NewNode(mock, DefaultLookbackCap, DefaultStateWaitLookbackLimit)
@@ -1,3 +1,4 @@
+//stm: #integration
 package itests

 import (
@@ -21,6 +22,7 @@ import (
 )

 func TestBatchDealInput(t *testing.T) {
+	//stm: @MINER_SECTOR_STATUS_001, @MINER_SECTOR_LIST_001
 	kit.QuietMiningLogs()

 	var (
@@ -63,6 +63,7 @@ func runTestCCUpgrade(t *testing.T) *kit.TestFullNode {
 	}
 	waitForSectorActive(ctx, t, CCUpgrade, client, maddr)

+	//stm: @SECTOR_CC_UPGRADE_001
 	err = miner.SectorMarkForUpgrade(ctx, sl[0], true)
 	require.NoError(t, err)

@@ -27,6 +27,10 @@ import (
 // TestDealWithMarketAndMinerNode is running concurrently a number of storage and retrieval deals towards a miner
 // architecture where the `mining/sealing/proving` node is a separate process from the `markets` node
 func TestDealWithMarketAndMinerNode(t *testing.T) {
+	//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
+	//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
+	//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
+	//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
 	if testing.Short() {
 		t.Skip("skipping test in short mode")
 	}
@@ -126,6 +130,10 @@ func TestDealCyclesConcurrent(t *testing.T) {
 }

 func TestSimultanenousTransferLimit(t *testing.T) {
+	//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
+	//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
+	//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
+	//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
 	t.Skip("skipping as flaky #7152")

 	if testing.Short() {
@@ -1,3 +1,4 @@
+//stm: #integration
 package itests

 import (
@@ -36,7 +37,13 @@ var (
 )

 func TestDMLevelPartialRetrieval(t *testing.T) {
+	//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
+	//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
+	//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
+	//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001

+	//stm: @CHAIN_INCOMING_HANDLE_INCOMING_BLOCKS_001, @CHAIN_INCOMING_VALIDATE_BLOCK_PUBSUB_001, @CHAIN_INCOMING_VALIDATE_MESSAGE_PUBSUB_001
+	//stm: @CLIENT_RETRIEVAL_RETRIEVE_001, @CLIENT_RETRIEVAL_FIND_001
 	ctx := context.Background()

 	policy.SetPreCommitChallengeDelay(2)
@@ -1,3 +1,4 @@
+//stm: #integration
 package itests

 import (
@@ -19,6 +20,7 @@ import (
 // we reordered the checks to make sure that a transaction with too much money in it sent to yourself will fail instead of succeeding as a noop
 // more info in this PR! https://github.com/filecoin-project/lotus/pull/7637
 func TestSelfSentTxnV15(t *testing.T) {
+	//stm: @TOKEN_WALLET_SIGN_001, @CHAIN_MEMPOOL_PUSH_001
 	ctx := context.Background()

 	kit.QuietMiningLogs()
@@ -60,6 +62,7 @@ func TestSelfSentTxnV15(t *testing.T) {
 }

 func TestSelfSentTxnV14(t *testing.T) {
+	//stm: @TOKEN_WALLET_SIGN_001, @CHAIN_MEMPOOL_PUSH_001
 	ctx := context.Background()

 	kit.QuietMiningLogs()
@@ -1,3 +1,4 @@
+//stm: #unit
 package alerting

 import (
@@ -12,6 +13,7 @@ import (
 )

 func TestAlerting(t *testing.T) {
+	//stm: @JOURNAL_ALERTS_ADD_ALERT_TYPE_001, @JOURNAL_ALERTS_RAISE_001, @JOURNAL_ALERTS_GET_ALERTS_001
 	mockCtrl := gomock.NewController(t)
 	defer mockCtrl.Finish()
 	j := mockjournal.NewMockJournal(mockCtrl)
@@ -1,3 +1,4 @@
+//stm: #unit
 package journal

 import (
@@ -7,6 +8,7 @@ import (
 )

 func TestDisabledEvents(t *testing.T) {
+	//stm: @JOURNAL_REGISTRY_NEW_EVENT_TYPE_001, @JOURNAL_REGISTRY_PARSE_DISABLED_001
 	req := require.New(t)

 	test := func(dis DisabledEvents) func(*testing.T) {
@@ -44,6 +46,7 @@ func TestDisabledEvents(t *testing.T) {
 }

 func TestParseDisableEvents(t *testing.T) {
+	//stm: @JOURNAL_REGISTRY_PARSE_DISABLED_002
 	_, err := ParseDisabledEvents("system1:disabled1:failed,system1:disabled2")
 	require.Error(t, err)
 }
@@ -1,3 +1,4 @@
+//stm: #unit
 package backupds

 import (
@@ -37,6 +38,7 @@ func checkVals(t *testing.T, ds datastore.Datastore, start, end int, exist bool)
 }

 func TestNoLogRestore(t *testing.T) {
+	//stm: @OTHER_DATASTORE_RESTORE_002
 	ds1 := datastore.NewMapDatastore()

 	putVals(t, ds1, 0, 10)
@@ -57,6 +59,7 @@ func TestNoLogRestore(t *testing.T) {
 }

 func TestLogRestore(t *testing.T) {
+	//stm: @OTHER_DATASTORE_RESTORE_001
 	logdir, err := ioutil.TempDir("", "backupds-test-")
 	require.NoError(t, err)
 	defer os.RemoveAll(logdir) // nolint
|
|||||||
|
//stm: #unit
|
||||||
package rpcenc
|
package rpcenc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
//stm: ignore
|
||||||
|
// Ignored because implementation relies on external (ffi) lib
|
||||||
package bls
|
package bls
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
//stm: ignore
|
||||||
|
// Ignored because implementation relies on external (ffi) lib
|
||||||
package bls_test
|
package bls_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: ignore
|
||||||
package stati
|
package stati
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package tablewriter
|
package tablewriter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -8,6 +9,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestTableWriter(t *testing.T) {
|
func TestTableWriter(t *testing.T) {
|
||||||
|
//stm: @OTHER_IMPLEMENTATION_TABLE_WRITE_001, @OTHER_IMPLEMENTATION_TABLE_FLUSH_001
|
||||||
tw := New(Col("C1"), Col("X"), Col("C333"), NewLineCol("Thing"))
|
tw := New(Col("C1"), Col("X"), Col("C333"), NewLineCol("Thing"))
|
||||||
tw.Write(map[string]interface{}{
|
tw.Write(map[string]interface{}{
|
||||||
"C1": "234",
|
"C1": "234",
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
|
//stm: ignore
|
||||||
//go:build !windows
|
//go:build !windows
|
||||||
// +build !windows
|
// +build !windows
|
||||||
|
|
||||||
|
// This file tests file descriptor limits; since this is an OS feature, it should not be annotated
|
||||||
|
|
||||||
package ulimit
|
package ulimit
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package dagstore
|
package dagstore
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -88,6 +89,7 @@ func TestLotusAccessorFetchUnsealedPiece(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Fetch the piece
|
// Fetch the piece
|
||||||
|
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001
|
||||||
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
||||||
if tc.expectErr {
|
if tc.expectErr {
|
||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
@ -101,6 +103,7 @@ func TestLotusAccessorFetchUnsealedPiece(t *testing.T) {
|
|||||||
|
|
||||||
require.Equal(t, tc.fetchedData, string(bz))
|
require.Equal(t, tc.fetchedData, string(bz))
|
||||||
|
|
||||||
|
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||||
uns, err := api.IsUnsealed(ctx, cid1)
|
uns, err := api.IsUnsealed(ctx, cid1)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Equal(t, tc.isUnsealed, uns)
|
require.Equal(t, tc.isUnsealed, uns)
|
||||||
@ -126,6 +129,7 @@ func TestLotusAccessorGetUnpaddedCARSize(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Check that the data length is correct
|
// Check that the data length is correct
|
||||||
|
//stm: @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||||
len, err := api.GetUnpaddedCARSize(ctx, cid1)
|
len, err := api.GetUnpaddedCARSize(ctx, cid1)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.EqualValues(t, 10, len)
|
require.EqualValues(t, 10, len)
|
||||||
@ -160,6 +164,7 @@ func TestThrottle(t *testing.T) {
|
|||||||
errgrp, ctx := errgroup.WithContext(context.Background())
|
errgrp, ctx := errgroup.WithContext(context.Background())
|
||||||
for i := 0; i < 10; i++ {
|
for i := 0; i < 10; i++ {
|
||||||
errgrp.Go(func() error {
|
errgrp.Go(func() error {
|
||||||
|
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001
|
||||||
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
r, err := api.FetchUnsealedPiece(ctx, cid1)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
_ = r.Close()
|
_ = r.Close()
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: @unit
|
||||||
package dagstore
|
package dagstore
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -17,6 +18,8 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestLotusMount(t *testing.T) {
|
func TestLotusMount(t *testing.T) {
|
||||||
|
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001, @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||||
|
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
bgen := blocksutil.NewBlockGenerator()
|
bgen := blocksutil.NewBlockGenerator()
|
||||||
cid := bgen.Next().Cid()
|
cid := bgen.Next().Cid()
|
||||||
@ -88,6 +91,7 @@ func TestLotusMount(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestLotusMountDeserialize(t *testing.T) {
|
func TestLotusMountDeserialize(t *testing.T) {
|
||||||
|
//stm: @MARKET_DAGSTORE_DESERIALIZE_CID_001
|
||||||
api := &minerAPI{}
|
api := &minerAPI{}
|
||||||
|
|
||||||
bgen := blocksutil.NewBlockGenerator()
|
bgen := blocksutil.NewBlockGenerator()
|
||||||
@ -115,6 +119,8 @@ func TestLotusMountDeserialize(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestLotusMountRegistration(t *testing.T) {
|
func TestLotusMountRegistration(t *testing.T) {
|
||||||
|
//stm: @MARKET_DAGSTORE_FETCH_UNSEALED_PIECE_001, @MARKET_DAGSTORE_GET_UNPADDED_CAR_SIZE_001
|
||||||
|
//stm: @MARKET_DAGSTORE_IS_PIECE_UNSEALED_001
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
bgen := blocksutil.NewBlockGenerator()
|
bgen := blocksutil.NewBlockGenerator()
|
||||||
cid := bgen.Next().Cid()
|
cid := bgen.Next().Cid()
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #integration
|
||||||
package dagstore
|
package dagstore
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -59,6 +60,7 @@ func TestShardRegistration(t *testing.T) {
|
|||||||
|
|
||||||
deals := []storagemarket.MinerDeal{{
|
deals := []storagemarket.MinerDeal{{
|
||||||
// Should be registered
|
// Should be registered
|
||||||
|
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_001
|
||||||
State: storagemarket.StorageDealSealing,
|
State: storagemarket.StorageDealSealing,
|
||||||
SectorNumber: unsealedSector1,
|
SectorNumber: unsealedSector1,
|
||||||
ClientDealProposal: market.ClientDealProposal{
|
ClientDealProposal: market.ClientDealProposal{
|
||||||
@ -77,6 +79,7 @@ func TestShardRegistration(t *testing.T) {
|
|||||||
},
|
},
|
||||||
}, {
|
}, {
|
||||||
// Should be ignored because deal is no longer active
|
// Should be ignored because deal is no longer active
|
||||||
|
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_003
|
||||||
State: storagemarket.StorageDealError,
|
State: storagemarket.StorageDealError,
|
||||||
SectorNumber: unsealedSector2,
|
SectorNumber: unsealedSector2,
|
||||||
ClientDealProposal: market.ClientDealProposal{
|
ClientDealProposal: market.ClientDealProposal{
|
||||||
@ -114,6 +117,7 @@ func TestShardRegistration(t *testing.T) {
|
|||||||
require.True(t, migrated)
|
require.True(t, migrated)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
//stm: @MARKET_DAGSTORE_GET_ALL_SHARDS_001
|
||||||
info := dagst.AllShardsInfo()
|
info := dagst.AllShardsInfo()
|
||||||
require.Len(t, info, 2)
|
require.Len(t, info, 2)
|
||||||
for _, i := range info {
|
for _, i := range info {
|
||||||
@ -121,6 +125,7 @@ func TestShardRegistration(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Run register shard migration again
|
// Run register shard migration again
|
||||||
|
//stm: @MARKET_DAGSTORE_MIGRATE_DEALS_002
|
||||||
migrated, err = w.MigrateDeals(ctx, deals)
|
migrated, err = w.MigrateDeals(ctx, deals)
|
||||||
require.False(t, migrated)
|
require.False(t, migrated)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package dagstore
|
package dagstore
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -56,6 +57,7 @@ func TestWrapperAcquireRecovery(t *testing.T) {
|
|||||||
}
|
}
|
||||||
w.dagst = mock
|
w.dagst = mock
|
||||||
|
|
||||||
|
//stm: @MARKET_DAGSTORE_ACQUIRE_SHARD_002
|
||||||
mybs, err := w.LoadShard(ctx, pieceCid)
|
mybs, err := w.LoadShard(ctx, pieceCid)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
@ -104,10 +106,12 @@ func TestWrapperBackground(t *testing.T) {
|
|||||||
w.dagst = mock
|
w.dagst = mock
|
||||||
|
|
||||||
// Start up the wrapper
|
// Start up the wrapper
|
||||||
|
//stm: @MARKET_DAGSTORE_START_001
|
||||||
err = w.Start(ctx)
|
err = w.Start(ctx)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Expect GC to be called automatically
|
// Expect GC to be called automatically
|
||||||
|
//stm: @MARKET_DAGSTORE_START_002
|
||||||
tctx, cancel := context.WithTimeout(ctx, time.Second)
|
tctx, cancel := context.WithTimeout(ctx, time.Second)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
select {
|
select {
|
||||||
@ -118,6 +122,7 @@ func TestWrapperBackground(t *testing.T) {
|
|||||||
|
|
||||||
// Expect that when the wrapper is closed it will call close on the
|
// Expect that when the wrapper is closed it will call close on the
|
||||||
// DAG store
|
// DAG store
|
||||||
|
//stm: @MARKET_DAGSTORE_CLOSE_001
|
||||||
err = w.Close()
|
err = w.Close()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package storageadapter
|
package storageadapter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -28,6 +29,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestDealPublisher(t *testing.T) {
|
func TestDealPublisher(t *testing.T) {
|
||||||
|
//stm: @MARKET_DEAL_PUBLISHER_PUBLISH_001, @MARKET_DEAL_PUBLISHER_GET_PENDING_DEALS_001
|
||||||
oldClock := build.Clock
|
oldClock := build.Clock
|
||||||
t.Cleanup(func() { build.Clock = oldClock })
|
t.Cleanup(func() { build.Clock = oldClock })
|
||||||
mc := clock.NewMock()
|
mc := clock.NewMock()
|
||||||
@ -188,6 +190,8 @@ func TestDealPublisher(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestForcePublish(t *testing.T) {
|
func TestForcePublish(t *testing.T) {
|
||||||
|
//stm: @MARKET_DEAL_PUBLISHER_PUBLISH_001, @MARKET_DEAL_PUBLISHER_GET_PENDING_DEALS_001
|
||||||
|
//stm: @MARKET_DEAL_PUBLISHER_FORCE_PUBLISH_ALL_001
|
||||||
dpapi := newDPAPI(t)
|
dpapi := newDPAPI(t)
|
||||||
|
|
||||||
// Create a deal publisher
|
// Create a deal publisher
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package storageadapter
|
package storageadapter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -228,6 +229,7 @@ func TestOnDealSectorPreCommitted(t *testing.T) {
|
|||||||
Err2: data.currentDealInfoErr2,
|
Err2: data.currentDealInfoErr2,
|
||||||
}
|
}
|
||||||
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
||||||
|
//stm: @MARKET_ADAPTER_ON_SECTOR_PRE_COMMIT_001
|
||||||
err = scm.OnDealSectorPreCommitted(ctx, provider, proposal, publishCid, cb)
|
err = scm.OnDealSectorPreCommitted(ctx, provider, proposal, publishCid, cb)
|
||||||
if data.expectedError == nil {
|
if data.expectedError == nil {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
@ -439,6 +441,7 @@ func TestOnDealSectorCommitted(t *testing.T) {
|
|||||||
Err2: data.currentDealInfoErr2,
|
Err2: data.currentDealInfoErr2,
|
||||||
}
|
}
|
||||||
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
scm := newSectorCommittedManager(eventsAPI, mockDIAPI, mockPCAPI)
|
||||||
|
//stm: @MARKET_ADAPTER_ON_SECTOR_COMMIT_001
|
||||||
err = scm.OnDealSectorCommitted(ctx, provider, sectorNumber, proposal, publishCid, cb)
|
err = scm.OnDealSectorCommitted(ctx, provider, sectorNumber, proposal, publishCid, cb)
|
||||||
if data.expectedError == nil {
|
if data.expectedError == nil {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
//stm: #unit
|
||||||
package config
|
package config
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@ -12,6 +13,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestDefaultFullNodeRoundtrip(t *testing.T) {
|
func TestDefaultFullNodeRoundtrip(t *testing.T) {
|
||||||
|
//stm: @OTHER_IMPLEMENTATION_001
|
||||||
c := DefaultFullNode()
|
c := DefaultFullNode()
|
||||||
|
|
||||||
var s string
|
var s string
|
||||||
@ -51,6 +53,7 @@ func TestDefaultFullNodeCommentRoundtrip(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestDefaultMinerRoundtrip(t *testing.T) {
|
func TestDefaultMinerRoundtrip(t *testing.T) {
|
||||||
|
//stm: @OTHER_IMPLEMENTATION_001
|
||||||
c := DefaultStorageMiner()
|
c := DefaultStorageMiner()
|
||||||
|
|
||||||
var s string
|
var s string
|
||||||
|
@ -12,6 +12,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestDecodeNothing(t *testing.T) {
|
func TestDecodeNothing(t *testing.T) {
|
||||||
|
//stm: @NODE_CONFIG_LOAD_FILE_002
|
||||||
assert := assert.New(t)
|
assert := assert.New(t)
|
||||||
|
|
||||||
{
|
{
|
||||||
@ -30,6 +31,7 @@ func TestDecodeNothing(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestParitalConfig(t *testing.T) {
|
func TestParitalConfig(t *testing.T) {
|
||||||
|
//stm: @NODE_CONFIG_LOAD_FILE_003
|
||||||
assert := assert.New(t)
|
assert := assert.New(t)
|
||||||
cfgString := `
|
cfgString := `
|
||||||
[API]
|
[API]
|
||||||
|
@ -27,6 +27,7 @@ import (
|
|||||||
|
|
||||||
// This test uses a full "dense" CARv2, and not a filestore (positional mapping).
|
// This test uses a full "dense" CARv2, and not a filestore (positional mapping).
|
||||||
func TestRoundtripUnixFS_Dense(t *testing.T) {
|
func TestRoundtripUnixFS_Dense(t *testing.T) {
|
||||||
|
//stm: @CLIENT_DATA_IMPORT_002
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
inputPath, inputContents := genInputFile(t)
|
inputPath, inputContents := genInputFile(t)
|
||||||
@ -75,6 +76,7 @@ func TestRoundtripUnixFS_Dense(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestRoundtripUnixFS_Filestore(t *testing.T) {
|
func TestRoundtripUnixFS_Filestore(t *testing.T) {
|
||||||
|
//stm: @CLIENT_DATA_IMPORT_001
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
a := &API{
|
a := &API{
|
||||||
Imports: &imports.Manager{},
|
Imports: &imports.Manager{},
|
||||||
|
@ -13,6 +13,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestMedian(t *testing.T) {
|
func TestMedian(t *testing.T) {
|
||||||
|
//stm: @MARKET_GAS_GET_MEDIAN_PREMIUM_001
|
||||||
require.Equal(t, types.NewInt(5), medianGasPremium([]GasMeta{
|
require.Equal(t, types.NewInt(5), medianGasPremium([]GasMeta{
|
||||||
{big.NewInt(5), build.BlockGasTarget},
|
{big.NewInt(5), build.BlockGasTarget},
|
||||||
}, 1))
|
}, 1))
|
||||||
|
@@ -30,5 +30,10 @@ func genFsRepo(t *testing.T) (*FsRepo, func()) {
 func TestFsBasic(t *testing.T) {
 	repo, closer := genFsRepo(t)
 	defer closer()
+	//stm: @NODE_FS_REPO_LOCK_001,@NODE_FS_REPO_LOCK_002,@NODE_FS_REPO_UNLOCK_001
+	//stm: @NODE_FS_REPO_SET_API_ENDPOINT_001, @NODE_FS_REPO_GET_API_ENDPOINT_001
+	//stm: @NODE_FS_REPO_GET_CONFIG_001, @NODE_FS_REPO_SET_CONFIG_001
+	//stm: @NODE_FS_REPO_LIST_KEYS_001, @NODE_FS_REPO_PUT_KEY_001
+	//stm: @NODE_FS_REPO_GET_KEY_001, NODE_FS_REPO_DELETE_KEY_001
 	basicTest(t, repo)
 }
@@ -6,6 +6,7 @@ import (
 )

 func TestMemBasic(t *testing.T) {
+	//stm: @REPO_MEM_001
 	repo := NewMemory(nil)
 	basicTest(t, repo)
 }
@@ -1,3 +1,4 @@
+//stm: #unit
 package node

 import (
@@ -10,6 +11,7 @@ import (
 )

 func TestMonitorShutdown(t *testing.T) {
+	//stm: @NODE_COMMON_SHUTDOWN_001
 	signalCh := make(chan struct{})

 	// Three shutdown handlers.
@@ -15,6 +15,7 @@ func testCids() []cid.Cid {
 }

 func TestMsgListener(t *testing.T) {
+	//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
 	ml := newMsgListeners()

 	done := false
@@ -33,6 +34,7 @@ func TestMsgListener(t *testing.T) {
 }

 func TestMsgListenerNilErr(t *testing.T) {
+	//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
 	ml := newMsgListeners()

 	done := false
@@ -50,6 +52,7 @@ func TestMsgListenerNilErr(t *testing.T) {
 }

 func TestMsgListenerUnsub(t *testing.T) {
+	//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
 	ml := newMsgListeners()

 	done := false
@@ -72,6 +75,7 @@ func TestMsgListenerUnsub(t *testing.T) {
 }

 func TestMsgListenerMulti(t *testing.T) {
+	//stm: @TOKEN_PAYCH_REG_ON_MSG_COMPLETE_001, @TOKEN_PAYCH_FIRE_ON_MSG_COMPLETE_001
 	ml := newMsgListeners()

 	count := 0
@@ -502,6 +502,7 @@ func TestAddVoucherInboundWalletKey(t *testing.T) {
 	sv := createTestVoucher(t, ch, voucherLane, nonce, voucherAmount, fromKeyPrivate)
 	_, err = mgr.AddVoucherInbound(ctx, ch, sv, nil, minDelta)

+	//stm: @TOKEN_PAYCH_VOUCHER_CREATE_006
 	// Should fail because there is no wallet key matching the channel To
 	// address (ie, the channel is not "owned" by this node)
 	require.Error(t, err)
@@ -513,6 +514,7 @@ func TestAddVoucherInboundWalletKey(t *testing.T) {
 	sv = createTestVoucher(t, ch, voucherLane, nonce, voucherAmount, fromKeyPrivate)
 	_, err = mgr.AddVoucherInbound(ctx, ch, sv, nil, minDelta)

+	//stm: @TOKEN_PAYCH_VOUCHER_CREATE_001
 	// Should now pass because there is a wallet key matching the channel To
 	// address
 	require.NoError(t, err)
@@ -626,6 +628,7 @@ func TestCheckSpendable(t *testing.T) {
 	}
 	s.mock.setCallResponse(successResponse)

+	//stm: @TOKEN_PAYCH_CHECK_SPENDABLE_001
 	// Check that spendable is true
 	secret := []byte("secret")
 	spendable, err := s.mgr.CheckVoucherSpendable(ctx, s.ch, voucher, secret, nil)
@@ -655,6 +658,7 @@ func TestCheckSpendable(t *testing.T) {
 	require.NoError(t, err)
 	require.True(t, spendable)

+	//stm: @TOKEN_PAYCH_CHECK_SPENDABLE_002
 	// Check that voucher is no longer spendable once it has been submitted
 	_, err = s.mgr.SubmitVoucher(ctx, s.ch, voucher, nil, nil)
 	require.NoError(t, err)
@@ -1,3 +1,4 @@
+//stm: #unit
 package paychmgr

 import (
@@ -59,6 +60,7 @@ func testChannelResponse(t *testing.T, ch address.Address) types.MessageReceipt
 // TestPaychGetCreateChannelMsg tests that GetPaych sends a message to create
 // a new channel with the correct funds
 func TestPaychGetCreateChannelMsg(t *testing.T) {
+	//stm: @TOKEN_PAYCH_CREATE_001
 	ctx := context.Background()
 	store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))

@@ -1,3 +1,4 @@
+//stm: #unit
 package paychmgr

 import (
@@ -1,3 +1,4 @@
+//stm: #unit
 package paychmgr

 import (
@@ -13,6 +14,8 @@ import (
 )

 func TestStore(t *testing.T) {
+	//stm: @TOKEN_PAYCH_ALLOCATE_LANE_001, @TOKEN_PAYCH_LIST_CHANNELS_001
+	//stm: @TOKEN_PAYCH_TRACK_CHANNEL_002, @TOKEN_PAYCH_TRACK_CHANNEL_001
 	ctx := context.Background()

 	store := NewStore(ds_sync.MutexWrap(ds.NewMapDatastore()))
@@ -1,3 +1,4 @@
+//stm: #unit
 package storage

 import (
@@ -200,6 +201,10 @@ func (m *mockAPI) setChangeHandler(ch *changeHandler) {

 // TestChangeHandlerBasic verifies we can generate a proof and submit it
 func TestChangeHandlerBasic(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -248,6 +253,10 @@ func TestChangeHandlerBasic(t *testing.T) {
 // chain is already advanced past the confidence interval, we should move from
 // proving to submitting without a head change in between.
 func TestChangeHandlerFromProvingToSubmittingNoHeadChange(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005
+	//stm: @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -299,6 +308,10 @@ func TestChangeHandlerFromProvingToSubmittingNoHeadChange(t *testing.T) {
 // proofs generated we should not submit anything to chain but submit state
 // should move to completed
 func TestChangeHandlerFromProvingEmptyProofsToComplete(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_006
+	//stm: @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -349,6 +362,9 @@ func TestChangeHandlerFromProvingEmptyProofsToComplete(t *testing.T) {
 // TestChangeHandlerDontStartUntilProvingPeriod tests that the handler
 // ignores updates until the proving period has been reached.
 func TestChangeHandlerDontStartUntilProvingPeriod(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -387,6 +403,9 @@ func TestChangeHandlerDontStartUntilProvingPeriod(t *testing.T) {
 // TestChangeHandlerStartProvingNextDeadline verifies that the proof handler
 // starts proving the next deadline after the current one
 func TestChangeHandlerStartProvingNextDeadline(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -436,6 +455,10 @@ func TestChangeHandlerStartProvingNextDeadline(t *testing.T) {
 // TestChangeHandlerProvingRounds verifies we can generate several rounds of
 // proofs as the chain head advances
 func TestChangeHandlerProvingRounds(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_003, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_005
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -506,6 +529,9 @@ func TestChangeHandlerProvingRounds(t *testing.T) {
 // TestChangeHandlerProvingErrorRecovery verifies that the proof handler
 // recovers correctly from an error
 func TestChangeHandlerProvingErrorRecovery(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -547,6 +573,10 @@ func TestChangeHandlerProvingErrorRecovery(t *testing.T) {
 // TestChangeHandlerSubmitErrorRecovery verifies that the submit handler
 // recovers correctly from an error
 func TestChangeHandlerSubmitErrorRecovery(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -616,6 +646,9 @@ func TestChangeHandlerSubmitErrorRecovery(t *testing.T) {
 // TestChangeHandlerProveExpiry verifies that the prove handler
 // behaves correctly on expiry
 func TestChangeHandlerProveExpiry(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -654,6 +687,9 @@ func TestChangeHandlerProveExpiry(t *testing.T) {
 // TestChangeHandlerSubmitExpiry verifies that the submit handler
 // behaves correctly on expiry
 func TestChangeHandlerSubmitExpiry(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -717,6 +753,9 @@ func TestChangeHandlerSubmitExpiry(t *testing.T) {
 // TestChangeHandlerProveRevert verifies that the prove handler
 // behaves correctly on revert
 func TestChangeHandlerProveRevert(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -753,6 +792,10 @@ func TestChangeHandlerProveRevert(t *testing.T) {
 // TestChangeHandlerSubmittingRevert verifies that the submit handler
 // behaves correctly when there's a revert from the submitting state
 func TestChangeHandlerSubmittingRevert(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -824,6 +867,10 @@ func TestChangeHandlerSubmittingRevert(t *testing.T) {
 // TestChangeHandlerSubmitCompleteRevert verifies that the submit handler
 // behaves correctly when there's a revert from the submit complete state
 func TestChangeHandlerSubmitCompleteRevert(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -885,6 +932,10 @@ func TestChangeHandlerSubmitCompleteRevert(t *testing.T) {
 // TestChangeHandlerSubmitRevertTwoEpochs verifies that the submit handler
 // behaves correctly when the revert is two epochs deep
 func TestChangeHandlerSubmitRevertTwoEpochs(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -986,6 +1037,10 @@ func TestChangeHandlerSubmitRevertTwoEpochs(t *testing.T) {
 // behaves correctly when the revert is two epochs deep and the advance is
 // to a lower height than before
 func TestChangeHandlerSubmitRevertAdvanceLess(t *testing.T) {
+	//stm: @WDPOST_CHANGE_HANDLER_START_001, @WDPOST_CHANGE_HANDLER_UPDATE_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_001, @WDPOST_SUBMIT_HANDLER_PROCESS_HEAD_CHANGE_PW_001
+	//stm: @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_004, @WDPOST_SUBMIT_HANDLER_SUBMIT_IF_READY_002, @WDPOST_PROVE_HANDLER_PROCESS_POST_RESULT_001
+	//stm: @WDPOST_SUBMIT_HANDLER_PROCESS_PROCESS_RESULTS_001
 	s := makeScaffolding(t)
 	mock := s.mock

@@ -1,3 +1,4 @@
+//stm: #unit
 package storage

 import (
@@ -22,6 +23,7 @@ func TestNextDeadline(t *testing.T) {
 	require.EqualValues(t, 60, di.Close)

 	for i := 1; i < 1+int(miner.WPoStPeriodDeadlines)*2; i++ {
+		//stm: @WDPOST_NEXT_DEADLINE_001
 		di = nextDeadline(di)
 		deadlineIdx = i % int(miner.WPoStPeriodDeadlines)
 		expPeriodStart := int(miner.WPoStProvingPeriod) * (i / int(miner.WPoStPeriodDeadlines))
@@ -1,3 +1,4 @@
+//stm: #unit
 package storage

 import (
@@ -176,6 +177,10 @@ func (m mockFaultTracker) CheckProvable(ctx context.Context, pp abi.RegisteredPo
 // TestWDPostDoPost verifies that doPost will send the correct number of window
 // PoST messages for a given number of partitions
 func TestWDPostDoPost(t *testing.T) {
+	//stm: @CHAIN_SYNCER_LOAD_GENESIS_001, @CHAIN_SYNCER_FETCH_TIPSET_001,
+	//stm: @CHAIN_SYNCER_START_001, @CHAIN_SYNCER_SYNC_001, @BLOCKCHAIN_BEACON_VALIDATE_BLOCK_VALUES_01
+	//stm: @CHAIN_SYNCER_COLLECT_CHAIN_001, @CHAIN_SYNCER_COLLECT_HEADERS_001, @CHAIN_SYNCER_VALIDATE_TIPSET_001
+	//stm: @CHAIN_SYNCER_NEW_PEER_HEAD_001, @CHAIN_SYNCER_VALIDATE_MESSAGE_META_001, @CHAIN_SYNCER_STOP_001
 	ctx := context.Background()
 	expectedMsgCount := 5

@@ -190,6 +195,7 @@ func TestWDPostDoPost(t *testing.T) {
 	// Work out the number of partitions that can be included in a message
 	// without exceeding the message sector limit

+	//stm: @BLOCKCHAIN_POLICY_GET_MAX_POST_PARTITIONS_001
 	partitionsPerMsg, err := policy.GetMaxPoStPartitions(network.Version13, proofType)
 	require.NoError(t, err)
 	if partitionsPerMsg > miner5.AddressedPartitionsMax {
@@ -1,3 +1,4 @@
+//stm: #unit
 package headbuffer

 import (
@@ -8,6 +9,7 @@ import (
 )

 func TestHeadBuffer(t *testing.T) {
+	//stm: @TOOLS_HEAD_BUFFER_PUSH_001, @TOOLS_HEAD_BUFFER_POP_001
 	t.Run("Straight Push through", func(t *testing.T) {
 		hb := NewHeadChangeStackBuffer(5)
 		require.Nil(t, hb.Push(&api.HeadChange{Type: "1"}))