package sealer

import (
	"bytes"
	"context"
	"io"
	"math/rand"
	"net"
	"net/http"
	"os"
	"testing"

	"github.com/gorilla/mux"
	"github.com/ipfs/go-cid"
	"github.com/ipfs/go-datastore"
	"github.com/ipfs/go-datastore/namespace"
	ds_sync "github.com/ipfs/go-datastore/sync"
	logging "github.com/ipfs/go-log/v2"
	"github.com/stretchr/testify/require"

	"github.com/filecoin-project/go-state-types/abi"
	"github.com/filecoin-project/go-statestore"

	"github.com/filecoin-project/lotus/node/config"
	"github.com/filecoin-project/lotus/storage/paths"
	"github.com/filecoin-project/lotus/storage/sealer/sealtasks"
	"github.com/filecoin-project/lotus/storage/sealer/storiface"
)

// TestPieceProviderSimpleNoRemoteWorker verifies that the ReadPiece method works
// correctly when only the miner is used and NO remote worker is involved.
func TestPieceProviderSimpleNoRemoteWorker(t *testing.T) {
	// Set up sector storage manager
	sealerCfg := config.SealerConfig{
		ParallelFetchLimit: 10,
		AllowAddPiece:      true,
		AllowPreCommit1:    true,
		AllowPreCommit2:    true,
		AllowCommit:        true,
		AllowUnseal:        true,
	}

	ppt := newPieceProviderTestHarness(t, sealerCfg, abi.RegisteredSealProof_StackedDrg8MiBV1)
	defer ppt.shutdown(t)

	// Create piece data that aligns with the piece boundaries:
	// 8 * 127 * 1024 * 8 bytes is the full unpadded size of an 8MiB sector (8MiB * 127/128).
	pieceData := generatePieceData(8 * 127 * 1024 * 8)
	size := abi.UnpaddedPieceSize(len(pieceData))
	ppt.addPiece(t, pieceData)

	// read piece
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), size,
		false, pieceData)

	// pre-commit 1
	preCommit1 := ppt.preCommit1(t)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), size))
	// read piece
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), size,
		false, pieceData)

	// pre-commit 2
	ppt.preCommit2(t, preCommit1)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), size))
	// read piece
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), size,
		false, pieceData)

	// finalize -> nil here will remove the unsealed file
	ppt.finalizeSector(t, nil)

	// check if IsUnsealed -> false
	require.False(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), size))
	// Read the piece -> will have to unseal
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), size,
		true, pieceData)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), size))
	// read the piece -> will not have to unseal
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), size,
		false, pieceData)
}
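
// TestReadPieceRemoteWorkers verifies that ReadPiece works when the sealing and
// unsealing tasks are carried out by remote workers rather than by the miner itself.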
func TestReadPieceRemoteWorkers(t *testing.T) {
	logging.SetAllLoggers(logging.LevelDebug)

	// the miner's worker can only add pieces to an unsealed sector.
	sealerCfg := config.SealerConfig{
		ParallelFetchLimit: 10,
		AllowAddPiece:      true,
		AllowPreCommit1:    false,
		AllowPreCommit2:    false,
		AllowCommit:        false,
		AllowUnseal:        false,
	}

	// test harness for an 8MiB sector.
	ppt := newPieceProviderTestHarness(t, sealerCfg, abi.RegisteredSealProof_StackedDrg8MiBV1)
	defer ppt.shutdown(t)

	// the first remote worker will ONLY help with the sealing by first fetching
	// the unsealed file from the miner.
	ppt.addRemoteWorker(t, []sealtasks.TaskType{
		sealtasks.TTPreCommit1, sealtasks.TTPreCommit2, sealtasks.TTCommit1,
		sealtasks.TTFetch, sealtasks.TTFinalize, sealtasks.TTFinalizeUnsealed,
	})

	// the second remote worker can ONLY unseal and fetch
	ppt.addRemoteWorker(t, []sealtasks.TaskType{
		sealtasks.TTUnseal, sealtasks.TTFetch,
	})

	// run the test

	// add two pieces that align with the padding/piece boundaries: each is the
	// unpadded size of a 4MiB padded piece, so together they fill the 8MiB sector.
	pd1 := generatePieceData(8 * 127 * 4 * 1024)
	pi1 := ppt.addPiece(t, pd1)
	pd1size := pi1.Size.Unpadded()

	pd2 := generatePieceData(8 * 127 * 4 * 1024)
	pi2 := ppt.addPiece(t, pd2)
	pd2size := pi2.Size.Unpadded()

	// pre-commit 1
	pC1 := ppt.preCommit1(t)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), pd1size))
	// Read the piece -> no need to unseal
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), pd1size,
		false, pd1)

	// pre-commit 2
	ppt.preCommit2(t, pC1)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), pd1size))
	// Read the piece -> no need to unseal
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), pd1size,
		false, pd1)

	// finalize the sector so we declare to the index that we have the sealed file,
	// so the unsealing worker can later look it up and fetch it if needed.
	// Sending nil here will remove all unsealed files after the sector is finalized.
	ppt.finalizeSector(t, nil)

	// check if IsUnsealed -> false
	require.False(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), pd1size))
	// Read the piece -> have to unseal since we removed the file.
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), pd1size,
		true, pd1)

	// Read the same piece again -> will NOT have to unseal.
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), pd1size, false, pd1)

	// remove the unsealed file and read again -> will have to unseal.
	ppt.removeAllUnsealedSectorFiles(t)
	// check if IsUnsealed -> false
	require.False(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(0), pd1size))
	ppt.readPiece(t, storiface.UnpaddedByteIndex(0), pd1size,
		true, pd1)

	// check if IsUnsealed -> true
	require.True(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(pd1size), pd2size))
	// Read Piece 2 -> no unsealing as it got unsealed above.
	ppt.readPiece(t, storiface.UnpaddedByteIndex(pd1size), pd2size, false, pd2)

	// remove all unsealed files -> Read Piece 2 -> will have to Unseal.
	ppt.removeAllUnsealedSectorFiles(t)

	// check if IsUnsealed -> false
	require.False(t, ppt.isUnsealed(t, storiface.UnpaddedByteIndex(pd1size), pd2size))
	ppt.readPiece(t, storiface.UnpaddedByteIndex(pd1size), pd2size, true, pd2)
}
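
// pieceProviderTestHarness wires together a sector storage Manager, a storage index,
// local and remote stores, and an HTTP server so that the PieceProvider can be
// exercised end to end against a single test sector.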
type pieceProviderTestHarness struct {
	ctx         context.Context
	index       *paths.Index
	pp          PieceProvider
	sector      storiface.SectorRef
	mgr         *Manager
	ticket      abi.SealRandomness
	commD       cid.Cid
	localStores []*paths.Local

	servers []*http.Server

	addedPieces []abi.PieceInfo
}
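
// generatePieceData returns size bytes of pseudo-random data to use as piece contents.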
func generatePieceData(size uint64) []byte {
	bz := make([]byte, size)
	rand.Read(bz)
	return bz
}
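
// newPieceProviderTestHarness builds a sector storage Manager backed by an in-memory
// index and datastore, starts an HTTP server that serves its sector files, and returns
// a harness wrapping a PieceProvider for a single test sector of the given proof type.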
func newPieceProviderTestHarness(t *testing.T, mgrConfig config.SealerConfig, sectorProofType abi.RegisteredSealProof) *pieceProviderTestHarness {
	ctx := context.Background()
	// listen on a tcp socket to create an http server later
	address := "0.0.0.0:0"
	nl, err := net.Listen("tcp", address)
	require.NoError(t, err)

	// create index, storage, local store & remote store.
	index := paths.NewIndex(nil)
	storage := newTestStorage(t)
	localStore, err := paths.NewLocal(ctx, storage, index, []string{"http://" + nl.Addr().String() + "/remote"})
	require.NoError(t, err)
	remoteStore := paths.NewRemote(localStore, index, nil, 6000, &paths.DefaultPartialFileHandler{})

	// data stores for state tracking.
	dstore := ds_sync.MutexWrap(datastore.NewMapDatastore())
	wsts := statestore.New(namespace.Wrap(dstore, datastore.NewKey("/worker/calls")))
	smsts := statestore.New(namespace.Wrap(dstore, datastore.NewKey("/stmgr/calls")))

	mgr, err := New(ctx, localStore, remoteStore, storage, index, mgrConfig, config.ProvingConfig{}, wsts, smsts)
	require.NoError(t, err)

	// start an http server on the manager to serve sector file requests.
	svc := &http.Server{
		Addr:    nl.Addr().String(),
		Handler: mgr,
	}
	go func() {
		_ = svc.Serve(nl)
	}()

	pp := NewPieceProvider(remoteStore, index, mgr)

	sector := storiface.SectorRef{
		ID: abi.SectorID{
			Miner:  100,
			Number: 10,
		},
		ProofType: sectorProofType,
	}

	ticket := abi.SealRandomness{9, 9, 9, 9, 9, 9, 9, 9}

	ppt := &pieceProviderTestHarness{
		ctx:    ctx,
		index:  index,
		pp:     pp,
		sector: sector,
		mgr:    mgr,
		ticket: ticket,
	}
	ppt.servers = append(ppt.servers, svc)
	ppt.localStores = append(ppt.localStores, localStore)
	return ppt
}
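
// addRemoteWorker starts a local worker restricted to the given task types, backed by
// its own local store and /remote fetch endpoint, and registers it with the Manager.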
func (p *pieceProviderTestHarness) addRemoteWorker(t *testing.T, tasks []sealtasks.TaskType) {
	// start an http server
	address := "0.0.0.0:0"
	nl, err := net.Listen("tcp", address)
	require.NoError(t, err)

	localStore, err := paths.NewLocal(p.ctx, newTestStorage(t), p.index, []string{"http://" + nl.Addr().String() + "/remote"})
	require.NoError(t, err)

	fh := &paths.FetchHandler{
		Local:     localStore,
		PfHandler: &paths.DefaultPartialFileHandler{},
	}

	mux := mux.NewRouter()
	mux.PathPrefix("/remote").HandlerFunc(fh.ServeHTTP)
	svc := &http.Server{
		Addr:    nl.Addr().String(),
		Handler: mux,
	}

	go func() {
		_ = svc.Serve(nl)
	}()

	remote := paths.NewRemote(localStore, p.index, nil, 1000,
		&paths.DefaultPartialFileHandler{})

	dstore := ds_sync.MutexWrap(datastore.NewMapDatastore())
	csts := statestore.New(namespace.Wrap(dstore, datastore.NewKey("/stmgr/calls")))

	worker := newLocalWorker(nil, WorkerConfig{
		TaskTypes: tasks,
	}, os.LookupEnv, remote, localStore, p.index, p.mgr, csts)

	p.servers = append(p.servers, svc)
	p.localStores = append(p.localStores, localStore)

	// register the worker with the manager
	require.NoError(t, p.mgr.AddWorker(p.ctx, worker))
}
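
// removeAllUnsealedSectorFiles deletes the unsealed copy of the test sector from every
// local store known to the harness (the miner's and every remote worker's).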
func (p *pieceProviderTestHarness) removeAllUnsealedSectorFiles(t *testing.T) {
	for i := range p.localStores {
		ls := p.localStores[i]
		require.NoError(t, ls.Remove(p.ctx, p.sector.ID, storiface.FTUnsealed, false, nil))
	}
}
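
// addPiece adds pieceData to the test sector via the Manager and records the returned
// PieceInfo so that subsequent pieces are placed at the correct offset.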
func (p *pieceProviderTestHarness) addPiece(t *testing.T, pieceData []byte) abi.PieceInfo {
	var existing []abi.UnpaddedPieceSize
	for _, pi := range p.addedPieces {
		existing = append(existing, pi.Size.Unpadded())
	}

	size := abi.UnpaddedPieceSize(len(pieceData))
	pieceInfo, err := p.mgr.AddPiece(p.ctx, p.sector, existing, size, bytes.NewReader(pieceData))
	require.NoError(t, err)

	p.addedPieces = append(p.addedPieces, pieceInfo)
	return pieceInfo
}
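
// preCommit1 runs SealPreCommit1 over all pieces added to the test sector so far.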
func (p *pieceProviderTestHarness) preCommit1(t *testing.T) storiface.PreCommit1Out {
	preCommit1, err := p.mgr.SealPreCommit1(p.ctx, p.sector, p.ticket, p.addedPieces)
	require.NoError(t, err)
	return preCommit1
}
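
// preCommit2 runs SealPreCommit2 and stores the resulting unsealed CID (commD) for
// later ReadPiece calls.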
func (p *pieceProviderTestHarness) preCommit2(t *testing.T, pc1 storiface.PreCommit1Out) {
	sectorCids, err := p.mgr.SealPreCommit2(p.ctx, p.sector, pc1)
	require.NoError(t, err)
	commD := sectorCids.Unsealed
	p.commD = commD
}
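
// isUnsealed reports whether an unsealed copy exists for the given range of the test sector.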
func (p *pieceProviderTestHarness) isUnsealed(t *testing.T, offset storiface.UnpaddedByteIndex, size abi.UnpaddedPieceSize) bool {
	b, err := p.pp.IsUnsealed(p.ctx, p.sector, offset, size)
	require.NoError(t, err)
	return b
}
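
// readPiece reads the given range through the PieceProvider, asserts whether unsealing
// was required, and checks the returned bytes against expectedBytes.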
func (p *pieceProviderTestHarness) readPiece(t *testing.T, offset storiface.UnpaddedByteIndex, size abi.UnpaddedPieceSize,
	expectedHadToUnseal bool, expectedBytes []byte) {
	rd, isUnsealed, err := p.pp.ReadPiece(p.ctx, p.sector, offset, size, p.ticket, p.commD)
	require.NoError(t, err)
	require.NotNil(t, rd)
	require.Equal(t, expectedHadToUnseal, isUnsealed)
	defer func() { _ = rd.Close() }()

	// Make sure the input matches the output
	readData, err := io.ReadAll(rd)
	require.NoError(t, err)
	require.Equal(t, expectedBytes, readData)
}
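
// finalizeSector releases unsealed ranges (a nil keepUnseal drops all unsealed data)
// and then finalizes the test sector via the Manager.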
func (p *pieceProviderTestHarness) finalizeSector(t *testing.T, keepUnseal []storiface.Range) {
	require.NoError(t, p.mgr.ReleaseUnsealed(p.ctx, p.sector, keepUnseal))
	require.NoError(t, p.mgr.FinalizeSector(p.ctx, p.sector))
}
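
// shutdown stops every HTTP server started by the harness.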
func (p *pieceProviderTestHarness) shutdown(t *testing.T) {
	for _, svc := range p.servers {
		s := svc
		require.NoError(t, s.Shutdown(p.ctx))
	}
}