Merge branch 'release/v1.20.0' into iand/issue-9849-config

commit e73dc3eb74
Ian Davis, 2023-01-26 14:45:47 +00:00
69 changed files with 2974 additions and 1742 deletions

View File

@ -328,56 +328,6 @@ jobs:
- run: ./scripts/generate-checksums.sh - run: ./scripts/generate-checksums.sh
- run: ./scripts/publish-checksums.sh - run: ./scripts/publish-checksums.sh
build-appimage:
machine:
image: ubuntu-2004:202111-02
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- run:
name: Update Go
command: |
sudo rm -rf /usr/local/go && \
curl -L https://golang.org/dl/go`cat GO_VERSION_MIN`.linux-amd64.tar.gz -o /tmp/go.tar.gz && \
sudo tar -C /usr/local -xvf /tmp/go.tar.gz
- run: go version
- run:
name: install appimage-builder
command: |
# appimage-builder requires /dev/snd to exist. It creates containers during the testing phase
# that pass sound devices from the host to the testing container. (hard coded!)
# https://github.com/AppImageCrafters/appimage-builder/blob/master/appimagebuilder/modules/test/execution_test.py#L54
# Circleci doesn't provide a working sound device; this is enough to fake it.
if [ ! -e /dev/snd ]
then
sudo mkdir /dev/snd
sudo mknod /dev/snd/ControlC0 c 1 2
fi
# docs: https://appimage-builder.readthedocs.io/en/latest/intro/install.html
sudo apt update
sudo apt install -y python3-pip python3-setuptools patchelf desktop-file-utils libgdk-pixbuf2.0-dev fakeroot strace
sudo curl -Lo /usr/local/bin/appimagetool https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage
sudo chmod +x /usr/local/bin/appimagetool
sudo pip3 install appimage-builder
- run:
name: install lotus dependencies
command: sudo apt install ocl-icd-opencl-dev libhwloc-dev
- run:
name: build appimage
command: |
sed -i "s/version: latest/version: ${CIRCLE_TAG:-latest}/" AppImageBuilder.yml
make appimage
- run: |
mkdir -p /tmp/workspace/appimage && \
mv Lotus-*.AppImage /tmp/workspace/appimage/
- persist_to_workspace:
root: /tmp/workspace
paths:
- appimage
gofmt: gofmt:
executor: golang executor: golang
steps: steps:
@ -444,72 +394,6 @@ jobs:
lint-all: lint-all:
<<: *lint <<: *lint
publish:
description: publish binary artifacts
executor: ubuntu
parameters:
linux:
default: false
description: publish linux binaries?
type: boolean
appimage:
default: false
description: publish appimage binaries?
type: boolean
steps:
- run:
name: Install git jq curl
command: apt update && apt install -y git jq curl sudo
- checkout
- git_fetch_all_tags
- checkout
- install_ipfs
- attach_workspace:
at: /tmp/workspace
- when:
condition: << parameters.linux >>
steps:
- run: ./scripts/build-arch-bundle.sh linux
- run: ./scripts/publish-arch-release.sh linux
- when:
condition: << parameters.appimage >>
steps:
- run: ./scripts/build-appimage-bundle.sh
- run: ./scripts/publish-arch-release.sh appimage
publish-snapcraft:
description: build and push snapcraft
machine:
image: ubuntu-2004:202104-01
resource_class: 2xlarge
parameters:
channel:
type: string
default: "edge"
description: snapcraft channel
snap-name:
type: string
default: 'lotus-filecoin'
description: name of snap in snap store
steps:
- checkout
- run:
name: Install snapcraft
command: sudo snap install snapcraft --classic
- run:
name: Build << parameters.snap-name >> snap
command: |
if [ "<< parameters.snap-name >>" != 'lotus-filecoin' ]; then
cat snap/snapcraft.yaml | sed 's/lotus-filecoin/lotus/' > edited-snapcraft.yaml
mv edited-snapcraft.yaml snap/snapcraft.yaml
fi
snapcraft --use-lxd --debug
- run:
name: Publish snap to << parameters.channel >> channel
shell: /bin/bash -o pipefail
command: |
snapcraft upload *.snap --release << parameters.channel >>
build-docker: build-docker:
description: > description: >
Publish to Dockerhub Publish to Dockerhub
@ -550,7 +434,7 @@ jobs:
equal: [ mainnet, <<parameters.network>> ]
steps:
- when:
-   condition: <parameters.push>>
+   condition: <<parameters.push>>
steps:
- docker/build:
image: filecoin/<<parameters.image>>
@ -561,7 +445,7 @@ jobs:
command: |
docker push filecoin/<<parameters.image>>:<<parameters.channel>>
if [[ ! -z $CIRCLE_SHA ]]; then
-   docker image tag filecoin/<<parameters.image>>:<<parameters.channel>>> filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
+   docker image tag filecoin/<<parameters.image>>:<<parameters.channel>> filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
docker push filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
fi
if [[ ! -z $CIRCLE_TAG ]]; then
@ -730,6 +614,11 @@ workflows:
suite: itest-eth_balance suite: itest-eth_balance
target: "./itests/eth_balance_test.go" target: "./itests/eth_balance_test.go"
- test:
name: test-itest-eth_block_hash
suite: itest-eth_block_hash
target: "./itests/eth_block_hash_test.go"
- test: - test:
name: test-itest-eth_deploy name: test-itest-eth_deploy
suite: itest-eth_deploy suite: itest-eth_deploy
@ -1063,71 +952,6 @@ workflows:
branches: branches:
only: only:
- /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/ - /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/
- build-appimage:
name: "Build AppImage"
filters:
branches:
only:
- /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/
tags:
only:
- /^v\d+\.\d+\.\d+(-rc\d+)?$/
- publish:
name: "Publish AppImage"
appimage: true
requires:
- "Build AppImage"
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+(-rc\d+)?$/
- publish-snapcraft:
name: "Publish Snapcraft (lotus / stable)"
channel: stable
snap-name: lotus
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+$/
- publish-snapcraft:
name: "Publish Snapcraft (lotus / candidate)"
channel: candidate
snap-name: lotus
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+-rc\d+$/
- publish-snapcraft:
name: "Publish Snapcraft (lotus-filecoin / stable)"
channel: stable
snap-name: lotus-filecoin
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+$/
- publish-snapcraft:
name: "Publish Snapcraft (lotus-filecoin / candidate)"
channel: candidate
snap-name: lotus-filecoin
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+-rc\d+$/
- build-docker: - build-docker:
name: "Docker push (lotus-all-in-one / stable / mainnet)" name: "Docker push (lotus-all-in-one / stable / mainnet)"
image: lotus-all-in-one image: lotus-all-in-one
@ -1363,14 +1187,6 @@ workflows:
only: only:
- master - master
jobs: jobs:
- publish-snapcraft:
name: "Publish Snapcraft (lotus / edge)"
channel: edge
snap-name: lotus
- publish-snapcraft:
name: "Publish Snapcraft (lotus-filecoin / edge)"
channel: edge
snap-name: lotus-filecoin
- build-docker: - build-docker:
name: "Docker (lotus-all-in-one / nightly / mainnet)" name: "Docker (lotus-all-in-one / nightly / mainnet)"
image: lotus-all-in-one image: lotus-all-in-one

View File

@ -107,13 +107,11 @@ func main() {
// form the input data. // form the input data.
type data struct { type data struct {
Networks []string Networks []string
SnapNames []string
ItestFiles []string ItestFiles []string
UnitSuites map[string]string UnitSuites map[string]string
} }
in := data{ in := data{
Networks: []string{"mainnet", "butterflynet", "calibnet", "debug"}, Networks: []string{"mainnet", "butterflynet", "calibnet", "debug"},
SnapNames: []string{"lotus", "lotus-filecoin"},
ItestFiles: itests, ItestFiles: itests,
UnitSuites: func() map[string]string { UnitSuites: func() map[string]string {
ret := make(map[string]string) ret := make(map[string]string)
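The generated CircleCI config above is rendered from a template against this `data` struct, so removing `SnapNames` is what drops the snapcraft job loops from the workflow. A minimal sketch of how a `[[ ]]`-delimited Go template expands the `Networks` slice into per-network jobs; the template body and output below are illustrative, not the real template.yml:

```go
package main

import (
	"os"
	"text/template"
)

// Minimal sketch: expand a [[ ]]-delimited template over a Networks slice,
// mirroring the data struct used by the generator above. The template body
// here is illustrative only.
func main() {
	type data struct {
		Networks []string
	}
	in := data{Networks: []string{"mainnet", "butterflynet", "calibnet", "debug"}}

	tmpl := template.Must(template.New("ci").Delims("[[", "]]").Parse(`
[[- range .Networks]]
  - build-docker:
      name: "Docker push (lotus-all-in-one / stable / [[.]])"
[[- end]]
`))
	if err := tmpl.Execute(os.Stdout, in); err != nil {
		panic(err)
	}
}
```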

View File

@ -328,56 +328,6 @@ jobs:
- run: ./scripts/generate-checksums.sh - run: ./scripts/generate-checksums.sh
- run: ./scripts/publish-checksums.sh - run: ./scripts/publish-checksums.sh
build-appimage:
machine:
image: ubuntu-2004:202111-02
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- run:
name: Update Go
command: |
sudo rm -rf /usr/local/go && \
curl -L https://golang.org/dl/go`cat GO_VERSION_MIN`.linux-amd64.tar.gz -o /tmp/go.tar.gz && \
sudo tar -C /usr/local -xvf /tmp/go.tar.gz
- run: go version
- run:
name: install appimage-builder
command: |
# appimage-builder requires /dev/snd to exist. It creates containers during the testing phase
# that pass sound devices from the host to the testing container. (hard coded!)
# https://github.com/AppImageCrafters/appimage-builder/blob/master/appimagebuilder/modules/test/execution_test.py#L54
# Circleci doesn't provide a working sound device; this is enough to fake it.
if [ ! -e /dev/snd ]
then
sudo mkdir /dev/snd
sudo mknod /dev/snd/ControlC0 c 1 2
fi
# docs: https://appimage-builder.readthedocs.io/en/latest/intro/install.html
sudo apt update
sudo apt install -y python3-pip python3-setuptools patchelf desktop-file-utils libgdk-pixbuf2.0-dev fakeroot strace
sudo curl -Lo /usr/local/bin/appimagetool https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage
sudo chmod +x /usr/local/bin/appimagetool
sudo pip3 install appimage-builder
- run:
name: install lotus dependencies
command: sudo apt install ocl-icd-opencl-dev libhwloc-dev
- run:
name: build appimage
command: |
sed -i "s/version: latest/version: ${CIRCLE_TAG:-latest}/" AppImageBuilder.yml
make appimage
- run: |
mkdir -p /tmp/workspace/appimage && \
mv Lotus-*.AppImage /tmp/workspace/appimage/
- persist_to_workspace:
root: /tmp/workspace
paths:
- appimage
gofmt: gofmt:
executor: golang executor: golang
steps: steps:
@ -444,72 +394,6 @@ jobs:
lint-all: lint-all:
<<: *lint <<: *lint
publish:
description: publish binary artifacts
executor: ubuntu
parameters:
linux:
default: false
description: publish linux binaries?
type: boolean
appimage:
default: false
description: publish appimage binaries?
type: boolean
steps:
- run:
name: Install git jq curl
command: apt update && apt install -y git jq curl sudo
- checkout
- git_fetch_all_tags
- checkout
- install_ipfs
- attach_workspace:
at: /tmp/workspace
- when:
condition: << parameters.linux >>
steps:
- run: ./scripts/build-arch-bundle.sh linux
- run: ./scripts/publish-arch-release.sh linux
- when:
condition: << parameters.appimage >>
steps:
- run: ./scripts/build-appimage-bundle.sh
- run: ./scripts/publish-arch-release.sh appimage
publish-snapcraft:
description: build and push snapcraft
machine:
image: ubuntu-2004:202104-01
resource_class: 2xlarge
parameters:
channel:
type: string
default: "edge"
description: snapcraft channel
snap-name:
type: string
default: 'lotus-filecoin'
description: name of snap in snap store
steps:
- checkout
- run:
name: Install snapcraft
command: sudo snap install snapcraft --classic
- run:
name: Build << parameters.snap-name >> snap
command: |
if [ "<< parameters.snap-name >>" != 'lotus-filecoin' ]; then
cat snap/snapcraft.yaml | sed 's/lotus-filecoin/lotus/' > edited-snapcraft.yaml
mv edited-snapcraft.yaml snap/snapcraft.yaml
fi
snapcraft --use-lxd --debug
- run:
name: Publish snap to << parameters.channel >> channel
shell: /bin/bash -o pipefail
command: |
snapcraft upload *.snap --release << parameters.channel >>
build-docker: build-docker:
description: > description: >
Publish to Dockerhub Publish to Dockerhub
@ -550,7 +434,7 @@ jobs:
equal: [ mainnet, <<parameters.network>> ]
steps:
- when:
-   condition: <parameters.push>>
+   condition: <<parameters.push>>
steps:
- docker/build:
image: filecoin/<<parameters.image>>
@ -561,7 +445,7 @@ jobs:
command: |
docker push filecoin/<<parameters.image>>:<<parameters.channel>>
if [["[[ ! -z $CIRCLE_SHA ]]"]]; then
-   docker image tag filecoin/<<parameters.image>>:<<parameters.channel>>> filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
+   docker image tag filecoin/<<parameters.image>>:<<parameters.channel>> filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
docker push filecoin/<<parameters.image>>:"${CIRCLE_SHA:0:7}"
fi
if [["[[ ! -z $CIRCLE_TAG ]]"]]; then
@ -698,51 +582,6 @@ workflows:
branches: branches:
only: only:
- /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/ - /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/
- build-appimage:
name: "Build AppImage"
filters:
branches:
only:
- /^release\/v\d+\.\d+\.\d+(-rc\d+)?$/
tags:
only:
- /^v\d+\.\d+\.\d+(-rc\d+)?$/
- publish:
name: "Publish AppImage"
appimage: true
requires:
- "Build AppImage"
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+(-rc\d+)?$/
[[- range .SnapNames]]
- publish-snapcraft:
name: "Publish Snapcraft ([[.]] / stable)"
channel: stable
snap-name: [[.]]
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+$/
- publish-snapcraft:
name: "Publish Snapcraft ([[.]] / candidate)"
channel: candidate
snap-name: [[.]]
filters:
branches:
ignore:
- /.*/
tags:
only:
- /^v\d+\.\d+\.\d+-rc\d+$/
[[- end]]
[[- range .Networks]] [[- range .Networks]]
- build-docker: - build-docker:
name: "Docker push (lotus-all-in-one / stable / [[.]])" name: "Docker push (lotus-all-in-one / stable / [[.]])"
@ -845,12 +684,6 @@ workflows:
only: only:
- master - master
jobs: jobs:
[[- range .SnapNames]]
- publish-snapcraft:
name: "Publish Snapcraft ([[.]] / edge)"
channel: edge
snap-name: [[.]]
[[- end]]
[[- range .Networks]] [[- range .Networks]]
- build-docker: - build-docker:
name: "Docker (lotus-all-in-one / nightly / [[.]])" name: "Docker (lotus-all-in-one / nightly / [[.]])"

View File

@ -84,12 +84,6 @@ butterflynet: build-devnets
interopnet: GOFLAGS+=-tags=interopnet interopnet: GOFLAGS+=-tags=interopnet
interopnet: build-devnets interopnet: build-devnets
wallabynet: GOFLAGS+=-tags=wallabynet
wallabynet: build-devnets
hyperspacenet: GOFLAGS+=-tags=hyperspacenet
hyperspacenet: build-devnets
lotus: $(BUILD_DEPS) lotus: $(BUILD_DEPS)
rm -f lotus rm -f lotus
$(GOCC) build $(GOFLAGS) -o lotus ./cmd/lotus $(GOCC) build $(GOFLAGS) -o lotus ./cmd/lotus

View File

@ -836,6 +836,9 @@ type FullNode interface {
// Unsubscribe from a websocket subscription
EthUnsubscribe(ctx context.Context, id ethtypes.EthSubscriptionID) (bool, error) //perm:write
+ // Returns the client version
+ Web3ClientVersion(ctx context.Context) (string, error) //perm:read
// CreateBackup creates node backup onder the specified file name. The
// method requires that the lotus daemon is running with the
// LOTUS_BACKUP_BASE_PATH environment variable set to some path, and that

View File

@ -41,4 +41,6 @@ func CreateEthRPCAliases(as apitypes.Aliaser) {
as.AliasMethod("net_version", "Filecoin.NetVersion")
as.AliasMethod("net_listening", "Filecoin.NetListening")
+ as.AliasMethod("web3_clientVersion", "Filecoin.Web3ClientVersion")
}
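With this alias registered, Ethereum tooling can call the method under its `web3_clientVersion` name and the request is routed to `Filecoin.Web3ClientVersion`. A minimal sketch of a raw JSON-RPC call against a local lotus daemon; the endpoint URL/port and the absence of an auth token header are assumptions about the local setup:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Minimal sketch: call the newly aliased web3_clientVersion method over raw
// JSON-RPC. Assumes a lotus daemon listening on the default API port; adjust
// the URL (and add an Authorization header) for your setup.
func main() {
	reqBody, _ := json.Marshal(map[string]interface{}{
		"jsonrpc": "2.0",
		"id":      1,
		"method":  "web3_clientVersion", // resolves to Filecoin.Web3ClientVersion
		"params":  []interface{}{},
	})
	resp, err := http.Post("http://127.0.0.1:1234/rpc/v1", "application/json", bytes.NewReader(reqBody))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Result string `json:"result"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("client version:", out.Result)
}
```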

View File

@ -4096,3 +4096,18 @@ func (mr *MockFullNodeMockRecorder) WalletVerify(arg0, arg1, arg2, arg3 interfac
mr.mock.ctrl.T.Helper() mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WalletVerify", reflect.TypeOf((*MockFullNode)(nil).WalletVerify), arg0, arg1, arg2, arg3) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WalletVerify", reflect.TypeOf((*MockFullNode)(nil).WalletVerify), arg0, arg1, arg2, arg3)
} }
// Web3ClientVersion mocks base method.
func (m *MockFullNode) Web3ClientVersion(arg0 context.Context) (string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Web3ClientVersion", arg0)
ret0, _ := ret[0].(string)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// Web3ClientVersion indicates an expected call of Web3ClientVersion.
func (mr *MockFullNodeMockRecorder) Web3ClientVersion(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Web3ClientVersion", reflect.TypeOf((*MockFullNode)(nil).Web3ClientVersion), arg0)
}
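The regenerated mock lets tests stub the new method in the usual gomock way; a small sketch, assuming the generated mocks live at `github.com/filecoin-project/lotus/api/mocks` and using an arbitrary return value:

```go
package mocks_test

import (
	"context"
	"testing"

	"github.com/golang/mock/gomock"

	"github.com/filecoin-project/lotus/api/mocks"
)

// Illustrative test sketch using the generated MockFullNode; the expected
// version string is arbitrary.
func TestWeb3ClientVersionMock(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	node := mocks.NewMockFullNode(ctrl)
	node.EXPECT().Web3ClientVersion(gomock.Any()).Return("lotus-test", nil)

	v, err := node.Web3ClientVersion(context.Background())
	if err != nil {
		t.Fatal(err)
	}
	if v != "lotus-test" {
		t.Fatalf("unexpected version %q", v)
	}
}
```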

View File

@ -582,6 +582,8 @@ type FullNodeStruct struct {
WalletValidateAddress func(p0 context.Context, p1 string) (address.Address, error) `perm:"read"` WalletValidateAddress func(p0 context.Context, p1 string) (address.Address, error) `perm:"read"`
WalletVerify func(p0 context.Context, p1 address.Address, p2 []byte, p3 *crypto.Signature) (bool, error) `perm:"read"` WalletVerify func(p0 context.Context, p1 address.Address, p2 []byte, p3 *crypto.Signature) (bool, error) `perm:"read"`
Web3ClientVersion func(p0 context.Context) (string, error) `perm:"read"`
} }
} }
@ -3936,6 +3938,17 @@ func (s *FullNodeStub) WalletVerify(p0 context.Context, p1 address.Address, p2 [
return false, ErrNotSupported return false, ErrNotSupported
} }
func (s *FullNodeStruct) Web3ClientVersion(p0 context.Context) (string, error) {
if s.Internal.Web3ClientVersion == nil {
return "", ErrNotSupported
}
return s.Internal.Web3ClientVersion(p0)
}
func (s *FullNodeStub) Web3ClientVersion(p0 context.Context) (string, error) {
return "", ErrNotSupported
}
func (s *GatewayStruct) ChainGetBlockMessages(p0 context.Context, p1 cid.Cid) (*BlockMessages, error) { func (s *GatewayStruct) ChainGetBlockMessages(p0 context.Context, p1 cid.Cid) (*BlockMessages, error) {
if s.Internal.ChainGetBlockMessages == nil { if s.Internal.ChainGetBlockMessages == nil {
return nil, ErrNotSupported return nil, ErrNotSupported

View File

@ -1,6 +1,6 @@
#!/bin/bash
- NETWORKS=(devnet mainnet caterpillarnet butterflynet testing testing-fake-proofs calibrationnet hyperspace)
+ NETWORKS=(devnet mainnet caterpillarnet butterflynet testing testing-fake-proofs calibrationnet)
set -e

View File

@ -1,4 +0,0 @@
/dns4/de0.bootstrap.wallaby.network/tcp/1337/p2p/12D3KooWHAvUVk5XuxSwi2dNLWbTDDRSGeHxMuWdQ3SQpRuNHbLz
/dns4/de1.bootstrap.wallaby.network/tcp/1337/p2p/12D3KooWBRqtxhJCtiLmCwKgAQozJtdGinEDdJGoS5oHw7vCjMGc
/dns4/ca0.bootstrap.wallaby.network/tcp/1337/p2p/12D3KooWCApBpUk7EX9pmEfyky1gKC6N2KJ74S1AwFfvnkDqw3pK
/dns4/sg0.bootstrap.wallaby.network/tcp/1337/p2p/12D3KooWLnYqr4hRoNHBJQVXsFGkDoKuoVfw5R2ASw1bHzrWU5Px

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,94 +0,0 @@
//go:build hyperspacenet
// +build hyperspacenet
package build
import (
"github.com/ipfs/go-cid"
"github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/abi"
actorstypes "github.com/filecoin-project/go-state-types/actors"
"github.com/filecoin-project/go-state-types/network"
builtin2 "github.com/filecoin-project/specs-actors/v2/actors/builtin"
"github.com/filecoin-project/lotus/chain/actors/policy"
)
var NetworkBundle = "hyperspace"
var BundleOverrides map[actorstypes.Version]string
var ActorDebugging = false
const BootstrappersFile = "hyperspacenet.pi"
const GenesisFile = "hyperspacenet.car"
const GenesisNetworkVersion = network.Version18
var UpgradeBreezeHeight = abi.ChainEpoch(-1)
const BreezeGasTampingDuration = 120
var UpgradeSmokeHeight = abi.ChainEpoch(-1)
var UpgradeIgnitionHeight = abi.ChainEpoch(-2)
var UpgradeRefuelHeight = abi.ChainEpoch(-3)
var UpgradeTapeHeight = abi.ChainEpoch(-4)
var UpgradeAssemblyHeight = abi.ChainEpoch(-5)
var UpgradeLiftoffHeight = abi.ChainEpoch(-6)
var UpgradeKumquatHeight = abi.ChainEpoch(-7)
var UpgradeCalicoHeight = abi.ChainEpoch(-9)
var UpgradePersianHeight = abi.ChainEpoch(-10)
var UpgradeOrangeHeight = abi.ChainEpoch(-11)
var UpgradeClausHeight = abi.ChainEpoch(-12)
var UpgradeTrustHeight = abi.ChainEpoch(-13)
var UpgradeNorwegianHeight = abi.ChainEpoch(-14)
var UpgradeTurboHeight = abi.ChainEpoch(-15)
var UpgradeHyperdriveHeight = abi.ChainEpoch(-16)
var UpgradeChocolateHeight = abi.ChainEpoch(-17)
var UpgradeOhSnapHeight = abi.ChainEpoch(-18)
var UpgradeSkyrHeight = abi.ChainEpoch(-19)
var UpgradeSharkHeight = abi.ChainEpoch(-20)
var UpgradeHyggeHeight = abi.ChainEpoch(-21)
var DrandSchedule = map[abi.ChainEpoch]DrandEnum{
0: DrandMainnet,
}
var SupportedProofTypes = []abi.RegisteredSealProof{
abi.RegisteredSealProof_StackedDrg512MiBV1,
abi.RegisteredSealProof_StackedDrg32GiBV1,
abi.RegisteredSealProof_StackedDrg64GiBV1,
}
var ConsensusMinerMinPower = abi.NewStoragePower(16 << 30)
var MinVerifiedDealSize = abi.NewStoragePower(1 << 20)
var PreCommitChallengeDelay = abi.ChainEpoch(10)
func init() {
policy.SetSupportedProofTypes(SupportedProofTypes...)
policy.SetConsensusMinerMinPower(ConsensusMinerMinPower)
policy.SetMinVerifiedDealSize(MinVerifiedDealSize)
policy.SetPreCommitChallengeDelay(PreCommitChallengeDelay)
BuildType = BuildHyperspacenet
SetAddressNetwork(address.Testnet)
Devnet = true
}
const BlockDelaySecs = uint64(builtin2.EpochDurationSeconds)
const PropagationDelaySecs = uint64(6)
// BootstrapPeerThreshold is the minimum number peers we need to track for a sync worker to start
const BootstrapPeerThreshold = 2
// ChainId defines the chain ID used in the Ethereum JSON-RPC endpoint.
// As per https://github.com/ethereum-lists/chains
const Eip155ChainId = 3141
var WhitelistedBlock = cid.Undef

View File

@ -1,5 +1,5 @@
- //go:build !debug && !2k && !testground && !calibnet && !butterflynet && !interopnet && !wallabynet && !hyperspacenet
- // +build !debug,!2k,!testground,!calibnet,!butterflynet,!interopnet,!wallabynet,!hyperspacenet
+ //go:build !debug && !2k && !testground && !calibnet && !butterflynet && !interopnet
+ // +build !debug,!2k,!testground,!calibnet,!butterflynet,!interopnet
package build

View File

@ -42,9 +42,6 @@ var (
AllowableClockDriftSecs = uint64(1) AllowableClockDriftSecs = uint64(1)
Finality = policy.ChainFinality
ForkLengthThreshold = Finality
SlashablePowerDelay = 20 SlashablePowerDelay = 20
InteractivePoRepConfidence = 6 InteractivePoRepConfidence = 6
@ -130,6 +127,9 @@ var (
GenesisFile = "" GenesisFile = ""
) )
const Finality = policy.ChainFinality
const ForkLengthThreshold = Finality
const BootstrapPeerThreshold = 1 const BootstrapPeerThreshold = 1
// ChainId defines the chain ID used in the Ethereum JSON-RPC endpoint. // ChainId defines the chain ID used in the Ethereum JSON-RPC endpoint.

View File

@ -1,94 +0,0 @@
//go:build wallabynet
// +build wallabynet
package build
import (
"github.com/ipfs/go-cid"
"github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/abi"
actorstypes "github.com/filecoin-project/go-state-types/actors"
"github.com/filecoin-project/go-state-types/network"
builtin2 "github.com/filecoin-project/specs-actors/v2/actors/builtin"
"github.com/filecoin-project/lotus/chain/actors/policy"
)
var NetworkBundle = "wallaby"
var BundleOverrides map[actorstypes.Version]string
var ActorDebugging = false
const BootstrappersFile = "wallabynet.pi"
const GenesisFile = "wallabynet.car"
const GenesisNetworkVersion = network.Version18
var UpgradeBreezeHeight = abi.ChainEpoch(-1)
const BreezeGasTampingDuration = 120
var UpgradeSmokeHeight = abi.ChainEpoch(-1)
var UpgradeIgnitionHeight = abi.ChainEpoch(-2)
var UpgradeRefuelHeight = abi.ChainEpoch(-3)
var UpgradeTapeHeight = abi.ChainEpoch(-4)
var UpgradeAssemblyHeight = abi.ChainEpoch(-5)
var UpgradeLiftoffHeight = abi.ChainEpoch(-6)
var UpgradeKumquatHeight = abi.ChainEpoch(-7)
var UpgradeCalicoHeight = abi.ChainEpoch(-9)
var UpgradePersianHeight = abi.ChainEpoch(-10)
var UpgradeOrangeHeight = abi.ChainEpoch(-11)
var UpgradeClausHeight = abi.ChainEpoch(-12)
var UpgradeTrustHeight = abi.ChainEpoch(-13)
var UpgradeNorwegianHeight = abi.ChainEpoch(-14)
var UpgradeTurboHeight = abi.ChainEpoch(-15)
var UpgradeHyperdriveHeight = abi.ChainEpoch(-16)
var UpgradeChocolateHeight = abi.ChainEpoch(-17)
var UpgradeOhSnapHeight = abi.ChainEpoch(-18)
var UpgradeSkyrHeight = abi.ChainEpoch(-19)
var UpgradeSharkHeight = abi.ChainEpoch(-20)
var UpgradeHyggeHeight = abi.ChainEpoch(-21)
var DrandSchedule = map[abi.ChainEpoch]DrandEnum{
0: DrandMainnet,
}
var SupportedProofTypes = []abi.RegisteredSealProof{
abi.RegisteredSealProof_StackedDrg512MiBV1,
abi.RegisteredSealProof_StackedDrg32GiBV1,
abi.RegisteredSealProof_StackedDrg64GiBV1,
}
var ConsensusMinerMinPower = abi.NewStoragePower(16 << 30)
var MinVerifiedDealSize = abi.NewStoragePower(1 << 20)
var PreCommitChallengeDelay = abi.ChainEpoch(10)
func init() {
policy.SetSupportedProofTypes(SupportedProofTypes...)
policy.SetConsensusMinerMinPower(ConsensusMinerMinPower)
policy.SetMinVerifiedDealSize(MinVerifiedDealSize)
policy.SetPreCommitChallengeDelay(PreCommitChallengeDelay)
BuildType = BuildWallabynet
SetAddressNetwork(address.Testnet)
Devnet = true
}
const BlockDelaySecs = uint64(builtin2.EpochDurationSeconds)
const PropagationDelaySecs = uint64(6)
// BootstrapPeerThreshold is the minimum number peers we need to track for a sync worker to start
const BootstrapPeerThreshold = 2
// ChainId defines the chain ID used in the Ethereum JSON-RPC endpoint.
// As per https://github.com/ethereum-lists/chains
const Eip155ChainId = 31415
var WhitelistedBlock = cid.Undef

View File

@ -6,15 +6,13 @@ var CurrentCommit string
var BuildType int var BuildType int
const ( const (
BuildDefault = 0 BuildDefault = 0
BuildMainnet = 0x1 BuildMainnet = 0x1
Build2k = 0x2 Build2k = 0x2
BuildDebug = 0x3 BuildDebug = 0x3
BuildCalibnet = 0x4 BuildCalibnet = 0x4
BuildInteropnet = 0x5 BuildInteropnet = 0x5
BuildButterflynet = 0x7 BuildButterflynet = 0x7
BuildWallabynet = 0x8
BuildHyperspacenet = 0x9
) )
func BuildTypeString() string { func BuildTypeString() string {
@ -33,10 +31,6 @@ func BuildTypeString() string {
return "+interopnet" return "+interopnet"
case BuildButterflynet: case BuildButterflynet:
return "+butterflynet" return "+butterflynet"
case BuildWallabynet:
return "+wallabynet"
case BuildHyperspacenet:
return "+hyperspacenet"
default: default:
return "+huh?" return "+huh?"
} }

View File

@ -20,7 +20,12 @@ import (
"github.com/filecoin-project/lotus/chain/types"
)
- const indexed uint8 = 0x01
+ func isIndexedValue(b uint8) bool {
+ // currently we mark the full entry as indexed if either the key
+ // or the value are indexed; in the future we will need finer-grained
+ // management of indices
+ return b&(types.EventFlagIndexedKey|types.EventFlagIndexedValue) > 0
+ }
type EventFilter struct {
id types.FilterID
@ -100,9 +105,18 @@ func (f *EventFilter) CollectEvents(ctx context.Context, te *TipSetEvents, rever
continue
}
+ entries := make([]types.EventEntry, len(ev.Entries))
+ for i, entry := range ev.Entries {
+ entries[i] = types.EventEntry{
+ Flags: entry.Flags,
+ Key: entry.Key,
+ Value: entry.Value,
+ }
+ }
// event matches filter, so record it
cev := &CollectedEvent{
- Entries: ev.Entries,
+ Entries: entries,
EmitterAddr: addr,
EventIdx: evIdx,
Reverted: revert,
@ -200,7 +214,7 @@ func (f *EventFilter) matchKeys(ees []types.EventEntry) bool {
matched := map[string]bool{}
for _, ee := range ees {
// Skip an entry that is not indexable
- if ee.Flags&indexed != indexed {
+ if !isIndexedValue(ee.Flags) {
continue
}
@ -212,7 +226,7 @@ func (f *EventFilter) matchKeys(ees []types.EventEntry) bool {
}
wantlist, ok := f.keys[keyname]
- if !ok {
+ if !ok || len(wantlist) == 0 {
continue
}
@ -266,13 +280,13 @@ func (te *TipSetEvents) messages(ctx context.Context) ([]executedMessage, error)
}
type executedMessage struct {
- msg *types.Message
+ msg types.ChainMsg
rct *types.MessageReceipt
// events extracted from receipt
evs []*types.Event
}
- func (e *executedMessage) Message() *types.Message {
+ func (e *executedMessage) Message() types.ChainMsg {
return e.msg
}
@ -428,7 +442,7 @@ func (m *EventFilterManager) loadExecutedMessages(ctx context.Context, msgTs, rc
ems := make([]executedMessage, len(msgs))
for i := 0; i < len(msgs); i++ {
- ems[i].msg = msgs[i].VMMessage()
+ ems[i].msg = msgs[i]
var rct types.MessageReceipt
found, err := arr.Get(uint64(i), &rct)
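The new `isIndexedValue` helper treats an entry as indexable when either the indexed-key or the indexed-value bit is set, replacing the old single `indexed` constant. A tiny self-contained sketch of the bitmask check; the flag values mirror what `types.EventFlagIndexedKey`/`types.EventFlagIndexedValue` are assumed to be (0x01 and 0x02):

```go
package main

import "fmt"

// Assumed flag values, mirroring types.EventFlagIndexedKey and
// types.EventFlagIndexedValue referenced in the code above.
const (
	EventFlagIndexedKey   = 0x01
	EventFlagIndexedValue = 0x02
)

// An entry counts as indexed if either the "indexed key" or the
// "indexed value" bit is set in its flags.
func isIndexedValue(b uint8) bool {
	return b&(EventFlagIndexedKey|EventFlagIndexedValue) > 0
}

func main() {
	for _, flags := range []uint8{0x00, EventFlagIndexedKey, EventFlagIndexedValue, 0x03} {
		fmt.Printf("flags=%08b indexed=%v\n", flags, isIndexedValue(flags))
	}
}
```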

View File

@ -1,7 +1,6 @@
package filter package filter
import ( import (
"bytes"
"context" "context"
"database/sql" "database/sql"
"errors" "errors"
@ -11,7 +10,6 @@ import (
"github.com/ipfs/go-cid" "github.com/ipfs/go-cid"
_ "github.com/mattn/go-sqlite3" _ "github.com/mattn/go-sqlite3"
cbg "github.com/whyrusleeping/cbor-gen"
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
@ -153,13 +151,6 @@ func (ei *EventIndex) CollectEvents(ctx context.Context, te *TipSetEvents, rever
return xerrors.Errorf("prepare insert entry: %w", err) return xerrors.Errorf("prepare insert entry: %w", err)
} }
isIndexedValue := func(b uint8) bool {
// currently we mark the full entry as indexed if either the key
// or the value are indexed; in the future we will need finer-grained
// management of indices
return b&(types.EventFlagIndexedKey|types.EventFlagIndexedValue) > 0
}
for msgIdx, em := range ems { for msgIdx, em := range ems {
for evIdx, ev := range em.Events() { for evIdx, ev := range em.Events() {
addr, found := addressLookups[ev.Emitter] addr, found := addressLookups[ev.Emitter]
@ -198,13 +189,12 @@ func (ei *EventIndex) CollectEvents(ctx context.Context, te *TipSetEvents, rever
} }
for _, entry := range ev.Entries {
- value := decodeLogBytes(entry.Value)
_, err := stmtEntry.Exec(
lastID, // event_id
isIndexedValue(entry.Flags), // indexed
[]byte{entry.Flags}, // flags
entry.Key, // key
- value, // value
+ entry.Value, // value
)
if err != nil {
return xerrors.Errorf("exec insert entry: %w", err)
@ -220,21 +210,6 @@ func (ei *EventIndex) CollectEvents(ctx context.Context, te *TipSetEvents, rever
return nil return nil
} }
// decodeLogBytes decodes a CBOR-serialized array into its original form.
//
// This function swallows errors and returns the original array if it failed
// to decode.
func decodeLogBytes(orig []byte) []byte {
if orig == nil {
return orig
}
decoded, err := cbg.ReadByteArray(bytes.NewReader(orig), uint64(len(orig)))
if err != nil {
return orig
}
return decoded
}
// PrefillFilter fills a filter's collection of events from the historic index // PrefillFilter fills a filter's collection of events from the historic index
func (ei *EventIndex) PrefillFilter(ctx context.Context, f *EventFilter) error { func (ei *EventIndex) PrefillFilter(ctx context.Context, f *EventFilter) error {
clauses := []string{} clauses := []string{}

View File

@ -13,10 +13,13 @@ import (
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/crypto"
"github.com/filecoin-project/lotus/api" "github.com/filecoin-project/lotus/api"
"github.com/filecoin-project/lotus/chain/messagepool" "github.com/filecoin-project/lotus/chain/messagepool"
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
"github.com/filecoin-project/lotus/chain/types/ethtypes"
"github.com/filecoin-project/lotus/chain/wallet/key"
"github.com/filecoin-project/lotus/node/modules/dtypes" "github.com/filecoin-project/lotus/node/modules/dtypes"
) )
@ -66,15 +69,24 @@ func (ms *MessageSigner) SignMessage(ctx context.Context, msg *types.Message, sp
// Sign the message with the nonce // Sign the message with the nonce
msg.Nonce = nonce msg.Nonce = nonce
keyInfo, err := ms.wallet.WalletExport(ctx, msg.From)
if err != nil {
return nil, err
}
sb, err := SigningBytes(msg, key.ActSigType(keyInfo.Type))
if err != nil {
return nil, err
}
mb, err := msg.ToStorageBlock()
if err != nil {
return nil, xerrors.Errorf("serializing message: %w", err)
}
- sig, err := ms.wallet.WalletSign(ctx, msg.From, mb.Cid().Bytes(), api.MsgMeta{
+ sig, err := ms.wallet.WalletSign(ctx, msg.From, sb, api.MsgMeta{
Type: api.MTChainMsg,
Extra: mb.RawData(),
})
if err != nil {
return nil, xerrors.Errorf("failed to sign message: %w, addr=%s", err, msg.From)
}
@ -187,3 +199,19 @@ func (ms *MessageSigner) SaveNonce(ctx context.Context, addr address.Address, no
func (ms *MessageSigner) dstoreKey(addr address.Address) datastore.Key { func (ms *MessageSigner) dstoreKey(addr address.Address) datastore.Key {
return datastore.KeyWithNamespaces([]string{dsKeyActorNonce, addr.String()}) return datastore.KeyWithNamespaces([]string{dsKeyActorNonce, addr.String()})
} }
func SigningBytes(msg *types.Message, sigType crypto.SigType) ([]byte, error) {
if sigType == crypto.SigTypeDelegated {
txArgs, err := ethtypes.EthTxArgsFromMessage(msg)
if err != nil {
return nil, xerrors.Errorf("failed to reconstruct eth transaction: %w", err)
}
rlpEncodedMsg, err := txArgs.ToRlpUnsignedMsg()
if err != nil {
return nil, xerrors.Errorf("failed to repack eth rlp message: %w", err)
}
return rlpEncodedMsg, nil
}
return msg.Cid().Bytes(), nil
}
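`SigningBytes` makes the signed payload depend on the key type: delegated (f4/Ethereum) senders sign the RLP-encoded unsigned transaction, so the resulting signature can be verified by standard Ethereum tooling, while every other key type keeps signing the message CID. A minimal sketch of calling it for both signature types; message construction is elided, and the delegated case is expected to fail unless the message is eth-shaped:

```go
package example

import (
	"fmt"

	"github.com/filecoin-project/go-state-types/crypto"

	"github.com/filecoin-project/lotus/chain/messagesigner"
	"github.com/filecoin-project/lotus/chain/types"
)

// Sketch: the payload handed to the wallet now depends on the signature type.
// Delegated (f4/Ethereum) senders sign the RLP-encoded unsigned transaction;
// everything else keeps signing the message CID, as before this change.
func payloads(msg *types.Message) error {
	cidBytes, err := messagesigner.SigningBytes(msg, crypto.SigTypeSecp256k1)
	if err != nil {
		return err
	}
	rlpBytes, err := messagesigner.SigningBytes(msg, crypto.SigTypeDelegated)
	if err != nil {
		return err // fails unless msg round-trips as an eth transaction
	}
	fmt.Printf("secp payload = msg CID (%d bytes), delegated payload = RLP tx (%d bytes)\n",
		len(cidBytes), len(rlpBytes))
	return nil
}
```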

View File

@ -22,7 +22,7 @@ import (
"github.com/filecoin-project/lotus/chain/types"
)
- const TIPSETKEY_BACKFILL_RANGE = 2 * build.Finality
+ const TipsetkeyBackfillRange = 2 * build.Finality
func (cs *ChainStore) UnionStore() bstore.Blockstore {
return bstore.Union(cs.stateBlockstore, cs.chainBlockstore)
@ -116,7 +116,7 @@ func (cs *ChainStore) Import(ctx context.Context, r io.Reader) (*types.TipSet, e
}
ts := root
- for i := 0; i < int(TIPSETKEY_BACKFILL_RANGE); i++ {
+ for i := 0; i < int(TipsetkeyBackfillRange); i++ {
err = cs.PersistTipset(ctx, ts)
if err != nil {
return nil, err

View File

@ -384,7 +384,19 @@ func (cs *ChainStore) PutTipSet(ctx context.Context, ts *types.TipSet) error {
if err != nil { if err != nil {
return xerrors.Errorf("errored while expanding tipset: %w", err) return xerrors.Errorf("errored while expanding tipset: %w", err)
} }
log.Debugf("expanded %s into %s\n", ts.Cids(), expanded.Cids())
if expanded.Key() != ts.Key() {
log.Debugf("expanded %s into %s\n", ts.Cids(), expanded.Cids())
tsBlk, err := expanded.Key().ToStorageBlock()
if err != nil {
return xerrors.Errorf("failed to get tipset key block: %w", err)
}
if err = cs.chainLocalBlockstore.Put(ctx, tsBlk); err != nil {
return xerrors.Errorf("failed to put tipset key block: %w", err)
}
}
if err := cs.MaybeTakeHeavierTipSet(ctx, expanded); err != nil { if err := cs.MaybeTakeHeavierTipSet(ctx, expanded); err != nil {
return xerrors.Errorf("MaybeTakeHeavierTipSet failed in PutTipSet: %w", err) return xerrors.Errorf("MaybeTakeHeavierTipSet failed in PutTipSet: %w", err)

View File

@ -12,13 +12,11 @@ import (
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
gocrypto "github.com/filecoin-project/go-crypto" gocrypto "github.com/filecoin-project/go-crypto"
"github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/go-state-types/big" "github.com/filecoin-project/go-state-types/big"
builtintypes "github.com/filecoin-project/go-state-types/builtin" builtintypes "github.com/filecoin-project/go-state-types/builtin"
typescrypto "github.com/filecoin-project/go-state-types/crypto" typescrypto "github.com/filecoin-project/go-state-types/crypto"
"github.com/filecoin-project/lotus/build" "github.com/filecoin-project/lotus/build"
"github.com/filecoin-project/lotus/chain/actors"
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
) )
@ -63,6 +61,7 @@ func EthTxArgsFromMessage(msg *types.Message) (EthTxArgs, error) {
to *EthAddress to *EthAddress
params []byte params []byte
paramsReader = bytes.NewReader(msg.Params) paramsReader = bytes.NewReader(msg.Params)
err error
) )
if msg.Version != 0 { if msg.Version != 0 {
@ -72,11 +71,10 @@ func EthTxArgsFromMessage(msg *types.Message) (EthTxArgs, error) {
if msg.To == builtintypes.EthereumAddressManagerActorAddr {
switch msg.Method {
case builtintypes.MethodsEAM.CreateExternal:
- var create abi.CborBytes
- if err := create.UnmarshalCBOR(paramsReader); err != nil {
- return EthTxArgs{}, err
+ params, err = cbg.ReadByteArray(paramsReader, uint64(len(msg.Params)))
+ if err != nil {
+ return EthTxArgs{}, xerrors.Errorf("failed to read params byte array: %w", err)
}
- params = create
default:
return EthTxArgs{}, fmt.Errorf("unsupported EAM method")
}
@ -103,11 +101,6 @@ func EthTxArgsFromMessage(msg *types.Message) (EthTxArgs, error) {
if err != nil { if err != nil {
return EthTxArgs{}, xerrors.Errorf("failed to read params byte array: %w", err) return EthTxArgs{}, xerrors.Errorf("failed to read params byte array: %w", err)
} }
if len(params) == 0 {
// Otherwise, we don't get a guaranteed round-trip.
return EthTxArgs{}, xerrors.Errorf("cannot invoke contracts with empty parameters from an eth-account")
}
} }
} }
@ -115,6 +108,11 @@ func EthTxArgsFromMessage(msg *types.Message) (EthTxArgs, error) {
return EthTxArgs{}, xerrors.Errorf("extra data found in params") return EthTxArgs{}, xerrors.Errorf("extra data found in params")
} }
if len(params) == 0 && msg.Method != builtintypes.MethodSend {
// Otherwise, we don't get a guaranteed round-trip.
return EthTxArgs{}, xerrors.Errorf("msgs with empty parameters from an eth-account must be Sends (MethodNum: %d)", msg.Method)
}
return EthTxArgs{ return EthTxArgs{
ChainID: build.Eip155ChainId, ChainID: build.Eip155ChainId,
Nonce: int(msg.Nonce), Nonce: int(msg.Nonce),
@ -143,12 +141,13 @@ func (tx *EthTxArgs) ToUnsignedMessage(from address.Address) (*types.Message, er
if len(tx.Input) == 0 {
return nil, xerrors.New("cannot call CreateExternal without params")
}
- inputParams := abi.CborBytes(tx.Input)
- params, err = actors.SerializeParams(&inputParams)
- if err != nil {
- return nil, fmt.Errorf("failed to serialize Create params: %w", err)
+ buf := new(bytes.Buffer)
+ if err = cbg.WriteByteArray(buf, tx.Input); err != nil {
+ return nil, xerrors.Errorf("failed to serialize Create params: %w", err)
}
+ params = buf.Bytes()
} else {
to, err = tx.To.ToFilecoinAddress()
if err != nil {
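CreateExternal params and contract calldata are now carried as a single CBOR byte array written and read directly with cbor-gen, rather than via `abi.CborBytes`/`actors.SerializeParams`. A small round-trip sketch using the same `cbg` helpers as above; the input bytes are dummy data:

```go
package main

import (
	"bytes"
	"fmt"

	cbg "github.com/whyrusleeping/cbor-gen"
)

// Round-trip sketch of the encoding now used for CreateExternal params and
// contract calldata: the raw EVM bytes are wrapped in a single CBOR
// byte-array, as ToUnsignedMessage/EthTxArgsFromMessage do above.
func main() {
	input := []byte{0x60, 0x80, 0x60, 0x40} // dummy contract init code

	buf := new(bytes.Buffer)
	if err := cbg.WriteByteArray(buf, input); err != nil {
		panic(err)
	}
	params := buf.Bytes() // what ends up in types.Message.Params

	out, err := cbg.ReadByteArray(bytes.NewReader(params), uint64(len(params)))
	if err != nil {
		panic(err)
	}
	fmt.Printf("round-trip ok: %v\n", bytes.Equal(input, out))
}
```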

View File

@ -36,10 +36,7 @@ var ErrInvalidAddress = errors.New("invalid Filecoin Eth address")
type EthUint64 uint64
func (e EthUint64) MarshalJSON() ([]byte, error) {
- if e == 0 {
- return json.Marshal("0x0")
- }
- return json.Marshal(fmt.Sprintf("0x%x", e))
+ return json.Marshal(e.Hex())
}
func (e *EthUint64) UnmarshalJSON(b []byte) error {
@ -64,6 +61,13 @@ func EthUint64FromHex(s string) (EthUint64, error) {
return EthUint64(parsedInt), nil
}
func (e EthUint64) Hex() string {
if e == 0 {
return "0x0"
}
return fmt.Sprintf("0x%x", e)
}
// EthBigInt represents a large integer whose zero value serializes to "0x0".
type EthBigInt big.Int
@ -360,14 +364,7 @@ func (h *EthHash) UnmarshalJSON(b []byte) error {
}
func decodeHexString(s string, expectedLen int) ([]byte, error) {
- // Strip the leading 0x or 0X prefix since hex.DecodeString does not support it.
- if strings.HasPrefix(s, "0x") || strings.HasPrefix(s, "0X") {
- s = s[2:]
- }
- // Sometimes clients will omit a leading zero in a byte; pad so we can decode correctly.
- if len(s)%2 == 1 {
- s = "0" + s
- }
+ s = handleHexStringPrefix(s)
if len(s) != expectedLen*2 {
return nil, xerrors.Errorf("expected hex string length sans prefix %d, got %d", expectedLen*2, len(s))
}
@ -378,6 +375,27 @@ func decodeHexString(s string, expectedLen int) ([]byte, error) {
return b, nil
}
func DecodeHexString(s string) ([]byte, error) {
s = handleHexStringPrefix(s)
b, err := hex.DecodeString(s)
if err != nil {
return nil, xerrors.Errorf("cannot parse hex value: %w", err)
}
return b, nil
}
func handleHexStringPrefix(s string) string {
// Strip the leading 0x or 0X prefix since hex.DecodeString does not support it.
if strings.HasPrefix(s, "0x") || strings.HasPrefix(s, "0X") {
s = s[2:]
}
// Sometimes clients will omit a leading zero in a byte; pad so we can decode correctly.
if len(s)%2 == 1 {
s = "0" + s
}
return s
}
func EthHashFromCid(c cid.Cid) (EthHash, error) { func EthHashFromCid(c cid.Cid) (EthHash, error) {
return ParseEthHash(c.Hash().HexString()[8:]) return ParseEthHash(c.Hash().HexString()[8:])
} }
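The new exported `DecodeHexString` tolerates a `0x`/`0X` prefix and pads odd-length input, and `EthUint64` now marshals through `Hex()` ("0x0" for zero, minimal lowercase hex otherwise). A short sketch of the expected behaviour, importing the package path used elsewhere in this commit:

```go
package main

import (
	"fmt"

	"github.com/filecoin-project/lotus/chain/types/ethtypes"
)

// Sketch of the new helpers: DecodeHexString accepts an optional 0x/0X prefix
// and an odd number of digits, and EthUint64.Hex renders zero as "0x0".
func main() {
	for _, s := range []string{"0xdeadbeef", "0XABC", "f"} {
		b, err := ethtypes.DecodeHexString(s)
		fmt.Printf("%-12q -> %x (err=%v)\n", s, b, err)
	}
	// "0XABC" is padded to "0abc", "f" to "0f".

	fmt.Println(ethtypes.EthUint64(0).Hex())        // "0x0"
	fmt.Println(ethtypes.EthUint64(31415926).Hex()) // "0x1df5e76"
}
```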

View File

@ -291,7 +291,10 @@ func DumpActorState(i *ActorRegistry, act *types.Actor, b []byte) (interface{},
um := actInfo.vmActor.State()
if um == nil {
- // TODO::FVM @arajasek I would like to assert that we have the empty object here
+ if act.Code != EmptyObjectCid {
+ return nil, xerrors.Errorf("actor with code %s should only have empty object (%s) as its Head, instead has %s", act.Code, EmptyObjectCid, act.Head)
+ }
return nil, nil
}
if err := um.UnmarshalCBOR(bytes.NewReader(b)); err != nil {

View File

@ -1517,6 +1517,8 @@ func GetAsks(ctx context.Context, api lapi.FullNode) ([]QueriedAsk, error) {
} }
}(miner) }(miner)
} }
wg.Wait()
}() }()
loop: loop:
@ -1590,6 +1592,8 @@ loop:
lk.Unlock() lk.Unlock()
}(miner) }(miner)
} }
wg.Wait()
}() }()
loop2: loop2:

View File

@ -38,25 +38,13 @@ var EvmCmd = &cli.Command{
}
var EvmGetInfoCmd = &cli.Command{
Name: "stat",
Usage: "Print eth/filecoin addrs and code cid",
- Flags: []cli.Flag{
- &cli.StringFlag{
- Name: "filAddr",
- Usage: "Filecoin address",
- },
- &cli.StringFlag{
- Name: "ethAddr",
- Usage: "Ethereum address",
- },
- },
+ ArgsUsage: "address",
Action: func(cctx *cli.Context) error {
- filAddr := cctx.String("filAddr")
- ethAddr := cctx.String("ethAddr")
- var faddr address.Address
- var eaddr ethtypes.EthAddress
+ if cctx.NArg() != 1 {
+ return IncorrectNumArgs(cctx)
+ }
api, closer, err := GetFullNodeAPI(cctx)
if err != nil {
@ -65,26 +53,25 @@ var EvmGetInfoCmd = &cli.Command{
defer closer()
ctx := ReqContext(cctx)
- if filAddr != "" {
- addr, err := address.NewFromString(filAddr)
- if err != nil {
- return err
- }
- eaddr, faddr, err = ethAddrFromFilecoinAddress(ctx, addr, api)
- if err != nil {
- return err
- }
- } else if ethAddr != "" {
- eaddr, err = ethtypes.ParseEthAddress(ethAddr)
- if err != nil {
- return err
+ addrString := cctx.Args().Get(0)
+ var faddr address.Address
+ var eaddr ethtypes.EthAddress
+ addr, err := address.NewFromString(addrString)
+ if err != nil { // This isn't a filecoin address
+ eaddr, err = ethtypes.ParseEthAddress(addrString)
+ if err != nil { // This isn't an Eth address either
+ return xerrors.Errorf("address is not a filecoin or eth address")
}
faddr, err = eaddr.ToFilecoinAddress()
if err != nil {
return err
}
} else {
- return xerrors.Errorf("Neither filAddr nor ethAddr specified")
+ eaddr, faddr, err = ethAddrFromFilecoinAddress(ctx, addr, api)
+ if err != nil {
+ return err
+ }
}
actor, err := api.StateGetActor(ctx, faddr, types.EmptyTSK)
@ -97,7 +84,6 @@ var EvmGetInfoCmd = &cli.Command{
fmt.Println("Code cid: ", actor.Code.String())
return nil
},
}
@ -121,7 +107,7 @@ var EvmCallSimulateCmd = &cli.Command{
return err
}
- params, err := hex.DecodeString(cctx.Args().Get(2))
+ params, err := ethtypes.DecodeHexString(cctx.Args().Get(2))
if err != nil {
return err
}
@ -165,7 +151,7 @@ var EvmGetContractAddress = &cli.Command{
return err
}
- salt, err := hex.DecodeString(cctx.Args().Get(1))
+ salt, err := ethtypes.DecodeHexString(cctx.Args().Get(1))
if err != nil {
return xerrors.Errorf("Could not decode salt: %w", err)
}
@ -184,7 +170,7 @@ var EvmGetContractAddress = &cli.Command{
return err
}
- contract, err := hex.DecodeString(string(contractHex))
+ contract, err := ethtypes.DecodeHexString(string(contractHex))
if err != nil {
return xerrors.Errorf("Could not decode contract file: %w", err)
}
@ -233,7 +219,7 @@ var EvmDeployCmd = &cli.Command{
return xerrors.Errorf("failed to read contract: %w", err)
}
if cctx.Bool("hex") {
- contract, err = hex.DecodeString(string(contract))
+ contract, err = ethtypes.DecodeHexString(string(contract))
if err != nil {
return xerrors.Errorf("failed to decode contract: %w", err)
}
@ -345,8 +331,8 @@ var EvmInvokeCmd = &cli.Command{
defer closer()
ctx := ReqContext(cctx)
- if argc := cctx.Args().Len(); argc < 2 || argc > 3 {
- return xerrors.Errorf("must pass the address, entry point and (optionally) input data")
+ if argc := cctx.Args().Len(); argc != 2 {
+ return xerrors.Errorf("must pass the address and calldata")
}
addr, err := address.NewFromString(cctx.Args().Get(0))
@ -355,7 +341,7 @@ var EvmInvokeCmd = &cli.Command{
}
var calldata []byte
- calldata, err = hex.DecodeString(cctx.Args().Get(2))
+ calldata, err = ethtypes.DecodeHexString(cctx.Args().Get(1))
if err != nil {
return xerrors.Errorf("decoding hex input data: %w", err)
}
@ -388,7 +374,7 @@ var EvmInvokeCmd = &cli.Command{
To: addr,
From: fromAddr,
Value: val,
- Method: abi.MethodNum(2),
+ Method: builtintypes.MethodsEVM.InvokeContract,
Params: calldata,
}
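`lotus evm invoke` now takes exactly an address and hex calldata and always targets the contract's `InvokeContract` method. A sketch of the message the command effectively builds; the helper name, zero value, and omitted gas/nonce handling are illustrative:

```go
package example

import (
	"fmt"

	"github.com/filecoin-project/go-address"
	builtintypes "github.com/filecoin-project/go-state-types/builtin"

	"github.com/filecoin-project/lotus/chain/types"
	"github.com/filecoin-project/lotus/chain/types/ethtypes"
)

// Sketch of the message the reworked `lotus evm invoke` builds: calldata is
// hex-decoded with the new DecodeHexString helper and sent to the contract's
// InvokeContract method. Addresses are supplied by the caller.
func buildInvoke(contract, from address.Address, calldataHex string) (*types.Message, error) {
	calldata, err := ethtypes.DecodeHexString(calldataHex)
	if err != nil {
		return nil, fmt.Errorf("decoding hex input data: %w", err)
	}
	return &types.Message{
		To:     contract,
		From:   from,
		Value:  types.NewInt(0),
		Method: builtintypes.MethodsEVM.InvokeContract,
		Params: calldata,
	}, nil
}
```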

View File

@ -13,6 +13,7 @@ import (
"github.com/filecoin-project/lotus/chain/actors/builtin" "github.com/filecoin-project/lotus/chain/actors/builtin"
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
"github.com/filecoin-project/lotus/chain/types/ethtypes"
) )
var sendCmd = &cli.Command{ var sendCmd = &cli.Command{
@ -24,6 +25,10 @@ var sendCmd = &cli.Command{
Name: "from", Name: "from",
Usage: "optionally specify the account to send funds from", Usage: "optionally specify the account to send funds from",
}, },
&cli.StringFlag{
Name: "from-eth-addr",
Usage: "optionally specify the eth addr to send funds from",
},
&cli.StringFlag{ &cli.StringFlag{
Name: "gas-premium", Name: "gas-premium",
Usage: "specify gas price to use in AttoFIL", Usage: "specify gas price to use in AttoFIL",
@ -98,6 +103,18 @@ var sendCmd = &cli.Command{
} }
params.From = addr params.From = addr
} else if from := cctx.String("from-eth-addr"); from != "" {
eaddr, err := ethtypes.ParseEthAddress(from)
if err != nil {
return err
}
faddr, err := eaddr.ToFilecoinAddress()
if err != nil {
fmt.Println("error on conversion to faddr")
return err
}
fmt.Println("f4 addr: ", faddr)
params.From = faddr
} }
if cctx.IsSet("gas-premium") { if cctx.IsSet("gas-premium") {

View File

@ -776,7 +776,9 @@ var StateGetActorCmd = &cli.Command{
fmt.Printf("Nonce:\t\t%d\n", a.Nonce)
fmt.Printf("Code:\t\t%s (%s)\n", a.Code, strtype)
fmt.Printf("Head:\t\t%s\n", a.Head)
- fmt.Printf("Delegated address:\t\t%s\n", a.Address)
+ if a.Address != nil {
+ fmt.Printf("Delegated address:\t\t%s\n", a.Address)
+ }
return nil
},

View File

@ -291,6 +291,8 @@
* [WalletSignMessage](#WalletSignMessage) * [WalletSignMessage](#WalletSignMessage)
* [WalletValidateAddress](#WalletValidateAddress) * [WalletValidateAddress](#WalletValidateAddress)
* [WalletVerify](#WalletVerify) * [WalletVerify](#WalletVerify)
* [Web3](#Web3)
* [Web3ClientVersion](#Web3ClientVersion)
## ##
@ -9207,3 +9209,16 @@ Inputs:
Response: `true` Response: `true`
## Web3
### Web3ClientVersion
Returns the client version
Perms: read
Inputs: `null`
Response: `"string value"`

View File

@ -181,15 +181,16 @@ CATEGORY:
BASIC
OPTIONS:
--force Deprecated: use global 'force-send' (default: false)
--from value optionally specify the account to send funds from
+ --from-eth-addr value optionally specify the eth addr to send funds from
--gas-feecap value specify gas fee cap to use in AttoFIL (default: "0")
--gas-limit value specify gas limit (default: 0)
--gas-premium value specify gas price to use in AttoFIL (default: "0")
--method value specify method to invoke (default: 0)
--nonce value specify the nonce to use (default: 0)
--params-hex value specify invocation parameters in hex
--params-json value specify invocation parameters in json
```
@ -2595,11 +2596,10 @@ NAME:
lotus evm stat - Print eth/filecoin addrs and code cid
USAGE:
- lotus evm stat [command options] [arguments...]
+ lotus evm stat [command options] address
OPTIONS:
- --ethAddr value Ethereum address
- --filAddr value Filecoin address
+ --help, -h show help (default: false)
```

View File

@ -293,63 +293,13 @@
#Tracing = false #Tracing = false
[ActorEvent]
# EnableRealTimeFilterAPI enables APIs that can create and query filters for actor events as they are emitted.
#
# type: bool
# env var: LOTUS_ACTOREVENT_ENABLEREALTIMEFILTERAPI
#EnableRealTimeFilterAPI = false
# EnableHistoricFilterAPI enables APIs that can create and query filters for actor events that occurred in the past.
# A queryable index of events will be maintained.
#
# type: bool
# env var: LOTUS_ACTOREVENT_ENABLEHISTORICFILTERAPI
#EnableHistoricFilterAPI = false
# FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than
# this time become eligible for automatic deletion.
#
# type: Duration
# env var: LOTUS_ACTOREVENT_FILTERTTL
#FilterTTL = "24h0m0s"
# MaxFilters specifies the maximum number of filters that may exist at any one time.
#
# type: int
# env var: LOTUS_ACTOREVENT_MAXFILTERS
#MaxFilters = 100
# MaxFilterResults specifies the maximum number of results that can be accumulated by an actor event filter.
#
# type: int
# env var: LOTUS_ACTOREVENT_MAXFILTERRESULTS
#MaxFilterResults = 10000
# MaxFilterHeightRange specifies the maximum range of heights that can be used in a filter (to avoid querying
# the entire chain)
#
# type: uint64
# env var: LOTUS_ACTOREVENT_MAXFILTERHEIGHTRANGE
#MaxFilterHeightRange = 2880
# ActorEventDatabasePath is the full path to a sqlite database that will be used to index actor events to
# support the historic filter APIs. If the database does not exist it will be created. The directory containing
# the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as
# relative to the CWD (current working directory).
#
# type: string
# env var: LOTUS_ACTOREVENT_ACTOREVENTDATABASEPATH
#ActorEventDatabasePath = ""
[Fevm]
- # EnableEthHashToFilecoinCidMapping enables storing a mapping of eth transaction hashes to filecoin message Cids
- # You will not be able to look up ethereum transactions by their hash if this is disabled.
+ # EnableEthRPC enables eth_ rpc, and enables storing a mapping of eth transaction hashes to filecoin message Cids.
+ # This will also enable the RealTimeFilterAPI and HistoricFilterAPI by default, but they can be disabled by config options above.
#
# type: bool
- # env var: LOTUS_FEVM_ENABLEETHHASHTOFILECOINCIDMAPPING
- #EnableEthHashToFilecoinCidMapping = false
+ # env var: LOTUS_FEVM_ENABLEETHRPC
+ #EnableEthRPC = false
# EthTxHashMappingLifetimeDays the transaction hash lookup database will delete mappings that have been stored for more than x days
# Set to 0 to keep all mappings
@ -358,4 +308,56 @@
# env var: LOTUS_FEVM_ETHTXHASHMAPPINGLIFETIMEDAYS
#EthTxHashMappingLifetimeDays = 0
[Fevm.Events]
# EnableEthRPC enables APIs that
# DisableRealTimeFilterAPI will disable the RealTimeFilterAPI that can create and query filters for actor events as they are emitted.
# The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.
#
# type: bool
# env var: LOTUS_FEVM_EVENTS_DISABLEREALTIMEFILTERAPI
#DisableRealTimeFilterAPI = false
# DisableHistoricFilterAPI will disable the HistoricFilterAPI that can create and query filters for actor events
# that occurred in the past. HistoricFilterAPI maintains a queryable index of events.
# The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.
#
# type: bool
# env var: LOTUS_FEVM_EVENTS_DISABLEHISTORICFILTERAPI
#DisableHistoricFilterAPI = false
# FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than
# this time become eligible for automatic deletion.
#
# type: Duration
# env var: LOTUS_FEVM_EVENTS_FILTERTTL
#FilterTTL = "24h0m0s"
# MaxFilters specifies the maximum number of filters that may exist at any one time.
#
# type: int
# env var: LOTUS_FEVM_EVENTS_MAXFILTERS
#MaxFilters = 100
# MaxFilterResults specifies the maximum number of results that can be accumulated by an actor event filter.
#
# type: int
# env var: LOTUS_FEVM_EVENTS_MAXFILTERRESULTS
#MaxFilterResults = 10000
# MaxFilterHeightRange specifies the maximum range of heights that can be used in a filter (to avoid querying
# the entire chain)
#
# type: uint64
# env var: LOTUS_FEVM_EVENTS_MAXFILTERHEIGHTRANGE
#MaxFilterHeightRange = 2880
# DatabasePath is the full path to a sqlite database that will be used to index actor events to
# support the historic filter APIs. If the database does not exist it will be created. The directory containing
# the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as
# relative to the CWD (current working directory).
#
# type: string
# env var: LOTUS_FEVM_EVENTS_DATABASEPATH
#DatabasePath = ""

View File

@ -0,0 +1 @@
608060405234801561001057600080fd5b506105eb806100206000396000f3fe608060405234801561001057600080fd5b50600436106100a95760003560e01c8063c755553811610071578063c755553814610198578063cbfc3b58146101c6578063cc6f8faf14610212578063cd5b6c3d14610254578063e2a614731461028c578063fb62b28b146102d8576100a9565b80630919b8be146100ae5780636199074d146100e657806366eef3461461012857806375091b1f14610132578063a63ae81a1461016a575b600080fd5b6100e4600480360360408110156100c457600080fd5b81019080803590602001909291908035906020019092919050505061031a565b005b610126600480360360608110156100fc57600080fd5b8101908080359060200190929190803590602001909291908035906020019092919050505061035d565b005b610130610391565b005b6101686004803603604081101561014857600080fd5b8101908080359060200190929190803590602001909291905050506103bf565b005b6101966004803603602081101561018057600080fd5b81019080803590602001909291905050506103fb565b005b6101c4600480360360208110156101ae57600080fd5b8101908080359060200190929190505050610435565b005b610210600480360360808110156101dc57600080fd5b8101908080359060200190929190803590602001909291908035906020019092919080359060200190929190505050610465565b005b6102526004803603606081101561022857600080fd5b810190808035906020019092919080359060200190929190803590602001909291905050506104ba565b005b61028a6004803603604081101561026a57600080fd5b8101908080359060200190929190803590602001909291905050506104f8565b005b6102d6600480360360808110156102a257600080fd5b810190808035906020019092919080359060200190929190803590602001909291908035906020019092919050505061052a565b005b610318600480360360608110156102ee57600080fd5b8101908080359060200190929190803590602001909291908035906020019092919050505061056a565b005b7f5469c6b769315f5668523937f05ca07d4cc87849432bc5f5907f1d90fa73b9f98282604051808381526020018281526020019250505060405180910390a15050565b8082847fb89dabcdb7ff41f1794c0da92f65ece6c19b6b0caeac5407b2a721efe27c080460405160405180910390a4505050565b7fc3f6f1c76bd4e74ee5782052b0b4f8bd5c50b86c3c5a2f52638e03066e50a91b60405160405180910390a1565b817f6709824ebe5f6e620ca3f4b02a3428e8ce2dc97c550816eaeeb3a342b214bd85826040518082815260200191505060405180910390a25050565b7fc804e53d6048af1b3e6a352e246d5f3864fea9d635ace499e023a58c383b3a88816040518082815260200191505060405180910390a150565b807f44a227a31429ab5eb00daf6611c6422f10571619f2267e0e149e9ebe6d2a5d0560405160405180910390a250565b7f28d45631a87b2a52a9625f8520fa37ff8c4d926cdf17042e241985da5cb7b850848484846040518085815260200184815260200183815260200182815260200194505050505060405180910390a150505050565b81837fcd5fe5fbc1d27b90036997224cea7aa565e3779622867265081f636b3a5ccb08836040518082815260200191505060405180910390a3505050565b80827f232f09cef3babc26e58d1cc1346c0a8bc626ffe600c9605b5d747783eda484a760405160405180910390a35050565b8183857f812e73dbcf7e267f27ecb1383bfc902a6650b41b6e7d03ac265108c369673d95846040518082815260200191505060405180910390a450505050565b7fd4d143faaf60340ad98e1f2c96fc26f5695834c21b5200edad339ee7e9a372cc83838360405180848152602001838152602001828152602001935050505060405180910390a150505056fea265627a7a72315820954561fde80ab925299e0a9f3356b01f64fb1976dd335ac2ebd9367441e29f0564736f6c63430005110032

View File

@ -0,0 +1,51 @@
pragma solidity ^0.5.0;
contract EventMatrix {
event EventZeroData();
event EventOneData(uint a);
event EventTwoData(uint a, uint b);
event EventThreeData(uint a, uint b, uint c);
event EventFourData(uint a, uint b, uint c, uint d);
event EventOneIndexed(uint indexed a);
event EventTwoIndexed(uint indexed a, uint indexed b);
event EventThreeIndexed(uint indexed a, uint indexed b, uint indexed c);
event EventOneIndexedWithData(uint indexed a, uint b);
event EventTwoIndexedWithData(uint indexed a, uint indexed b, uint c);
event EventThreeIndexedWithData(uint indexed a, uint indexed b, uint indexed c, uint d);
function logEventZeroData() public {
emit EventZeroData();
}
function logEventOneData(uint a) public {
emit EventOneData(a);
}
function logEventTwoData(uint a, uint b) public {
emit EventTwoData(a,b);
}
function logEventThreeData(uint a, uint b, uint c) public {
emit EventThreeData(a,b,c);
}
function logEventFourData(uint a, uint b, uint c, uint d) public {
emit EventFourData(a,b,c,d);
}
function logEventOneIndexed(uint a) public {
emit EventOneIndexed(a);
}
function logEventTwoIndexed(uint a, uint b) public {
emit EventTwoIndexed(a,b);
}
function logEventThreeIndexed(uint a, uint b, uint c) public {
emit EventThreeIndexed(a,b,c);
}
function logEventOneIndexedWithData(uint a, uint b) public {
emit EventOneIndexedWithData(a,b);
}
function logEventTwoIndexedWithData(uint a, uint b, uint c) public {
emit EventTwoIndexedWithData(a,b,c);
}
function logEventThreeIndexedWithData(uint a, uint b, uint c, uint d) public {
emit EventThreeIndexedWithData(a,b,c,d);
}
}

View File

@ -0,0 +1 @@
608060405234801561001057600080fd5b5061025b806100206000396000f3fe60806040526004361061001e5760003560e01c8063cb7786d714610023575b600080fd5b61003d60048036038101906100389190610129565b61003f565b005b600083036100d15760008111156100cc573073ffffffffffffffffffffffffffffffffffffffff1663cb7786d7838460018561007b91906101ab565b6040518463ffffffff1660e01b8152600401610099939291906101ee565b600060405180830381600087803b1580156100b357600080fd5b505af11580156100c7573d6000803e3d6000fd5b505050505b6100e9565b6100e86001846100e191906101ab565b838361003f565b5b505050565b600080fd5b6000819050919050565b610106816100f3565b811461011157600080fd5b50565b600081359050610123816100fd565b92915050565b600080600060608486031215610142576101416100ee565b5b600061015086828701610114565b935050602061016186828701610114565b925050604061017286828701610114565b9150509250925092565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60006101b6826100f3565b91506101c1836100f3565b92508282039050818111156101d9576101d861017c565b5b92915050565b6101e8816100f3565b82525050565b600060608201905061020360008301866101df565b61021060208301856101df565b61021d60408301846101df565b94935050505056fea26469706673582212209a21ff59c642e2970917c07bf498271c2a6df8e3929677952c0c2d8031db15cc64736f6c63430008110033

View File

@ -0,0 +1,16 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.17;
contract StackRecCall {
function exec1(uint256 n, uint256 m, uint256 r) public payable {
if(n == 0) {
if(r > 0) {
StackRecCall(address(this)).exec1(m, m, r-1);
}
return;
}
exec1(n-1, m, r);
}
}

View File

@ -0,0 +1 @@
608060405234801561001057600080fd5b50610162806100206000396000f3fe60806040526004361061001e5760003560e01c8063c38e07dd14610023575b600080fd5b61003d6004803603810190610038919061009c565b61003f565b005b600081031561005e5761005d60018261005891906100f8565b61003f565b5b50565b600080fd5b6000819050919050565b61007981610066565b811461008457600080fd5b50565b60008135905061009681610070565b92915050565b6000602082840312156100b2576100b1610061565b5b60006100c084828501610087565b91505092915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600061010382610066565b915061010e83610066565b9250828203905081811115610126576101256100c9565b5b9291505056fea2646970667358221220ee8f18bfd33b1e0156cfe68e9071dd32960b370c7e63ec53c62dd48e28cb5d3b64736f6c63430008110033

View File

@ -0,0 +1,12 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.17;
contract StackSelf {
function exec1(uint256 n) public payable {
if(n == 0) {
return;
}
exec1(n-1);
}
}

View File

@ -0,0 +1,47 @@
# https://github.com/filecoin-project/builtin-actors/blob/b1ba61053de2ceaddd5116e87823d20a8f5e38d7/actors/evm/tests/events.rs
# method dispatch:
# - 0x00000000 -> log_zero_data
# - 0x00000001 -> log_zero_nodata
# - 0x00000002 -> log_four_data
%dispatch_begin()
%dispatch(0x00, log_zero_data)
%dispatch(0x01, log_zero_nodata)
%dispatch(0x02, log_four_data)
%dispatch_end()
#### log a zero topic event with data
log_zero_data:
jumpdest
push8 0x1122334455667788
push1 0x00
mstore
push1 0x08
push1 0x18 ## index 24 into memory as mstore writes a full word
log0
push1 0x00
push1 0x00
return
#### log a zero topic event with no data
log_zero_nodata:
jumpdest
push1 0x00
push1 0x00
log0
push1 0x00
push1 0x00
return
#### log a four topic event with data
log_four_data:
jumpdest
push8 0x1122334455667788
push1 0x00
mstore
push4 0x4444
push3 0x3333
push2 0x2222
push2 0x1111
push1 0x08
push1 0x18 ## index 24 into memory as mstore writes a full word
log4
push1 0x00
push1 0x00
return
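Editorial note, not part of the diff: the dispatch table at the top of this listing maps directly onto the 4-byte entry point that the FEVM events itest passes to the kit's InvokeSolidity helper. A hedged sketch, assuming the kit API shown later in this changeset:

package itests

import (
	"context"

	"github.com/filecoin-project/go-address"

	"github.com/filecoin-project/lotus/itests/kit"
)

// Hypothetical helper: exercise each dispatch target of the events contract.
// The selectors mirror the %dispatch entries above (0x00, 0x01, 0x02).
func invokeEventsContract(ctx context.Context, evm *kit.EVM, from, contract address.Address) {
	evm.InvokeSolidity(ctx, from, contract, []byte{0x00, 0x00, 0x00, 0x00}, nil) // log_zero_data
	evm.InvokeSolidity(ctx, from, contract, []byte{0x00, 0x00, 0x00, 0x01}, nil) // log_zero_nodata
	evm.InvokeSolidity(ctx, from, contract, []byte{0x00, 0x00, 0x00, 0x02}, nil) // log_four_data
}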

View File

@ -33,7 +33,7 @@ func TestDealPadding(t *testing.T) {
dh := kit.NewDealHarness(t, client, miner, miner) dh := kit.NewDealHarness(t, client, miner, miner)
ctx := context.Background() ctx := context.Background()
client.WaitTillChain(ctx, kit.BlockMinedBy(miner.ActorAddr)) client.WaitTillChain(ctx, kit.BlocksMinedByAll(miner.ActorAddr))
// Create a random file, would originally be a 256-byte sector // Create a random file, would originally be a 256-byte sector
res, inFile := client.CreateImportFile(ctx, 1, 200) res, inFile := client.CreateImportFile(ctx, 1, 200)

View File

@ -52,7 +52,7 @@ func TestFirstDealEnablesMining(t *testing.T) {
providerMined := make(chan struct{}) providerMined := make(chan struct{})
go func() { go func() {
_ = client.WaitTillChain(ctx, kit.BlockMinedBy(provider.ActorAddr)) _ = client.WaitTillChain(ctx, kit.BlocksMinedByAll(provider.ActorAddr))
close(providerMined) close(providerMined)
}() }()

View File

@ -11,6 +11,8 @@ import (
"github.com/filecoin-project/go-state-types/abi" "github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/go-state-types/big" "github.com/filecoin-project/go-state-types/big"
builtin2 "github.com/filecoin-project/go-state-types/builtin"
"github.com/filecoin-project/go-state-types/builtin/v10/eam"
"github.com/filecoin-project/go-state-types/exitcode" "github.com/filecoin-project/go-state-types/exitcode"
"github.com/filecoin-project/lotus/api" "github.com/filecoin-project/lotus/api"
@ -313,3 +315,51 @@ func TestEthAccountAbstractionFailsFromEvmActor(t *testing.T) {
require.Error(t, err, "expected gas estimation to fail") require.Error(t, err, "expected gas estimation to fail")
require.Contains(t, err.Error(), "SysErrSenderInvalid") require.Contains(t, err.Error(), "SysErrSenderInvalid")
} }
func TestEthAccountManagerPermissions(t *testing.T) {
kit.QuietMiningLogs()
client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC())
ens.InterconnectAll().BeginMining(10 * time.Millisecond)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
// setup f1/f3/f4 accounts
wsp, err := client.WalletNew(ctx, types.KTSecp256k1)
require.NoError(t, err)
wbl, err := client.WalletNew(ctx, types.KTBLS)
require.NoError(t, err)
wdl, err := client.WalletNew(ctx, types.KTDelegated)
require.NoError(t, err)
def := client.DefaultKey.Address
// send some funds
client.ExpectSend(ctx, def, wsp, types.FromFil(10), "")
client.ExpectSend(ctx, def, wbl, types.FromFil(10), "")
client.ExpectSend(ctx, def, wdl, types.FromFil(10), "")
require.NoError(t, err)
// make sure that EAM only allows CreateExternal to be called by accounts
client.ExpectSend(ctx, wsp, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create, &eam.CreateParams{Nonce: 0}))
client.ExpectSend(ctx, wbl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create, &eam.CreateParams{Nonce: 0}))
client.ExpectSend(ctx, wdl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create, &eam.CreateParams{Nonce: 0}))
client.ExpectSend(ctx, wsp, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create2, &eam.Create2Params{}))
client.ExpectSend(ctx, wbl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create2, &eam.Create2Params{}))
client.ExpectSend(ctx, wdl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "not one of supported (18)", client.MakeSendCall(builtin2.MethodsEAM.Create2, &eam.Create2Params{}))
contractHex, err := os.ReadFile("contracts/SimpleCoin.hex")
require.NoError(t, err)
contract, err := hex.DecodeString(string(contractHex))
require.NoError(t, err)
contractParams := abi.CborBytes(contract)
client.ExpectSend(ctx, wsp, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "", client.MakeSendCall(builtin2.MethodsEAM.CreateExternal, &contractParams))
client.ExpectSend(ctx, wbl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "", client.MakeSendCall(builtin2.MethodsEAM.CreateExternal, &contractParams))
client.ExpectSend(ctx, wdl, builtin2.EthereumAddressManagerActorAddr, big.Zero(), "", client.MakeSendCall(builtin2.MethodsEAM.CreateExternal, &contractParams))
}

View File

@ -0,0 +1,65 @@
package itests
import (
"context"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/lotus/itests/kit"
)
// TestEthBlockHashesCorrect_MultiBlockTipset validates that blocks retrieved through
// EthGetBlockByNumber are identical to blocks retrieved through
// EthGetBlockByHash, when using the block hash returned by the former.
//
// Specifically, it checks the system behaves correctly with multiblock tipsets.
//
// Catches regressions around https://github.com/filecoin-project/lotus/issues/10061.
func TestEthBlockHashesCorrect_MultiBlockTipset(t *testing.T) {
// miner is connected to the first node, and we want to observe the chain
// from the second node.
blocktime := 100 * time.Millisecond
n1, m1, m2, ens := kit.EnsembleOneTwo(t,
kit.MockProofs(),
kit.ThroughRPC(),
)
ens.InterconnectAll().BeginMining(blocktime)
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
n1.WaitTillChain(ctx, kit.HeightAtLeast(abi.ChainEpoch(25)))
defer cancel()
var n2 kit.TestFullNode
ens.FullNode(&n2, kit.ThroughRPC()).Start().Connect(n2, n1)
// find the first tipset where all miners mined a block.
ctx, cancel = context.WithTimeout(context.Background(), 1*time.Minute)
n2.WaitTillChain(ctx, kit.BlocksMinedByAll(m1.ActorAddr, m2.ActorAddr))
defer cancel()
head, err := n2.ChainHead(context.Background())
require.NoError(t, err)
// let the chain run a little bit longer to minimise the chance of reorgs
n2.WaitTillChain(ctx, kit.HeightAtLeast(head.Height()+50))
head, err = n2.ChainHead(context.Background())
require.NoError(t, err)
for i := 1; i <= int(head.Height()); i++ {
hex := fmt.Sprintf("0x%x", i)
ethBlockA, err := n2.EthGetBlockByNumber(ctx, hex, true)
require.NoError(t, err)
ethBlockB, err := n2.EthGetBlockByHash(ctx, ethBlockA.Hash, true)
require.NoError(t, err)
require.Equal(t, ethBlockA, ethBlockB)
}
}

View File

@ -20,7 +20,6 @@ import (
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
"github.com/filecoin-project/lotus/chain/types/ethtypes" "github.com/filecoin-project/lotus/chain/types/ethtypes"
"github.com/filecoin-project/lotus/itests/kit" "github.com/filecoin-project/lotus/itests/kit"
"github.com/filecoin-project/lotus/node/config"
) )
// TestDeployment smoke tests the deployment of a contract via the // TestDeployment smoke tests the deployment of a contract via the
@ -36,13 +35,7 @@ func TestDeployment(t *testing.T) {
client, _, ens := kit.EnsembleMinimal( client, _, ens := kit.EnsembleMinimal(
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC())
kit.WithCfgOpt(func(cfg *config.FullNode) error {
cfg.ActorEvent.EnableRealTimeFilterAPI = true
return nil
}),
kit.EthTxHashLookup(),
)
ens.InterconnectAll().BeginMining(blockTime) ens.InterconnectAll().BeginMining(blockTime)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute) ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
@ -100,6 +93,7 @@ func TestDeployment(t *testing.T) {
mpoolTx, err := client.EthGetTransactionByHash(ctx, &hash) mpoolTx, err := client.EthGetTransactionByHash(ctx, &hash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, mpoolTx)
// require that the hashes are identical // require that the hashes are identical
require.Equal(t, hash, mpoolTx.Hash) require.Equal(t, hash, mpoolTx.Hash)

File diff suppressed because it is too large

View File

@ -16,7 +16,6 @@ import (
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
"github.com/filecoin-project/lotus/chain/types/ethtypes" "github.com/filecoin-project/lotus/chain/types/ethtypes"
"github.com/filecoin-project/lotus/itests/kit" "github.com/filecoin-project/lotus/itests/kit"
"github.com/filecoin-project/lotus/node/config"
) )
// TestTransactionHashLookup tests to see if lotus correctly stores a mapping from ethereum transaction hash to // TestTransactionHashLookup tests to see if lotus correctly stores a mapping from ethereum transaction hash to
@ -29,7 +28,6 @@ func TestTransactionHashLookup(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -112,86 +110,6 @@ func TestTransactionHashLookup(t *testing.T) {
require.Equal(t, uint64(*chainTx.TransactionIndex), uint64(0)) // only transaction require.Equal(t, uint64(*chainTx.TransactionIndex), uint64(0)) // only transaction
} }
// TestTransactionHashLookupNoDb tests to see if looking up eth transactions by hash breaks without the lookup table
func TestTransactionHashLookupNoDb(t *testing.T) {
kit.QuietMiningLogs()
blocktime := 1 * time.Second
client, _, ens := kit.EnsembleMinimal(
t,
kit.MockProofs(),
kit.ThroughRPC(),
kit.WithCfgOpt(func(cfg *config.FullNode) error {
cfg.Fevm.EnableEthHashToFilecoinCidMapping = false
return nil
}),
)
ens.InterconnectAll().BeginMining(blocktime)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
// install contract
contractHex, err := os.ReadFile("./contracts/SimpleCoin.hex")
require.NoError(t, err)
contract, err := hex.DecodeString(string(contractHex))
require.NoError(t, err)
// create a new Ethereum account
key, ethAddr, deployer := client.EVM().NewAccount()
// send some funds to the f410 address
kit.SendFunds(ctx, t, client, deployer, types.FromFil(10))
gaslimit, err := client.EthEstimateGas(ctx, ethtypes.EthCall{
From: &ethAddr,
Data: contract,
})
require.NoError(t, err)
maxPriorityFeePerGas, err := client.EthMaxPriorityFeePerGas(ctx)
require.NoError(t, err)
// now deploy a contract from the embryo, and validate it went well
tx := ethtypes.EthTxArgs{
ChainID: build.Eip155ChainId,
Value: big.Zero(),
Nonce: 0,
MaxFeePerGas: types.NanoFil,
MaxPriorityFeePerGas: big.Int(maxPriorityFeePerGas),
GasLimit: int(gaslimit),
Input: contract,
V: big.Zero(),
R: big.Zero(),
S: big.Zero(),
}
client.EVM().SignTransaction(&tx, key.PrivateKey)
rawTxHash, err := tx.TxHash()
require.NoError(t, err)
hash := client.EVM().SubmitTransaction(ctx, &tx)
require.Equal(t, rawTxHash, hash)
// We shouldn't be able to find the tx
mpoolTx, err := client.EthGetTransactionByHash(ctx, &hash)
require.NoError(t, err)
require.Nil(t, mpoolTx)
// Wait for message to land on chain, we can't know exactly when because we can't find it.
time.Sleep(20 * blocktime)
receipt, err := client.EthGetTransactionReceipt(ctx, hash)
require.NoError(t, err)
require.Nil(t, receipt)
// We still shouldn't be able to find the tx
chainTx, err := client.EthGetTransactionByHash(ctx, &hash)
require.NoError(t, err)
require.Nil(t, chainTx)
}
// TestTransactionHashLookupBlsFilecoinMessage tests to see if lotus can find a BLS Filecoin Message using the transaction hash // TestTransactionHashLookupBlsFilecoinMessage tests to see if lotus can find a BLS Filecoin Message using the transaction hash
func TestTransactionHashLookupBlsFilecoinMessage(t *testing.T) { func TestTransactionHashLookupBlsFilecoinMessage(t *testing.T) {
kit.QuietMiningLogs() kit.QuietMiningLogs()
@ -201,7 +119,6 @@ func TestTransactionHashLookupBlsFilecoinMessage(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -271,7 +188,6 @@ func TestTransactionHashLookupSecpFilecoinMessage(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -348,7 +264,6 @@ func TestTransactionHashLookupNonexistentMessage(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -379,7 +294,6 @@ func TestEthGetMessageCidByTransactionHashEthTx(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -476,7 +390,6 @@ func TestEthGetMessageCidByTransactionHashSecp(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)
@ -547,7 +460,6 @@ func TestEthGetMessageCidByTransactionHashBLS(t *testing.T) {
t, t,
kit.MockProofs(), kit.MockProofs(),
kit.ThroughRPC(), kit.ThroughRPC(),
kit.EthTxHashLookup(),
) )
ens.InterconnectAll().BeginMining(blocktime) ens.InterconnectAll().BeginMining(blocktime)

View File

@ -21,7 +21,7 @@ import (
func TestValueTransferValidSignature(t *testing.T) { func TestValueTransferValidSignature(t *testing.T) {
blockTime := 100 * time.Millisecond blockTime := 100 * time.Millisecond
client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC(), kit.EthTxHashLookup()) client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC())
ens.InterconnectAll().BeginMining(blockTime) ens.InterconnectAll().BeginMining(blockTime)
@ -106,7 +106,7 @@ func TestLegacyTransaction(t *testing.T) {
func TestContractDeploymentValidSignature(t *testing.T) { func TestContractDeploymentValidSignature(t *testing.T) {
blockTime := 100 * time.Millisecond blockTime := 100 * time.Millisecond
client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC(), kit.EthTxHashLookup()) client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC())
ens.InterconnectAll().BeginMining(blockTime) ens.InterconnectAll().BeginMining(blockTime)
@ -167,7 +167,7 @@ func TestContractDeploymentValidSignature(t *testing.T) {
func TestContractInvocation(t *testing.T) { func TestContractInvocation(t *testing.T) {
blockTime := 100 * time.Millisecond blockTime := 100 * time.Millisecond
client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC(), kit.EthTxHashLookup()) client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC())
ens.InterconnectAll().BeginMining(blockTime) ens.InterconnectAll().BeginMining(blockTime)

View File

@ -12,6 +12,7 @@ import (
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
"github.com/filecoin-project/lotus/chain/types/ethtypes"
"github.com/filecoin-project/lotus/itests/kit" "github.com/filecoin-project/lotus/itests/kit"
) )
@ -45,22 +46,22 @@ func TestFEVMEvents(t *testing.T) {
require.NoError(err) require.NoError(err)
t.Logf("actor ID address is %s", idAddr) t.Logf("actor ID address is %s", idAddr)
// var ( var (
// earliest = "earliest" earliest = "earliest"
// latest = "latest" latest = "latest"
// ) )
//
// // Install a filter. // Install a filter.
// filter, err := client.EthNewFilter(ctx, &api.EthFilterSpec{ filter, err := client.EthNewFilter(ctx, &ethtypes.EthFilterSpec{
// FromBlock: &earliest, FromBlock: &earliest,
// ToBlock: &latest, ToBlock: &latest,
// }) })
// require.NoError(err) require.NoError(err)
//
// // No logs yet. // No logs yet.
// res, err := client.EthGetFilterLogs(ctx, filter) res, err := client.EthGetFilterLogs(ctx, filter)
// require.NoError(err) require.NoError(err)
// require.Empty(res.NewLogs) require.Empty(res.Results)
// log a zero topic event with data // log a zero topic event with data
ret := client.EVM().InvokeSolidity(ctx, fromAddr, idAddr, []byte{0x00, 0x00, 0x00, 0x00}, nil) ret := client.EVM().InvokeSolidity(ctx, fromAddr, idAddr, []byte{0x00, 0x00, 0x00, 0x00}, nil)

View File

@ -2,6 +2,7 @@ package itests
import ( import (
"context" "context"
"encoding/binary"
"encoding/hex" "encoding/hex"
"testing" "testing"
"time" "time"
@ -10,6 +11,7 @@ import (
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
builtintypes "github.com/filecoin-project/go-state-types/builtin" builtintypes "github.com/filecoin-project/go-state-types/builtin"
"github.com/filecoin-project/go-state-types/exitcode"
"github.com/filecoin-project/go-state-types/manifest" "github.com/filecoin-project/go-state-types/manifest"
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
@ -38,9 +40,9 @@ func inputDataFromFrom(ctx context.Context, t *testing.T, client *kit.TestFullNo
func setupFEVMTest(t *testing.T) (context.Context, context.CancelFunc, *kit.TestFullNode) { func setupFEVMTest(t *testing.T) (context.Context, context.CancelFunc, *kit.TestFullNode) {
kit.QuietMiningLogs() kit.QuietMiningLogs()
blockTime := 100 * time.Millisecond blockTime := 5 * time.Millisecond
client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC()) client, _, ens := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC())
ens.InterconnectAll().BeginMining(blockTime) ens.InterconnectAll().BeginMiningMustPost(blockTime)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute) ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
return ctx, cancel, client return ctx, cancel, client
} }
@ -132,3 +134,85 @@ func TestFEVMDelegateCall(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, result, expectedResultActor) require.Equal(t, result, expectedResultActor)
} }
func TestEVMRpcDisable(t *testing.T) {
client, _, _ := kit.EnsembleMinimal(t, kit.MockProofs(), kit.ThroughRPC(), kit.DisableEthRPC())
_, err := client.EthBlockNumber(context.Background())
require.ErrorContains(t, err, "module disabled, enable with Fevm.EnableEthRPC")
}
// TestFEVMRecursiveFuncCall deploys a contract and makes recursive function calls
func TestFEVMRecursiveFuncCall(t *testing.T) {
ctx, cancel, client := setupFEVMTest(t)
defer cancel()
//install contract Actor
filenameActor := "contracts/StackFunc.hex"
fromAddr, actorAddr := client.EVM().DeployContractFromFilename(ctx, filenameActor)
testN := func(n int, ex exitcode.ExitCode) func(t *testing.T) {
return func(t *testing.T) {
inputData := make([]byte, 32)
binary.BigEndian.PutUint64(inputData[24:], uint64(n))
client.EVM().InvokeContractByFuncNameExpectExit(ctx, fromAddr, actorAddr, "exec1(uint256)", inputData, ex)
}
}
t.Run("n=0", testN(0, exitcode.Ok))
t.Run("n=1", testN(1, exitcode.Ok))
t.Run("n=20", testN(20, exitcode.Ok))
t.Run("n=200", testN(200, exitcode.Ok))
t.Run("n=507", testN(507, exitcode.Ok))
t.Run("n=508", testN(508, exitcode.ExitCode(23))) // 23 means stack overflow
}
// TestFEVMRecursiveActorCall deploys a contract and makes recursive actor calls
func TestFEVMRecursiveActorCall(t *testing.T) {
ctx, cancel, client := setupFEVMTest(t)
defer cancel()
//install contract Actor
filenameActor := "contracts/RecCall.hex"
fromAddr, actorAddr := client.EVM().DeployContractFromFilename(ctx, filenameActor)
testN := func(n, r int, ex exitcode.ExitCode) func(t *testing.T) {
return func(t *testing.T) {
inputData := make([]byte, 32*3)
binary.BigEndian.PutUint64(inputData[24:], uint64(n))
binary.BigEndian.PutUint64(inputData[32+24:], uint64(n))
binary.BigEndian.PutUint64(inputData[32+32+24:], uint64(r))
client.EVM().InvokeContractByFuncNameExpectExit(ctx, fromAddr, actorAddr, "exec1(uint256,uint256,uint256)", inputData, ex)
}
}
t.Run("n=0,r=1", testN(0, 1, exitcode.Ok))
t.Run("n=1,r=1", testN(1, 1, exitcode.Ok))
t.Run("n=20,r=1", testN(20, 1, exitcode.Ok))
t.Run("n=200,r=1", testN(200, 1, exitcode.Ok))
t.Run("n=251,r=1", testN(251, 1, exitcode.Ok))
t.Run("n=252,r=1-fails", testN(252, 1, exitcode.ExitCode(23))) // 23 means stack overflow
t.Run("n=0,r=10", testN(0, 10, exitcode.Ok))
t.Run("n=1,r=10", testN(1, 10, exitcode.Ok))
t.Run("n=20,r=10", testN(20, 10, exitcode.Ok))
t.Run("n=200,r=10", testN(200, 10, exitcode.Ok))
t.Run("n=251,r=10", testN(251, 10, exitcode.Ok))
t.Run("n=252,r=10-fails", testN(252, 10, exitcode.ExitCode(23)))
t.Run("n=0,r=32", testN(0, 32, exitcode.Ok))
t.Run("n=1,r=32", testN(1, 32, exitcode.Ok))
t.Run("n=20,r=32", testN(20, 32, exitcode.Ok))
t.Run("n=200,r=32", testN(200, 32, exitcode.Ok))
t.Run("n=251,r=32", testN(251, 32, exitcode.Ok))
t.Run("n=0,r=254", testN(0, 254, exitcode.Ok))
t.Run("n=251,r=170", testN(251, 170, exitcode.Ok))
t.Run("n=0,r=255-fails", testN(0, 255, exitcode.ExitCode(33))) // 33 means transaction reverted
t.Run("n=251,r=171-fails", testN(251, 171, exitcode.ExitCode(33)))
}

View File

@ -21,8 +21,10 @@ import (
builtintypes "github.com/filecoin-project/go-state-types/builtin" builtintypes "github.com/filecoin-project/go-state-types/builtin"
"github.com/filecoin-project/go-state-types/builtin/v10/eam" "github.com/filecoin-project/go-state-types/builtin/v10/eam"
"github.com/filecoin-project/go-state-types/crypto" "github.com/filecoin-project/go-state-types/crypto"
"github.com/filecoin-project/go-state-types/exitcode"
"github.com/filecoin-project/lotus/api" "github.com/filecoin-project/lotus/api"
"github.com/filecoin-project/lotus/build"
"github.com/filecoin-project/lotus/chain/actors" "github.com/filecoin-project/lotus/chain/actors"
"github.com/filecoin-project/lotus/chain/types" "github.com/filecoin-project/lotus/chain/types"
"github.com/filecoin-project/lotus/chain/types/ethtypes" "github.com/filecoin-project/lotus/chain/types/ethtypes"
@ -83,6 +85,9 @@ func (e *EVM) DeployContractFromFilename(ctx context.Context, binFilename string
contractHex, err := os.ReadFile(binFilename) contractHex, err := os.ReadFile(binFilename)
require.NoError(e.t, err) require.NoError(e.t, err)
// strip any trailing newlines from the file
contractHex = bytes.TrimRight(contractHex, "\n")
contract, err := hex.DecodeString(string(contractHex)) contract, err := hex.DecodeString(string(contractHex))
require.NoError(e.t, err) require.NoError(e.t, err)
@ -106,11 +111,12 @@ func (e *EVM) InvokeSolidity(ctx context.Context, sender address.Address, target
params = buffer.Bytes() params = buffer.Bytes()
msg := &types.Message{ msg := &types.Message{
To: target, To: target,
From: sender, From: sender,
Value: big.Zero(), Value: big.Zero(),
Method: builtintypes.MethodsEVM.InvokeContract, Method: builtintypes.MethodsEVM.InvokeContract,
Params: params, GasLimit: build.BlockGasLimit, // note: we hardcode block gas limit due to slightly broken gas estimation - https://github.com/filecoin-project/lotus/issues/10041
Params: params,
} }
e.t.Log("sending invoke message") e.t.Log("sending invoke message")
@ -234,12 +240,18 @@ func (e *EVM) ComputeContractAddress(deployer ethtypes.EthAddress, nonce uint64)
func (e *EVM) InvokeContractByFuncName(ctx context.Context, fromAddr address.Address, idAddr address.Address, funcSignature string, inputData []byte) []byte { func (e *EVM) InvokeContractByFuncName(ctx context.Context, fromAddr address.Address, idAddr address.Address, funcSignature string, inputData []byte) []byte {
entryPoint := CalcFuncSignature(funcSignature) entryPoint := CalcFuncSignature(funcSignature)
wait := e.InvokeSolidity(ctx, fromAddr, idAddr, entryPoint, inputData) wait := e.InvokeSolidity(ctx, fromAddr, idAddr, entryPoint, inputData)
require.True(e.t, wait.Receipt.ExitCode.IsSuccess(), "contract execution failed") require.True(e.t, wait.Receipt.ExitCode.IsSuccess(), "contract execution failed: %d", wait.Receipt.ExitCode)
result, err := cbg.ReadByteArray(bytes.NewBuffer(wait.Receipt.Return), uint64(len(wait.Receipt.Return))) result, err := cbg.ReadByteArray(bytes.NewBuffer(wait.Receipt.Return), uint64(len(wait.Receipt.Return)))
require.NoError(e.t, err) require.NoError(e.t, err)
return result return result
} }
func (e *EVM) InvokeContractByFuncNameExpectExit(ctx context.Context, fromAddr address.Address, idAddr address.Address, funcSignature string, inputData []byte, exit exitcode.ExitCode) {
entryPoint := CalcFuncSignature(funcSignature)
wait := e.InvokeSolidity(ctx, fromAddr, idAddr, entryPoint, inputData)
require.Equal(e.t, exit, wait.Receipt.ExitCode)
}
// function signatures are the first 4 bytes of the hash of the function name and types // function signatures are the first 4 bytes of the hash of the function name and types
func CalcFuncSignature(funcName string) []byte { func CalcFuncSignature(funcName string) []byte {
hasher := sha3.NewLegacyKeccak256() hasher := sha3.NewLegacyKeccak256()
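A worked illustration (editorial, not from the diff) of the rule in the comment above: the entry point is the first four bytes of the Keccak-256 hash of the canonical signature string, which is what CalcFuncSignature computes. A minimal standalone sketch:

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

// selector returns the first 4 bytes of keccak256(signature), e.g. for the
// "exec1(uint256)" signature used by the recursion tests in this changeset.
func selector(signature string) []byte {
	hasher := sha3.NewLegacyKeccak256()
	hasher.Write([]byte(signature))
	return hasher.Sum(nil)[:4]
}

func main() {
	fmt.Printf("exec1(uint256) -> 0x%x\n", selector("exec1(uint256)"))
}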

View File

@ -1,6 +1,9 @@
package kit package kit
import ( import (
"io"
"log"
logging "github.com/ipfs/go-log/v2" logging "github.com/ipfs/go-log/v2"
"github.com/filecoin-project/lotus/lib/lotuslog" "github.com/filecoin-project/lotus/lib/lotuslog"
@ -20,3 +23,13 @@ func QuietMiningLogs() {
_ = logging.SetLogLevel("rpc", "ERROR") _ = logging.SetLogLevel("rpc", "ERROR")
_ = logging.SetLogLevel("dht/RtRefreshManager", "ERROR") _ = logging.SetLogLevel("dht/RtRefreshManager", "ERROR")
} }
func QuietAllLogsExcept(names ...string) {
log.SetOutput(io.Discard) // suppress LogDatastore messages
lotuslog.SetupLogLevels()
logging.SetAllLoggers(logging.LevelError)
for _, name := range names {
_ = logging.SetLogLevel(name, "INFO")
}
}

View File

@ -1,6 +1,7 @@
package kit package kit
import ( import (
"bytes"
"context" "context"
"fmt" "fmt"
"testing" "testing"
@ -10,9 +11,11 @@ import (
"github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peer"
"github.com/multiformats/go-multiaddr" "github.com/multiformats/go-multiaddr"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
cbg "github.com/whyrusleeping/cbor-gen"
"github.com/filecoin-project/go-address" "github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/abi" "github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/go-state-types/exitcode"
"github.com/filecoin-project/lotus/api" "github.com/filecoin-project/lotus/api"
"github.com/filecoin-project/lotus/api/v1api" "github.com/filecoin-project/lotus/api/v1api"
@ -124,6 +127,50 @@ func (f *TestFullNode) AssignPrivKey(pkey *Libp2p) {
f.Pkey = pkey f.Pkey = pkey
} }
type SendCall struct {
Method abi.MethodNum
Params []byte
}
func (f *TestFullNode) MakeSendCall(m abi.MethodNum, params cbg.CBORMarshaler) SendCall {
var b bytes.Buffer
err := params.MarshalCBOR(&b)
require.NoError(f.t, err)
return SendCall{
Method: m,
Params: b.Bytes(),
}
}
func (f *TestFullNode) ExpectSend(ctx context.Context, from, to address.Address, value types.BigInt, errContains string, sc ...SendCall) *types.SignedMessage {
msg := &types.Message{From: from, To: to, Value: value}
if len(sc) == 1 {
msg.Method = sc[0].Method
msg.Params = sc[0].Params
}
_, err := f.GasEstimateMessageGas(ctx, msg, nil, types.EmptyTSK)
if errContains != "" {
require.ErrorContains(f.t, err, errContains)
return nil
}
require.NoError(f.t, err)
if errContains == "" {
m, err := f.MpoolPushMessage(ctx, msg, nil)
require.NoError(f.t, err)
r, err := f.StateWaitMsg(ctx, m.Cid(), 1, api.LookbackNoLimit, true)
require.NoError(f.t, err)
require.Equal(f.t, exitcode.Ok, r.Receipt.ExitCode)
return m
}
return nil
}
// ChainPredicate encapsulates a chain condition. // ChainPredicate encapsulates a chain condition.
type ChainPredicate func(set *types.TipSet) bool type ChainPredicate func(set *types.TipSet) bool
@ -135,13 +182,21 @@ func HeightAtLeast(target abi.ChainEpoch) ChainPredicate {
} }
} }
// BlockMinedBy returns a ChainPredicate that is satisfied when we observe the // BlocksMinedByAll returns a ChainPredicate that is satisfied when we observe a
// first block mined by the specified miner. // tipset including blocks from all the specified miners, in no particular order.
func BlockMinedBy(miner address.Address) ChainPredicate { func BlocksMinedByAll(miner ...address.Address) ChainPredicate {
return func(ts *types.TipSet) bool { return func(ts *types.TipSet) bool {
seen := make([]bool, len(miner))
var done int
for _, b := range ts.Blocks() { for _, b := range ts.Blocks() {
if b.Miner == miner { for i, m := range miner {
return true if b.Miner != m || seen[i] {
continue
}
seen[i] = true
if done++; done == len(miner) {
return true
}
} }
} }
return false return false

View File

@ -58,6 +58,15 @@ var DefaultNodeOpts = nodeOpts{
sectors: DefaultPresealsPerBootstrapMiner, sectors: DefaultPresealsPerBootstrapMiner,
sectorSize: abi.SectorSize(2 << 10), // 2KiB. sectorSize: abi.SectorSize(2 << 10), // 2KiB.
cfgOpts: []CfgOption{
func(cfg *config.FullNode) error {
// test defaults
cfg.Fevm.EnableEthRPC = true
return nil
},
},
workerTasks: []sealtasks.TaskType{sealtasks.TTFetch, sealtasks.TTCommit1, sealtasks.TTFinalize, sealtasks.TTFinalizeUnsealed}, workerTasks: []sealtasks.TaskType{sealtasks.TTFetch, sealtasks.TTCommit1, sealtasks.TTFinalize, sealtasks.TTFinalizeUnsealed},
workerStorageOpt: func(store paths.Store) paths.Store { return store }, workerStorageOpt: func(store paths.Store) paths.Store { return store },
} }
@ -281,25 +290,16 @@ func SplitstoreMessges() NodeOpt {
}) })
} }
func RealTimeFilterAPI() NodeOpt { func WithEthRPC() NodeOpt {
return WithCfgOpt(func(cfg *config.FullNode) error { return WithCfgOpt(func(cfg *config.FullNode) error {
cfg.ActorEvent.EnableRealTimeFilterAPI = true cfg.Fevm.EnableEthRPC = true
return nil return nil
}) })
} }
func HistoricFilterAPI(dbpath string) NodeOpt { func DisableEthRPC() NodeOpt {
return WithCfgOpt(func(cfg *config.FullNode) error { return WithCfgOpt(func(cfg *config.FullNode) error {
cfg.ActorEvent.EnableRealTimeFilterAPI = true cfg.Fevm.EnableEthRPC = false
cfg.ActorEvent.EnableHistoricFilterAPI = true
cfg.ActorEvent.ActorEventDatabasePath = dbpath
return nil
})
}
func EthTxHashLookup() NodeOpt {
return WithCfgOpt(func(cfg *config.FullNode) error {
cfg.Fevm.EnableEthHashToFilecoinCidMapping = true
return nil return nil
}) })
} }

View File

@ -351,13 +351,11 @@ func splitStorePruneIndex(ctx context.Context, t *testing.T, n *kit.TestFullNode
} }
func ipldExists(ctx context.Context, t *testing.T, c cid.Cid, n *kit.TestFullNode) bool { func ipldExists(ctx context.Context, t *testing.T, c cid.Cid, n *kit.TestFullNode) bool {
_, err := n.ChainReadObj(ctx, c) found, err := n.ChainHasObj(ctx, c)
if ipld.IsNotFound(err) { if err != nil {
return false t.Fatalf("ChainHasObj failure: %s", err)
} else if err != nil {
t.Fatalf("ChainReadObj failure on existence check: %s", err)
} }
return true return found
} }
// Create on chain unreachable garbage for a network to exercise splitstore // Create on chain unreachable garbage for a network to exercise splitstore
@ -414,12 +412,10 @@ func (g *Garbager) Exists(ctx context.Context, c cid.Cid) bool {
return false return false
} else if err != nil { } else if err != nil {
g.t.Fatalf("ChainReadObj failure on existence check: %s", err) g.t.Fatalf("ChainReadObj failure on existence check: %s", err)
return false // unreachable
} else { } else {
return true return true
} }
g.t.Fatal("unreachable")
return false
} }
func (g *Garbager) newPeerID(ctx context.Context) abi.ChainEpoch { func (g *Garbager) newPeerID(ctx context.Context) abi.ChainEpoch {

View File

@ -68,7 +68,7 @@ func (delegatedSigner) Verify(sig []byte, a address.Address, msg []byte) error {
} }
if maybeaddr != a { if maybeaddr != a {
return fmt.Errorf("signature did not match") return fmt.Errorf("signature did not match maybeaddr: %s, signer: %s", maybeaddr, a)
} }
return nil return nil

View File

@ -259,9 +259,10 @@ func ConfigFullNode(c interface{}) Option {
// Actor event filtering support // Actor event filtering support
Override(new(events.EventAPI), From(new(modules.EventAPI))), Override(new(events.EventAPI), From(new(modules.EventAPI))),
// in lite-mode Eth event api is provided by gateway // in lite-mode Eth event api is provided by gateway
ApplyIf(isFullNode, Override(new(full.EthEventAPI), modules.EthEventAPI(cfg.ActorEvent))), ApplyIf(isFullNode, Override(new(full.EthEventAPI), modules.EthEventAPI(cfg.Fevm))),
Override(new(full.EthModuleAPI), modules.EthModuleAPI(cfg.Fevm)), If(cfg.Fevm.EnableEthRPC, Override(new(full.EthModuleAPI), modules.EthModuleAPI(cfg.Fevm))),
If(!cfg.Fevm.EnableEthRPC, Override(new(full.EthModuleAPI), &full.EthModuleDummy{})),
) )
} }
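A hedged sketch (editorial, not part of the diff) of what this wiring means for callers: with Fevm.EnableEthRPC set to false the node serves EthModuleDummy, so every eth_ method returns ErrModuleDisabled, which is what the DisableEthRPC itest asserts. Assumes the full package lives at node/impl/full as referenced above.

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/filecoin-project/lotus/node/impl/full"
)

func main() {
	dummy := &full.EthModuleDummy{}
	_, err := dummy.EthBlockNumber(context.Background())
	fmt.Println(errors.Is(err, full.ErrModuleDisabled)) // prints: true
}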

View File

@ -99,17 +99,17 @@ func DefaultFullNode() *FullNode {
}, },
}, },
Cluster: *DefaultUserRaftConfig(), Cluster: *DefaultUserRaftConfig(),
ActorEvent: ActorEventConfig{
EnableRealTimeFilterAPI: false,
EnableHistoricFilterAPI: false,
FilterTTL: Duration(time.Hour * 24),
MaxFilters: 100,
MaxFilterResults: 10000,
MaxFilterHeightRange: 2880, // conservative limit of one day
},
Fevm: FevmConfig{ Fevm: FevmConfig{
EnableEthHashToFilecoinCidMapping: false, EnableEthRPC: false,
EthTxHashMappingLifetimeDays: 0, EthTxHashMappingLifetimeDays: 0,
Events: Events{
DisableRealTimeFilterAPI: false,
DisableHistoricFilterAPI: false,
FilterTTL: Duration(time.Hour * 24),
MaxFilters: 100,
MaxFilterResults: 10000,
MaxFilterHeightRange: 2880, // conservative limit of one day
},
}, },
} }
} }

View File

@ -29,56 +29,6 @@ var Doc = map[string][]DocField{
Comment: ``, Comment: ``,
}, },
}, },
"ActorEventConfig": []DocField{
{
Name: "EnableRealTimeFilterAPI",
Type: "bool",
Comment: `EnableRealTimeFilterAPI enables APIs that can create and query filters for actor events as they are emitted.`,
},
{
Name: "EnableHistoricFilterAPI",
Type: "bool",
Comment: `EnableHistoricFilterAPI enables APIs that can create and query filters for actor events that occurred in the past.
A queryable index of events will be maintained.`,
},
{
Name: "FilterTTL",
Type: "Duration",
Comment: `FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than
this time become eligible for automatic deletion.`,
},
{
Name: "MaxFilters",
Type: "int",
Comment: `MaxFilters specifies the maximum number of filters that may exist at any one time.`,
},
{
Name: "MaxFilterResults",
Type: "int",
Comment: `MaxFilterResults specifies the maximum number of results that can be accumulated by an actor event filter.`,
},
{
Name: "MaxFilterHeightRange",
Type: "uint64",
Comment: `MaxFilterHeightRange specifies the maximum range of heights that can be used in a filter (to avoid querying
the entire chain)`,
},
{
Name: "ActorEventDatabasePath",
Type: "string",
Comment: `ActorEventDatabasePath is the full path to a sqlite database that will be used to index actor events to
support the historic filter APIs. If the database does not exist it will be created. The directory containing
the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as
relative to the CWD (current working directory).`,
},
},
"Backup": []DocField{ "Backup": []DocField{
{ {
Name: "DisableMetadataLog", Name: "DisableMetadataLog",
@ -391,6 +341,59 @@ see https://lotus.filecoin.io/storage-providers/advanced-configurations/market/#
Comment: ``, Comment: ``,
}, },
}, },
"Events": []DocField{
{
Name: "DisableRealTimeFilterAPI",
Type: "bool",
Comment: `DisableRealTimeFilterAPI will disable the RealTimeFilterAPI that can create and query filters for actor events as they are emitted.
The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.`,
},
{
Name: "DisableHistoricFilterAPI",
Type: "bool",
Comment: `DisableHistoricFilterAPI will disable the HistoricFilterAPI that can create and query filters for actor events
that occurred in the past. HistoricFilterAPI maintains a queryable index of events.
The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.`,
},
{
Name: "FilterTTL",
Type: "Duration",
Comment: `FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than
this time become eligible for automatic deletion.`,
},
{
Name: "MaxFilters",
Type: "int",
Comment: `MaxFilters specifies the maximum number of filters that may exist at any one time.`,
},
{
Name: "MaxFilterResults",
Type: "int",
Comment: `MaxFilterResults specifies the maximum number of results that can be accumulated by an actor event filter.`,
},
{
Name: "MaxFilterHeightRange",
Type: "uint64",
Comment: `MaxFilterHeightRange specifies the maximum range of heights that can be used in a filter (to avoid querying
the entire chain)`,
},
{
Name: "DatabasePath",
Type: "string",
Comment: `DatabasePath is the full path to a sqlite database that will be used to index actor events to
support the historic filter APIs. If the database does not exist it will be created. The directory containing
the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as
relative to the CWD (current working directory).`,
},
},
"FeeConfig": []DocField{ "FeeConfig": []DocField{
{ {
Name: "DefaultMaxFee", Name: "DefaultMaxFee",
@ -401,11 +404,11 @@ see https://lotus.filecoin.io/storage-providers/advanced-configurations/market/#
}, },
"FevmConfig": []DocField{ "FevmConfig": []DocField{
{ {
Name: "EnableEthHashToFilecoinCidMapping", Name: "EnableEthRPC",
Type: "bool", Type: "bool",
Comment: `EnableEthHashToFilecoinCidMapping enables storing a mapping of eth transaction hashes to filecoin message Cids Comment: `EnableEthRPC enables eth_ rpc, and enables storing a mapping of eth transaction hashes to filecoin message Cids.
You will not be able to look up ethereum transactions by their hash if this is disabled.`, This will also enable the RealTimeFilterAPI and HistoricFilterAPI by default, but they can be disabled by config options above.`,
}, },
{ {
Name: "EthTxHashMappingLifetimeDays", Name: "EthTxHashMappingLifetimeDays",
@ -414,6 +417,12 @@ You will not be able to look up ethereum transactions by their hash if this is d
Comment: `EthTxHashMappingLifetimeDays the transaction hash lookup database will delete mappings that have been stored for more than x days Comment: `EthTxHashMappingLifetimeDays the transaction hash lookup database will delete mappings that have been stored for more than x days
Set to 0 to keep all mappings`, Set to 0 to keep all mappings`,
}, },
{
Name: "Events",
Type: "Events",
Comment: ``,
},
}, },
"FullNode": []DocField{ "FullNode": []DocField{
{ {
@ -446,12 +455,6 @@ Set to 0 to keep all mappings`,
Comment: ``, Comment: ``,
}, },
{
Name: "ActorEvent",
Type: "ActorEventConfig",
Comment: ``,
},
{ {
Name: "Fevm", Name: "Fevm",
Type: "FevmConfig", Type: "FevmConfig",

View File

@ -27,7 +27,6 @@ type FullNode struct {
Fees FeeConfig Fees FeeConfig
Chainstore Chainstore Chainstore Chainstore
Cluster UserRaftConfig Cluster UserRaftConfig
ActorEvent ActorEventConfig
Fevm FevmConfig Fevm FevmConfig
} }
@ -660,13 +659,28 @@ type UserRaftConfig struct {
Tracing bool Tracing bool
} }
type ActorEventConfig struct { type FevmConfig struct {
// EnableRealTimeFilterAPI enables APIs that can create and query filters for actor events as they are emitted. // EnableEthRPC enables eth_ rpc, and enables storing a mapping of eth transaction hashes to filecoin message Cids.
EnableRealTimeFilterAPI bool // This will also enable the RealTimeFilterAPI and HistoricFilterAPI by default, but they can be disabled by config options above.
EnableEthRPC bool
// EnableHistoricFilterAPI enables APIs that can create and query filters for actor events that occurred in the past. // EthTxHashMappingLifetimeDays the transaction hash lookup database will delete mappings that have been stored for more than x days
// A queryable index of events will be maintained. // Set to 0 to keep all mappings
EnableHistoricFilterAPI bool EthTxHashMappingLifetimeDays int
Events Events
}
type Events struct {
// DisableRealTimeFilterAPI will disable the RealTimeFilterAPI that can create and query filters for actor events as they are emitted.
// The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.
DisableRealTimeFilterAPI bool
// DisableHistoricFilterAPI will disable the HistoricFilterAPI that can create and query filters for actor events
// that occurred in the past. HistoricFilterAPI maintains a queryable index of events.
// The API is enabled when EnableEthRPC is true, but can be disabled selectively with this flag.
DisableHistoricFilterAPI bool
// FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than // FilterTTL specifies the time to live for actor event filters. Filters that haven't been accessed longer than
// this time become eligible for automatic deletion. // this time become eligible for automatic deletion.
@ -682,23 +696,14 @@ type ActorEventConfig struct {
// the entire chain) // the entire chain)
MaxFilterHeightRange uint64 MaxFilterHeightRange uint64
// ActorEventDatabasePath is the full path to a sqlite database that will be used to index actor events to // DatabasePath is the full path to a sqlite database that will be used to index actor events to
// support the historic filter APIs. If the database does not exist it will be created. The directory containing // support the historic filter APIs. If the database does not exist it will be created. The directory containing
// the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as // the database must already exist and be writeable. If a relative path is provided here, sqlite treats it as
// relative to the CWD (current working directory). // relative to the CWD (current working directory).
ActorEventDatabasePath string DatabasePath string
// Others, not implemented yet: // Others, not implemented yet:
// Set a limit on the number of active websocket subscriptions (may be zero) // Set a limit on the number of active websocket subscriptions (may be zero)
// Set a timeout for subscription clients // Set a timeout for subscription clients
// Set upper bound on index size // Set upper bound on index size
} }
type FevmConfig struct {
// EnableEthHashToFilecoinCidMapping enables storing a mapping of eth transaction hashes to filecoin message Cids
// You will not be able to look up ethereum transactions by their hash if this is disabled.
EnableEthHashToFilecoinCidMapping bool
// EthTxHashMappingLifetimeDays the transaction hash lookup database will delete mappings that have been stored for more than x days
// Set to 0 to keep all mappings
EthTxHashMappingLifetimeDays int
}

View File

@ -4,106 +4,122 @@ import (
"context" "context"
"errors" "errors"
"github.com/ipfs/go-cid"
"github.com/filecoin-project/lotus/api" "github.com/filecoin-project/lotus/api"
"github.com/filecoin-project/lotus/chain/types/ethtypes" "github.com/filecoin-project/lotus/chain/types/ethtypes"
) )
var ErrImplementMe = errors.New("Not implemented yet") var ErrModuleDisabled = errors.New("module disabled, enable with Fevm.EnableEthRPC / LOTUS_FEVM_ENABLEETHRPC")
type EthModuleDummy struct{} type EthModuleDummy struct{}
func (e *EthModuleDummy) EthGetMessageCidByTransactionHash(ctx context.Context, txHash *ethtypes.EthHash) (*cid.Cid, error) {
return nil, ErrModuleDisabled
}
func (e *EthModuleDummy) EthGetTransactionHashByCid(ctx context.Context, cid cid.Cid) (*ethtypes.EthHash, error) {
return nil, ErrModuleDisabled
}
func (e *EthModuleDummy) EthBlockNumber(ctx context.Context) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthBlockNumber(ctx context.Context) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthAccounts(ctx context.Context) ([]ethtypes.EthAddress, error) { func (e *EthModuleDummy) EthAccounts(ctx context.Context) ([]ethtypes.EthAddress, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetBlockTransactionCountByNumber(ctx context.Context, blkNum ethtypes.EthUint64) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthGetBlockTransactionCountByNumber(ctx context.Context, blkNum ethtypes.EthUint64) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetBlockTransactionCountByHash(ctx context.Context, blkHash ethtypes.EthHash) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthGetBlockTransactionCountByHash(ctx context.Context, blkHash ethtypes.EthHash) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetBlockByHash(ctx context.Context, blkHash ethtypes.EthHash, fullTxInfo bool) (ethtypes.EthBlock, error) { func (e *EthModuleDummy) EthGetBlockByHash(ctx context.Context, blkHash ethtypes.EthHash, fullTxInfo bool) (ethtypes.EthBlock, error) {
return ethtypes.EthBlock{}, ErrImplementMe return ethtypes.EthBlock{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetBlockByNumber(ctx context.Context, blkNum string, fullTxInfo bool) (ethtypes.EthBlock, error) { func (e *EthModuleDummy) EthGetBlockByNumber(ctx context.Context, blkNum string, fullTxInfo bool) (ethtypes.EthBlock, error) {
return ethtypes.EthBlock{}, ErrImplementMe return ethtypes.EthBlock{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetTransactionByHash(ctx context.Context, txHash *ethtypes.EthHash) (*ethtypes.EthTx, error) { func (e *EthModuleDummy) EthGetTransactionByHash(ctx context.Context, txHash *ethtypes.EthHash) (*ethtypes.EthTx, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetTransactionCount(ctx context.Context, sender ethtypes.EthAddress, blkOpt string) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthGetTransactionCount(ctx context.Context, sender ethtypes.EthAddress, blkOpt string) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetTransactionReceipt(ctx context.Context, txHash ethtypes.EthHash) (*api.EthTxReceipt, error) { func (e *EthModuleDummy) EthGetTransactionReceipt(ctx context.Context, txHash ethtypes.EthHash) (*api.EthTxReceipt, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetTransactionByBlockHashAndIndex(ctx context.Context, blkHash ethtypes.EthHash, txIndex ethtypes.EthUint64) (ethtypes.EthTx, error) { func (e *EthModuleDummy) EthGetTransactionByBlockHashAndIndex(ctx context.Context, blkHash ethtypes.EthHash, txIndex ethtypes.EthUint64) (ethtypes.EthTx, error) {
return ethtypes.EthTx{}, ErrImplementMe return ethtypes.EthTx{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetTransactionByBlockNumberAndIndex(ctx context.Context, blkNum ethtypes.EthUint64, txIndex ethtypes.EthUint64) (ethtypes.EthTx, error) { func (e *EthModuleDummy) EthGetTransactionByBlockNumberAndIndex(ctx context.Context, blkNum ethtypes.EthUint64, txIndex ethtypes.EthUint64) (ethtypes.EthTx, error) {
return ethtypes.EthTx{}, ErrImplementMe return ethtypes.EthTx{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetCode(ctx context.Context, address ethtypes.EthAddress, blkOpt string) (ethtypes.EthBytes, error) { func (e *EthModuleDummy) EthGetCode(ctx context.Context, address ethtypes.EthAddress, blkOpt string) (ethtypes.EthBytes, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetStorageAt(ctx context.Context, address ethtypes.EthAddress, position ethtypes.EthBytes, blkParam string) (ethtypes.EthBytes, error) { func (e *EthModuleDummy) EthGetStorageAt(ctx context.Context, address ethtypes.EthAddress, position ethtypes.EthBytes, blkParam string) (ethtypes.EthBytes, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGetBalance(ctx context.Context, address ethtypes.EthAddress, blkParam string) (ethtypes.EthBigInt, error) { func (e *EthModuleDummy) EthGetBalance(ctx context.Context, address ethtypes.EthAddress, blkParam string) (ethtypes.EthBigInt, error) {
return ethtypes.EthBigIntZero, ErrImplementMe return ethtypes.EthBigIntZero, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthFeeHistory(ctx context.Context, blkCount ethtypes.EthUint64, newestBlk string, rewardPercentiles []float64) (ethtypes.EthFeeHistory, error) { func (e *EthModuleDummy) EthFeeHistory(ctx context.Context, blkCount ethtypes.EthUint64, newestBlk string, rewardPercentiles []float64) (ethtypes.EthFeeHistory, error) {
return ethtypes.EthFeeHistory{}, ErrImplementMe return ethtypes.EthFeeHistory{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthChainId(ctx context.Context) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthChainId(ctx context.Context) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) NetVersion(ctx context.Context) (string, error) { func (e *EthModuleDummy) NetVersion(ctx context.Context) (string, error) {
return "", ErrImplementMe return "", ErrModuleDisabled
} }
func (e *EthModuleDummy) NetListening(ctx context.Context) (bool, error) { func (e *EthModuleDummy) NetListening(ctx context.Context) (bool, error) {
return false, ErrImplementMe return false, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthProtocolVersion(ctx context.Context) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthProtocolVersion(ctx context.Context) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthGasPrice(ctx context.Context) (ethtypes.EthBigInt, error) { func (e *EthModuleDummy) EthGasPrice(ctx context.Context) (ethtypes.EthBigInt, error) {
return ethtypes.EthBigIntZero, ErrImplementMe return ethtypes.EthBigIntZero, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthEstimateGas(ctx context.Context, tx ethtypes.EthCall) (ethtypes.EthUint64, error) { func (e *EthModuleDummy) EthEstimateGas(ctx context.Context, tx ethtypes.EthCall) (ethtypes.EthUint64, error) {
return 0, ErrImplementMe return 0, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthCall(ctx context.Context, tx ethtypes.EthCall, blkParam string) (ethtypes.EthBytes, error) { func (e *EthModuleDummy) EthCall(ctx context.Context, tx ethtypes.EthCall, blkParam string) (ethtypes.EthBytes, error) {
return nil, ErrImplementMe return nil, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthMaxPriorityFeePerGas(ctx context.Context) (ethtypes.EthBigInt, error) { func (e *EthModuleDummy) EthMaxPriorityFeePerGas(ctx context.Context) (ethtypes.EthBigInt, error) {
return ethtypes.EthBigIntZero, ErrImplementMe return ethtypes.EthBigIntZero, ErrModuleDisabled
} }
func (e *EthModuleDummy) EthSendRawTransaction(ctx context.Context, rawTx ethtypes.EthBytes) (ethtypes.EthHash, error) { func (e *EthModuleDummy) EthSendRawTransaction(ctx context.Context, rawTx ethtypes.EthBytes) (ethtypes.EthHash, error) {
return ethtypes.EthHash{}, ErrImplementMe return ethtypes.EthHash{}, ErrModuleDisabled
} }
func (e *EthModuleDummy) Web3ClientVersion(ctx context.Context) (string, error) {
return "", ErrModuleDisabled
}
var _ EthModuleAPI = &EthModuleDummy{}

View File

@@ -64,6 +64,7 @@ type EthModuleAPI interface {
	EthCall(ctx context.Context, tx ethtypes.EthCall, blkParam string) (ethtypes.EthBytes, error)
	EthMaxPriorityFeePerGas(ctx context.Context) (ethtypes.EthBigInt, error)
	EthSendRawTransaction(ctx context.Context, rawTx ethtypes.EthBytes) (ethtypes.EthHash, error)
+	Web3ClientVersion(ctx context.Context) (string, error)
}

type EthEventAPI interface {
@@ -257,14 +258,11 @@ func (a *EthModule) EthGetTransactionByHash(ctx context.Context, txHash *ethtype
		return nil, nil
	}

-	c := cid.Undef
-	if a.EthTxHashManager != nil {
-		var err error
-		c, err = a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(*txHash)
-		if err != nil {
-			log.Debug("could not find transaction hash %s in lookup table", txHash.String())
-		}
+	c, err := a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(*txHash)
+	if err != nil {
+		log.Debug("could not find transaction hash %s in lookup table", txHash.String())
	}

	// This isn't an eth transaction we have the mapping for, so let's look it up as a filecoin message
	if c == cid.Undef {
		c = txHash.ToCid()
@@ -306,25 +304,22 @@ func (a *EthModule) EthGetMessageCidByTransactionHash(ctx context.Context, txHas
		return nil, nil
	}

-	c := cid.Undef
-	if a.EthTxHashManager != nil {
-		var err error
-		c, err = a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(*txHash)
-		// We fall out of the first condition and continue
-		if errors.Is(err, ethhashlookup.ErrNotFound) {
-			log.Debug("could not find transaction hash %s in lookup table", txHash.String())
-		} else if err != nil {
-			return nil, xerrors.Errorf("database error: %w", err)
-		} else {
-			return &c, nil
-		}
+	c, err := a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(*txHash)
+	// We fall out of the first condition and continue
+	if errors.Is(err, ethhashlookup.ErrNotFound) {
+		log.Debug("could not find transaction hash %s in lookup table", txHash.String())
+	} else if err != nil {
+		return nil, xerrors.Errorf("database error: %w", err)
+	} else {
+		return &c, nil
	}

	// This isn't an eth transaction we have the mapping for, so let's try looking it up as a filecoin message
	if c == cid.Undef {
		c = txHash.ToCid()
	}

-	_, err := a.StateAPI.Chain.GetSignedMessage(ctx, c)
+	_, err = a.StateAPI.Chain.GetSignedMessage(ctx, c)
	if err == nil {
		// This is an Eth Tx, Secp message, Or BLS message in the mpool
		return &c, nil
@@ -369,14 +364,11 @@ func (a *EthModule) EthGetTransactionCount(ctx context.Context, sender ethtypes.
}

func (a *EthModule) EthGetTransactionReceipt(ctx context.Context, txHash ethtypes.EthHash) (*api.EthTxReceipt, error) {
-	c := cid.Undef
-	if a.EthTxHashManager != nil {
-		var err error
-		c, err = a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(txHash)
-		if err != nil {
-			log.Debug("could not find transaction hash %s in lookup table", txHash.String())
-		}
+	c, err := a.EthTxHashManager.TransactionHashLookup.GetCidFromHash(txHash)
+	if err != nil {
+		log.Debug("could not find transaction hash %s in lookup table", txHash.String())
	}

	// This isn't an eth transaction we have the mapping for, so let's look it up as a filecoin message
	if c == cid.Undef {
		c = txHash.ToCid()
@@ -509,18 +501,8 @@ func (a *EthModule) EthGetStorageAt(ctx context.Context, ethAddr ethtypes.EthAdd
		return nil, fmt.Errorf("failed to construct system sender address: %w", err)
	}

-	// TODO super duper hack (raulk). The EVM runtime actor uses the U256 parameter type in
-	// GetStorageAtParams, which serializes as a hex-encoded string. It should serialize
-	// as bytes. We didn't get to fix in time for Iron, so for now we just pass
-	// through the hex-encoded value passed through the Eth JSON-RPC API, by remarshalling it.
-	// We don't fix this at origin (builtin-actors) because we are not updating the bundle
-	// for Iron.
-	tmp, err := position.MarshalJSON()
-	if err != nil {
-		panic(err)
-	}
	params, err := actors.SerializeParams(&evm.GetStorageAtParams{
-		StorageKey: tmp[1 : len(tmp)-1], // TODO strip the JSON-encoding quotes -- yuck
+		StorageKey: position,
	})
	if err != nil {
		return nil, fmt.Errorf("failed to serialize parameters: %w", err)
@@ -723,6 +705,10 @@ func (a *EthModule) EthSendRawTransaction(ctx context.Context, rawTx ethtypes.Et
	return ethtypes.EthHashFromTxBytes(rawTx), nil
}

+func (a *EthModule) Web3ClientVersion(ctx context.Context) (string, error) {
+	return build.UserVersion(), nil
+}
+
func (a *EthModule) ethCallToFilecoinMessage(ctx context.Context, tx ethtypes.EthCall) (*types.Message, error) {
	var from address.Address
	if tx.From == nil || *tx.From == (ethtypes.EthAddress{}) {
@@ -836,8 +822,9 @@ func (a *EthModule) EthEstimateGas(ctx context.Context, tx ethtypes.EthCall) (et
func (a *EthModule) EthCall(ctx context.Context, tx ethtypes.EthCall, blkParam string) (ethtypes.EthBytes, error) {
	msg, err := a.ethCallToFilecoinMessage(ctx, tx)
	if err != nil {
-		return nil, err
+		return nil, xerrors.Errorf("failed to convert ethcall to filecoin message: %w", err)
	}

	ts, err := a.parseBlkParam(ctx, blkParam)
	if err != nil {
		return nil, xerrors.Errorf("cannot parse block param: %s", blkParam)
@@ -845,11 +832,17 @@ func (a *EthModule) EthCall(ctx context.Context, tx ethtypes.EthCall, blkParam s
	invokeResult, err := a.applyMessage(ctx, msg, ts.Key())
	if err != nil {
-		return nil, err
+		return nil, xerrors.Errorf("failed to apply message: %w", err)
	}
-	if len(invokeResult.MsgRct.Return) > 0 {
+
+	if msg.To == builtintypes.EthereumAddressManagerActorAddr {
+		// As far as I can tell, the Eth API always returns empty on contract deployment
+		return ethtypes.EthBytes{}, nil
+	} else if len(invokeResult.MsgRct.Return) > 0 {
		return cbg.ReadByteArray(bytes.NewReader(invokeResult.MsgRct.Return), uint64(len(invokeResult.MsgRct.Return)))
	}

	return ethtypes.EthBytes{}, nil
}
@@ -961,18 +954,20 @@ func (e *EthEvent) installEthFilterSpec(ctx context.Context, filterSpec *ethtype
		// Here the client is looking for events between the head and some future height
		ts := e.Chain.GetHeaviestTipSet()
		if maxHeight-ts.Height() > e.MaxFilterHeightRange {
-			return nil, xerrors.Errorf("invalid epoch range")
+			return nil, xerrors.Errorf("invalid epoch range: to block is too far in the future (maximum: %d)", e.MaxFilterHeightRange)
		}
	} else if minHeight >= 0 && maxHeight == -1 {
		// Here the client is looking for events between some time in the past and the current head
		ts := e.Chain.GetHeaviestTipSet()
		if ts.Height()-minHeight > e.MaxFilterHeightRange {
-			return nil, xerrors.Errorf("invalid epoch range")
+			return nil, xerrors.Errorf("invalid epoch range: from block is too far in the past (maximum: %d)", e.MaxFilterHeightRange)
		}
	} else if minHeight >= 0 && maxHeight >= 0 {
-		if minHeight > maxHeight || maxHeight-minHeight > e.MaxFilterHeightRange {
-			return nil, xerrors.Errorf("invalid epoch range")
+		if minHeight > maxHeight {
+			return nil, xerrors.Errorf("invalid epoch range: to block (%d) must be after from block (%d)", minHeight, maxHeight)
+		} else if maxHeight-minHeight > e.MaxFilterHeightRange {
+			return nil, xerrors.Errorf("invalid epoch range: range between to and from blocks is too large (maximum: %d)", e.MaxFilterHeightRange)
		}
	}
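The more specific error messages also make the validation rules easier to state in one place. A standalone Go sketch of the three cases (the enclosing branch conditions are only partly visible in this hunk, so the -1 "unbounded" sentinels and the 2880-epoch window are illustrative assumptions, not values taken from the code):

package main

import "fmt"

// checkEpochRange mirrors the height-range checks in installEthFilterSpec.
func checkEpochRange(minHeight, maxHeight, head, maxRange int64) error {
	switch {
	case minHeight == -1 && maxHeight >= 0: // head .. some future block
		if maxHeight-head > maxRange {
			return fmt.Errorf("to block is too far in the future (maximum: %d)", maxRange)
		}
	case minHeight >= 0 && maxHeight == -1: // some past block .. head
		if head-minHeight > maxRange {
			return fmt.Errorf("from block is too far in the past (maximum: %d)", maxRange)
		}
	case minHeight >= 0 && maxHeight >= 0: // fully bounded range
		if minHeight > maxHeight {
			return fmt.Errorf("to block (%d) must be after from block (%d)", maxHeight, minHeight)
		}
		if maxHeight-minHeight > maxRange {
			return fmt.Errorf("range between to and from blocks is too large (maximum: %d)", maxRange)
		}
	}
	return nil
}

func main() {
	// With an illustrative 2880-epoch window, a 10,000-epoch historic range is rejected.
	fmt.Println(checkEpochRange(1_000, 11_000, 12_000, 2_880))
}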
@@ -987,14 +982,9 @@ func (e *EthEvent) installEthFilterSpec(ctx context.Context, filterSpec *ethtype
		addresses = append(addresses, a)
	}

-	for idx, vals := range filterSpec.Topics {
-		// Ethereum topics are emitted using `LOG{0..4}` opcodes resulting in topics1..4
-		key := fmt.Sprintf("topic%d", idx+1)
-		keyvals := make([][]byte, len(vals))
-		for i, v := range vals {
-			keyvals[i] = v[:]
-		}
-		keys[key] = keyvals
+	keys, err := parseEthTopics(filterSpec.Topics)
+	if err != nil {
+		return nil, err
	}

	return e.EventFilterManager.Install(ctx, minHeight, maxHeight, tipsetCid, addresses, keys)
@@ -1019,7 +1009,6 @@ func (e *EthEvent) EthNewFilter(ctx context.Context, filterSpec *ethtypes.EthFil
		return ethtypes.EthFilterID{}, err
	}
	return ethtypes.EthFilterID(f.ID()), nil
}
@@ -1143,14 +1132,12 @@ func (e *EthEvent) EthSubscribe(ctx context.Context, eventType string, params *e
	case EthSubscribeEventTypeLogs:
		keys := map[string][][]byte{}
		if params != nil {
-			for idx, vals := range params.Topics {
-				// Ethereum topics are emitted using `LOG{0..4}` opcodes resulting in topics1..4
-				key := fmt.Sprintf("topic%d", idx+1)
-				keyvals := make([][]byte, len(vals))
-				for i, v := range vals {
-					keyvals[i] = v[:]
-				}
-				keys[key] = keyvals
-			}
+			var err error
+			keys, err = parseEthTopics(params.Topics)
+			if err != nil {
+				// clean up any previous filters added and stop the sub
+				_, _ = e.EthUnsubscribe(ctx, sub.id)
+				return nil, err
+			}
		}
@@ -1237,7 +1224,10 @@ func ethFilterResultFromEvents(evs []*filter.CollectedEvent, sa StateAPI) (*etht
		var err error

		for _, entry := range ev.Entries {
-			value := ethtypes.EthBytes(leftpad32(decodeLogBytes(entry.Value)))
+			value, err := cborDecodeTopicValue(entry.Value)
+			if err != nil {
+				return nil, err
+			}
			if entry.Key == ethtypes.EthTopic1 || entry.Key == ethtypes.EthTopic2 || entry.Key == ethtypes.EthTopic3 || entry.Key == ethtypes.EthTopic4 {
				log.Topics = append(log.Topics, value)
			} else {
@@ -1780,7 +1770,10 @@ func newEthTxReceipt(ctx context.Context, tx ethtypes.EthTx, lookup *api.MsgLook
		}

		for _, entry := range evt.Entries {
-			value := ethtypes.EthBytes(leftpad32(decodeLogBytes(entry.Value)))
+			value, err := cborDecodeTopicValue(entry.Value)
+			if err != nil {
+				return api.EthTxReceipt{}, xerrors.Errorf("failed to decode event log value: %w", err)
+			}
			if entry.Key == ethtypes.EthTopic1 || entry.Key == ethtypes.EthTopic2 || entry.Key == ethtypes.EthTopic3 || entry.Key == ethtypes.EthTopic4 {
				l.Topics = append(l.Topics, value)
			} else {
@@ -1891,25 +1884,6 @@ func EthTxHashGC(ctx context.Context, retentionDays int, manager *EthTxHashManag
	}
}

-// decodeLogBytes decodes a CBOR-serialized array into its original form.
-//
-// This function swallows errors and returns the original array if it failed
-// to decode.
-func decodeLogBytes(orig []byte) []byte {
-	if orig == nil {
-		return orig
-	}
-	decoded, err := cbg.ReadByteArray(bytes.NewReader(orig), uint64(len(orig)))
-	if err != nil {
-		return orig
-	}
-	return decoded
-}
-
-// TODO we could also emit full EVM words from the EVM runtime, but not doing so
-// makes the contract slightly cheaper (and saves storage bytes), at the expense
-// of having to left pad in the API, which is a pretty acceptable tradeoff at
-// face value. There may be other protocol implications to consider.
func leftpad32(orig []byte) []byte {
	needed := 32 - len(orig)
	if needed <= 0 {
@@ -1919,3 +1893,51 @@ func leftpad32(orig []byte) []byte {
	copy(ret[needed:], orig)
	return ret
}
func trimLeadingZeros(b []byte) []byte {
	for i := range b {
		if b[i] != 0 {
			return b[i:]
		}
	}
	return []byte{}
}

func cborEncodeTopicValue(orig []byte) ([]byte, error) {
	var buf bytes.Buffer
	err := cbg.WriteByteArray(&buf, trimLeadingZeros(orig))
	if err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

func cborDecodeTopicValue(orig []byte) ([]byte, error) {
	if len(orig) == 0 {
		return orig, nil
	}
	decoded, err := cbg.ReadByteArray(bytes.NewReader(orig), uint64(len(orig)))
	if err != nil {
		return nil, err
	}
	return leftpad32(decoded), nil
}

func parseEthTopics(topics ethtypes.EthTopicSpec) (map[string][][]byte, error) {
	keys := map[string][][]byte{}
	for idx, vals := range topics {
		if len(vals) == 0 {
			continue
		}
		// Ethereum topics are emitted using `LOG{0..4}` opcodes resulting in topics1..4
		key := fmt.Sprintf("topic%d", idx+1)
		for _, v := range vals {
			encodedVal, err := cborEncodeTopicValue(v[:])
			if err != nil {
				return nil, xerrors.Errorf("failed to encode topic value")
			}
			keys[key] = append(keys[key], encodedVal)
		}
	}
	return keys, nil
}
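These helpers store topic values in the event index as CBOR byte arrays with leading zeros trimmed, and pad them back out to 32 bytes when they are returned over the Eth API. A self-contained round-trip sketch of that encoding (a standalone illustration only; the canonical logic is cborEncodeTopicValue, cborDecodeTopicValue and leftpad32 above):

package main

import (
	"bytes"
	"fmt"

	cbg "github.com/whyrusleeping/cbor-gen"
)

func main() {
	topic := make([]byte, 32)
	topic[31] = 0x2a // the EVM word 0x00…2a

	// encode: trim leading zeros, then write a CBOR byte array (as cborEncodeTopicValue does)
	trimmed := bytes.TrimLeft(topic, "\x00")
	var buf bytes.Buffer
	if err := cbg.WriteByteArray(&buf, trimmed); err != nil {
		panic(err)
	}

	// decode: read the byte array back and left-pad to 32 bytes (cborDecodeTopicValue + leftpad32)
	decoded, err := cbg.ReadByteArray(bytes.NewReader(buf.Bytes()), uint64(buf.Len()))
	if err != nil {
		panic(err)
	}
	padded := append(make([]byte, 32-len(decoded)), decoded...)

	fmt.Printf("stored %d byte(s) in the index, recovered %x\n", len(trimmed), padded)
}

The space saving is the point of the trim: a mostly-zero EVM word costs a single payload byte in the SQLite-backed index instead of 32.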


@@ -11,9 +11,11 @@ import (
	"github.com/filecoin-project/go-state-types/crypto"
	"github.com/filecoin-project/lotus/api"
+	"github.com/filecoin-project/lotus/chain/messagesigner"
	"github.com/filecoin-project/lotus/chain/stmgr"
	"github.com/filecoin-project/lotus/chain/types"
	"github.com/filecoin-project/lotus/chain/wallet"
+	"github.com/filecoin-project/lotus/chain/wallet/key"
	"github.com/filecoin-project/lotus/lib/sigs"
)

@@ -51,12 +53,20 @@ func (a *WalletAPI) WalletSignMessage(ctx context.Context, k address.Address, ms
		return nil, xerrors.Errorf("failed to resolve ID address: %w", keyAddr)
	}

+	keyInfo, err := a.Wallet.WalletExport(ctx, k)
+	if err != nil {
+		return nil, err
+	}
+	sb, err := messagesigner.SigningBytes(msg, key.ActSigType(keyInfo.Type))
+	if err != nil {
+		return nil, err
+	}
+
	mb, err := msg.ToStorageBlock()
	if err != nil {
		return nil, xerrors.Errorf("serializing message: %w", err)
	}

-	sig, err := a.Wallet.WalletSign(ctx, keyAddr, mb.Cid().Bytes(), api.MsgMeta{
+	sig, err := a.Wallet.WalletSign(ctx, keyAddr, sb, api.MsgMeta{
		Type:  api.MTChainMsg,
		Extra: mb.RawData(),
	})


@@ -2,6 +2,7 @@ package modules

import (
	"context"
+	"path/filepath"
	"time"

	"github.com/multiformats/go-varint"
@@ -20,6 +21,7 @@ import (
	"github.com/filecoin-project/lotus/node/config"
	"github.com/filecoin-project/lotus/node/impl/full"
	"github.com/filecoin-project/lotus/node/modules/helpers"
+	"github.com/filecoin-project/lotus/node/repo"
)

type EventAPI struct {
@@ -31,16 +33,16 @@ type EventAPI struct {

var _ events.EventAPI = &EventAPI{}

-func EthEventAPI(cfg config.ActorEventConfig) func(helpers.MetricsCtx, fx.Lifecycle, *store.ChainStore, *stmgr.StateManager, EventAPI, *messagepool.MessagePool, full.StateAPI, full.ChainAPI) (*full.EthEvent, error) {
-	return func(mctx helpers.MetricsCtx, lc fx.Lifecycle, cs *store.ChainStore, sm *stmgr.StateManager, evapi EventAPI, mp *messagepool.MessagePool, stateapi full.StateAPI, chainapi full.ChainAPI) (*full.EthEvent, error) {
+func EthEventAPI(cfg config.FevmConfig) func(helpers.MetricsCtx, repo.LockedRepo, fx.Lifecycle, *store.ChainStore, *stmgr.StateManager, EventAPI, *messagepool.MessagePool, full.StateAPI, full.ChainAPI) (*full.EthEvent, error) {
+	return func(mctx helpers.MetricsCtx, r repo.LockedRepo, lc fx.Lifecycle, cs *store.ChainStore, sm *stmgr.StateManager, evapi EventAPI, mp *messagepool.MessagePool, stateapi full.StateAPI, chainapi full.ChainAPI) (*full.EthEvent, error) {
		ctx := helpers.LifecycleCtx(mctx, lc)

		ee := &full.EthEvent{
			Chain:                cs,
-			MaxFilterHeightRange: abi.ChainEpoch(cfg.MaxFilterHeightRange),
+			MaxFilterHeightRange: abi.ChainEpoch(cfg.Events.MaxFilterHeightRange),
		}

-		if !cfg.EnableRealTimeFilterAPI {
+		if !cfg.EnableEthRPC || cfg.Events.DisableRealTimeFilterAPI {
			// all event functionality is disabled
			// the historic filter API relies on the real time one
			return ee, nil
@@ -51,21 +53,32 @@ func EthEventAPI(cfg config.ActorEventConfig) func(helpers.MetricsCtx, fx.Lifecy
			StateAPI: stateapi,
			ChainAPI: chainapi,
		}

-		ee.FilterStore = filter.NewMemFilterStore(cfg.MaxFilters)
+		ee.FilterStore = filter.NewMemFilterStore(cfg.Events.MaxFilters)

		// Start garbage collection for filters
		lc.Append(fx.Hook{
			OnStart: func(context.Context) error {
-				go ee.GC(ctx, time.Duration(cfg.FilterTTL))
+				go ee.GC(ctx, time.Duration(cfg.Events.FilterTTL))
				return nil
			},
		})

		// Enable indexing of actor events
		var eventIndex *filter.EventIndex
-		if cfg.EnableHistoricFilterAPI {
+		if !cfg.Events.DisableHistoricFilterAPI {
+			var dbPath string
+			if cfg.Events.DatabasePath == "" {
+				sqlitePath, err := r.SqlitePath()
+				if err != nil {
+					return nil, err
+				}
+				dbPath = filepath.Join(sqlitePath, "events.db")
+			} else {
+				dbPath = cfg.Events.DatabasePath
+			}

			var err error
-			eventIndex, err = filter.NewEventIndex(cfg.ActorEventDatabasePath)
+			eventIndex, err = filter.NewEventIndex(dbPath)
			if err != nil {
				return nil, err
			}
@@ -103,13 +116,13 @@ func EthEventAPI(cfg config.ActorEventConfig) func(helpers.MetricsCtx, fx.Lifecy
				return *actor.Address, true
			},

-			MaxFilterResults: cfg.MaxFilterResults,
+			MaxFilterResults: cfg.Events.MaxFilterResults,
		}
		ee.TipSetFilterManager = &filter.TipSetFilterManager{
-			MaxFilterResults: cfg.MaxFilterResults,
+			MaxFilterResults: cfg.Events.MaxFilterResults,
		}
		ee.MemPoolFilterManager = &filter.MemPoolFilterManager{
-			MaxFilterResults: cfg.MaxFilterResults,
+			MaxFilterResults: cfg.Events.MaxFilterResults,
		}

		const ChainHeadConfidence = 1
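For reference, these are the configuration fields the constructor now reads, collected in one place. A hedged sketch with illustrative values: the field names come from the accesses above, while the concrete values, and the assumption that they compile as written (in particular the Events sub-struct field types), are not taken from this diff.

package main

import (
	"time"

	"github.com/filecoin-project/lotus/node/config"
)

func main() {
	var cfg config.FevmConfig

	cfg.EnableEthRPC = true                     // master switch: when false, event filtering is disabled entirely
	cfg.Events.DisableRealTimeFilterAPI = false // keep real-time filters on
	cfg.Events.DisableHistoricFilterAPI = false // keep the SQLite-backed historic index
	cfg.Events.DatabasePath = ""                // empty → <repo sqlite dir>/events.db, per the code above
	cfg.Events.MaxFilterHeightRange = 2880      // assumption: roughly one day of epochs
	cfg.Events.MaxFilters = 100
	cfg.Events.MaxFilterResults = 10000
	cfg.Events.FilterTTL = config.Duration(24 * time.Hour)

	_ = cfg
}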


@@ -19,26 +19,12 @@ import (

func EthModuleAPI(cfg config.FevmConfig) func(helpers.MetricsCtx, repo.LockedRepo, fx.Lifecycle, *store.ChainStore, *stmgr.StateManager, EventAPI, *messagepool.MessagePool, full.StateAPI, full.ChainAPI, full.MpoolAPI) (*full.EthModule, error) {
	return func(mctx helpers.MetricsCtx, r repo.LockedRepo, lc fx.Lifecycle, cs *store.ChainStore, sm *stmgr.StateManager, evapi EventAPI, mp *messagepool.MessagePool, stateapi full.StateAPI, chainapi full.ChainAPI, mpoolapi full.MpoolAPI) (*full.EthModule, error) {
-		em := &full.EthModule{
-			Chain:        cs,
-			Mpool:        mp,
-			StateManager: sm,
-			ChainAPI:     chainapi,
-			MpoolAPI:     mpoolapi,
-			StateAPI:     stateapi,
-		}
-
-		if !cfg.EnableEthHashToFilecoinCidMapping {
-			// mapping functionality disabled. Nothing to do here
-			return em, nil
-		}
-
-		dbPath, err := r.SqlitePath()
+		sqlitePath, err := r.SqlitePath()
		if err != nil {
			return nil, err
		}

-		transactionHashLookup, err := ethhashlookup.NewTransactionHashLookup(filepath.Join(dbPath, "txhash.db"))
+		transactionHashLookup, err := ethhashlookup.NewTransactionHashLookup(filepath.Join(sqlitePath, "txhash.db"))
		if err != nil {
			return nil, err
		}
@@ -54,8 +40,6 @@ func EthModuleAPI(cfg config.FevmConfig) func(helpers.MetricsCtx, repo.LockedRep
			TransactionHashLookup: transactionHashLookup,
		}

-		em.EthTxHashManager = &ethTxHashManager
-
		const ChainHeadConfidence = 1

		ctx := helpers.LifecycleCtx(mctx, lc)
@@ -80,6 +64,16 @@ func EthModuleAPI(cfg config.FevmConfig) func(helpers.MetricsCtx, repo.LockedRep
			},
		})

-		return em, nil
+		return &full.EthModule{
+			Chain:            cs,
+			Mpool:            mp,
+			StateManager:     sm,
+			ChainAPI:         chainapi,
+			MpoolAPI:         mpoolapi,
+			StateAPI:         stateapi,
+			EthTxHashManager: &ethTxHashManager,
+		}, nil
	}
}


@@ -1,26 +0,0 @@
#!/usr/bin/env bash
set -ex
REQUIRED=(
"ipfs"
"sha512sum"
)
for REQUIRE in "${REQUIRED[@]}"
do
command -v "${REQUIRE}" >/dev/null 2>&1 || echo >&2 "'${REQUIRE}' must be installed"
done
mkdir bundle
pushd bundle
export IPFS_PATH=`mktemp -d`
ipfs init
ipfs daemon &
PID="$!"
trap "kill -9 ${PID}" EXIT
sleep 30
cp "/tmp/workspace/appimage/Lotus-${CIRCLE_TAG}-x86_64.AppImage" .
sha512sum "Lotus-${CIRCLE_TAG}-x86_64.AppImage" > "Lotus-${CIRCLE_TAG}-x86_64.AppImage.sha512"
ipfs add -q "Lotus-${CIRCLE_TAG}-x86_64.AppImage" > "Lotus-${CIRCLE_TAG}-x86_64.AppImage.cid"
popd


@@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -ex
ARCH=$1
REQUIRED=(
"ipfs"
"sha512sum"
)
for REQUIRE in "${REQUIRED[@]}"
do
command -v "${REQUIRE}" >/dev/null 2>&1 || echo >&2 "'${REQUIRE}' must be installed"
done
mkdir bundle
pushd bundle
BINARIES=(
"lotus"
"lotus-miner"
"lotus-worker"
)
export IPFS_PATH=`mktemp -d`
ipfs init
ipfs daemon &
PID="$!"
trap "kill -9 ${PID}" EXIT
sleep 30
mkdir -p "${ARCH}/lotus"
pushd "${ARCH}"
for BINARY in "${BINARIES[@]}"
do
cp "../../${ARCH}/${BINARY}" "lotus/"
chmod +x "lotus/${BINARY}"
done
tar -zcvf "../lotus_${CIRCLE_TAG}_${ARCH}-amd64.tar.gz" lotus
popd
rm -rf "${ARCH}"
sha512sum "lotus_${CIRCLE_TAG}_${ARCH}-amd64.tar.gz" > "lotus_${CIRCLE_TAG}_${ARCH}-amd64.tar.gz.sha512"
ipfs add -q "lotus_${CIRCLE_TAG}_${ARCH}-amd64.tar.gz" > "lotus_${CIRCLE_TAG}_${ARCH}-amd64.tar.gz.cid"
popd


@@ -1,121 +0,0 @@
#!/usr/bin/env bash
set -e
ARCH=$1
pushd bundle
# make sure we have a token set, api requests won't work otherwise
if [ -z "${GITHUB_TOKEN}" ]; then
echo "\${GITHUB_TOKEN} not set, publish failed"
exit 1
fi
REQUIRED=(
"jq"
"curl"
)
for REQUIRE in "${REQUIRED[@]}"
do
command -v "${REQUIRE}" >/dev/null 2>&1 || echo >&2 "'${REQUIRE}' must be installed"
done
#see if the release already exists by tag
RELEASE_RESPONSE=`
curl \
--header "Authorization: token ${GITHUB_TOKEN}" \
"https://api.github.com/repos/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/releases/tags/${CIRCLE_TAG}"
`
RELEASE_ID=`echo "${RELEASE_RESPONSE}" | jq '.id'`
if [ "${RELEASE_ID}" = "null" ]; then
echo "creating release"
COND_CREATE_DISCUSSION=""
PRERELEASE=true
if [[ ${CIRCLE_TAG} =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
COND_CREATE_DISCUSSION="\"discussion_category_name\": \"announcement\","
PRERELEASE=false
fi
RELEASE_DATA="{
\"tag_name\": \"${CIRCLE_TAG}\",
\"target_commitish\": \"${CIRCLE_SHA1}\",
${COND_CREATE_DISCUSSION}
\"name\": \"${CIRCLE_TAG}\",
\"body\": \"\",
\"prerelease\": ${PRERELEASE}
}"
# create it if it doesn't exist yet
RELEASE_RESPONSE=`
curl \
--request POST \
--header "Authorization: token ${GITHUB_TOKEN}" \
--header "Content-Type: application/json" \
--data "${RELEASE_DATA}" \
"https://api.github.com/repos/$CIRCLE_PROJECT_USERNAME/${CIRCLE_PROJECT_REPONAME}/releases"
`
else
echo "release already exists"
fi
RELEASE_UPLOAD_URL=`echo "${RELEASE_RESPONSE}" | jq -r '.upload_url' | cut -d'{' -f1`
echo "Preparing to send artifacts to ${RELEASE_UPLOAD_URL}"
if [ $ARCH = 'linux' ]; then
artifacts=(
"lotus_${CIRCLE_TAG}_linux-amd64.tar.gz"
"lotus_${CIRCLE_TAG}_linux-amd64.tar.gz.cid"
"lotus_${CIRCLE_TAG}_linux-amd64.tar.gz.sha512"
)
elif [ $ARCH = 'darwin' ]; then
artifacts=(
"lotus_${CIRCLE_TAG}_darwin-amd64.tar.gz"
"lotus_${CIRCLE_TAG}_darwin-amd64.tar.gz.cid"
"lotus_${CIRCLE_TAG}_darwin-amd64.tar.gz.sha512"
)
elif [ $ARCH = 'appimage' ]; then
artifacts=(
"Lotus-${CIRCLE_TAG}-x86_64.AppImage"
"Lotus-${CIRCLE_TAG}-x86_64.AppImage.cid"
"Lotus-${CIRCLE_TAG}-x86_64.AppImage.sha512"
)
else
echo "$1 is not a supported architecture to publish a release for" 1>&2
exit 1
fi
for RELEASE_FILE in "${artifacts[@]}"
do
echo "Uploading ${RELEASE_FILE}..."
curl \
--request POST \
--fail \
--header "Authorization: token ${GITHUB_TOKEN}" \
--header "Content-Type: application/octet-stream" \
--data-binary "@${RELEASE_FILE}" \
"$RELEASE_UPLOAD_URL?name=$(basename "${RELEASE_FILE}")"
echo "Uploaded ${RELEASE_FILE}"
done
popd
miscellaneous=(
"README.md"
"LICENSE-MIT"
"LICENSE-APACHE"
)
for MISC in "${miscellaneous[@]}"
do
echo "Uploading release bundle: ${MISC}"
curl \
--request POST \
--header "Authorization: token ${GITHUB_TOKEN}" \
--header "Content-Type: application/octet-stream" \
--data-binary "@${MISC}" \
"$RELEASE_UPLOAD_URL?name=$(basename "${MISC}")"
echo "Release bundle uploaded: ${MISC}"
done