Update bip-39 ref and remove ebfe/scard from vendor
commit 86806d8b24 (parent 8ee5bb2289)
vendor/github.com/tyler-smith/go-bip39/Gopkg.lock (new file, generated, vendored, 15 lines)
@@ -0,0 +1,15 @@
+# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+
+
+[[projects]]
+  branch = "master"
+  name = "golang.org/x/crypto"
+  packages = ["pbkdf2"]
+  revision = "a49355c7e3f8fe157a85be2f77e6e269a0f89602"
+
+[solve-meta]
+  analyzer-name = "dep"
+  analyzer-version = 1
+  inputs-digest = "d7f1a7207c39125afcb9ca2365832cb83458edfc17f2f7e8d28fd56f19436856"
+  solver-name = "gps-cdcl"
+  solver-version = 1
vendor/github.com/tyler-smith/go-bip39/Gopkg.toml (new file, generated, vendored, 26 lines)
@@ -0,0 +1,26 @@
+
+# Gopkg.toml example
+#
+# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
+# for detailed Gopkg.toml documentation.
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+#   name = "github.com/user/project"
+#   version = "1.0.0"
+#
+# [[constraint]]
+#   name = "github.com/user/project2"
+#   branch = "dev"
+#   source = "github.com/myfork/project2"
+#
+# [[override]]
+#  name = "github.com/x/y"
+#  version = "2.4.0"
+
+
+[[constraint]]
+  branch = "master"
+  name = "golang.org/x/crypto"
vendor/github.com/tyler-smith/go-bip39/LICENSE (generated, vendored, 2 changed lines)
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2014 Tyler Smith
+Copyright (c) 2014-2018 Tyler Smith and contributors
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
vendor/github.com/tyler-smith/go-bip39/Makefile (new file, generated, vendored, 11 lines)
@@ -0,0 +1,11 @@
+.DEFAULT_GOAL := help
+
+tests: ## Run tests with coverage
+	go test -v -cover ./...
+
+profile_tests: ## Run tests and output coverage profiling
+	go test -v -coverprofile=coverage.out .
+	go tool cover -html=coverage.out
+
+help:
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
vendor/github.com/tyler-smith/go-bip39/README.md (generated, vendored, 19 changed lines)
@@ -1,13 +1,13 @@
 # go-bip39
 [![Build Status](https://travis-ci.org/tyler-smith/go-bip39.svg?branch=master)](https://travis-ci.org/tyler-smith/go-bip39)
-
+[![license](https://img.shields.io/github/license/tyler-smith/go-bip39.svg?maxAge=2592000)](https://github.com/tyler-smith/go-bip39/blob/master/LICENSE)
+[![Documentation](https://godoc.org/github.com/tyler-smith/go-bip39?status.svg)](http://godoc.org/github.com/tyler-smith/go-bip39)
+[![Go Report Card](https://goreportcard.com/badge/github.com/tyler-smith/go-bip39)](https://goreportcard.com/report/github.com/tyler-smith/go-bip39)
+[![GitHub issues](https://img.shields.io/github/issues/tyler-smith/go-bip39.svg)](https://github.com/tyler-smith/go-bip39/issues)
+
 A golang implementation of the BIP0039 spec for mnemonic seeds
-
-## Credits
-
-English wordlist and test vectors are from the standard Python BIP0039 implementation
-from the Trezor guys: [https://github.com/trezor/python-mnemonic](https://github.com/trezor/python-mnemonic)
+
 
 ## Example
 
 ```go
@@ -36,3 +36,10 @@ func main(){
 	fmt.Println("Master public key: ", publicKey)
 }
 ```
+
+## Credits
+
+Wordlists are from the [bip39 spec](https://github.com/bitcoin/bips/tree/master/bip-0039).
+
+Test vectors are from the standard Python BIP0039 implementation from the
+Trezor team: [https://github.com/trezor/python-mnemonic](https://github.com/trezor/python-mnemonic)
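Note: only the tail of the README's usage example is visible in the hunk above. As a rough, non-authoritative sketch of the API surface this vendored revision exposes (NewEntropy, NewMnemonic and NewSeed all appear in the bip39.go diff below), basic mnemonic and seed generation could look like the following; the 256-bit entropy size, the "TREZOR" passphrase and the main wrapper are illustrative, and the bip32 master-key derivation shown in the upstream README is omitted.

```go
package main

import (
	"fmt"

	"github.com/tyler-smith/go-bip39"
)

func main() {
	// 256 bits of entropy produce a 24-word mnemonic; 128 bits would produce 12 words.
	entropy, err := bip39.NewEntropy(256)
	if err != nil {
		panic(err)
	}

	// Encode the entropy plus its SHA-256 checksum bits as mnemonic words.
	mnemonic, err := bip39.NewMnemonic(entropy)
	if err != nil {
		panic(err)
	}
	fmt.Println("Mnemonic:", mnemonic)

	// Derive the 64-byte BIP39 seed: PBKDF2 with 2048 rounds of HMAC-SHA512,
	// salted with "mnemonic" plus an optional passphrase (see NewSeed below).
	seed := bip39.NewSeed(mnemonic, "TREZOR")
	fmt.Printf("Seed: %x\n", seed)
}
```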
vendor/github.com/tyler-smith/go-bip39/bip39.go (generated, vendored, 352 changed lines)
@@ -1,3 +1,7 @@
+// Package bip39 is the Golang implementation of the BIP39 spec.
+//
+// The official BIP39 spec can be found at
+// https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki
 package bip39
 
 import (
@@ -10,19 +14,88 @@ import (
 	"math/big"
 	"strings"
 
+	"github.com/tyler-smith/go-bip39/wordlists"
 	"golang.org/x/crypto/pbkdf2"
 )
 
-// Some bitwise operands for working with big.Ints
 var (
-	Last11BitsMask          = big.NewInt(2047)
-	RightShift11BitsDivider = big.NewInt(2048)
-	BigOne                  = big.NewInt(1)
-	BigTwo                  = big.NewInt(2)
+	// Some bitwise operands for working with big.Ints
+	last11BitsMask  = big.NewInt(2047)
+	shift11BitsMask = big.NewInt(2048)
+	bigOne          = big.NewInt(1)
+	bigTwo          = big.NewInt(2)
+
+	// used to isolate the checksum bits from the entropy+checksum byte array
+	wordLengthChecksumMasksMapping = map[int]*big.Int{
+		12: big.NewInt(15),
+		15: big.NewInt(31),
+		18: big.NewInt(63),
+		21: big.NewInt(127),
+		24: big.NewInt(255),
+	}
+
+	// used to use only the desired x of 8 available checksum bits.
+	// 256 bit (word length 24) requires all 8 bits of the checksum,
+	// and thus no shifting is needed for it (we would get a divByZero crash if we did)
+	wordLengthChecksumShiftMapping = map[int]*big.Int{
+		12: big.NewInt(16),
+		15: big.NewInt(8),
+		18: big.NewInt(4),
+		21: big.NewInt(2),
+	}
+
+	// wordList is the set of words to use
+	wordList []string
+
+	// wordMap is a reverse lookup map for wordList
+	wordMap map[string]int
 )
 
+var (
+	// ErrInvalidMnemonic is returned when trying to use a malformed mnemonic.
+	ErrInvalidMnemonic = errors.New("Invalid mnenomic")
+
+	// ErrEntropyLengthInvalid is returned when trying to use an entropy set with
+	// an invalid size.
+	ErrEntropyLengthInvalid = errors.New("Entropy length must be [128, 256] and a multiple of 32")
+
+	// ErrValidatedSeedLengthMismatch is returned when a validated seed is not the
+	// same size as the given seed. This should never happen is present only as a
+	// sanity assertion.
+	ErrValidatedSeedLengthMismatch = errors.New("Seed length does not match validated seed length")
+
+	// ErrChecksumIncorrect is returned when entropy has the incorrect checksum.
+	ErrChecksumIncorrect = errors.New("Checksum incorrect")
+)
+
+func init() {
+	SetWordList(wordlists.English)
+}
+
+// SetWordList sets the list of words to use for mnemonics. Currently the list
+// that is set is used package-wide.
+func SetWordList(list []string) {
+	wordList = list
+	wordMap = map[string]int{}
+	for i, v := range wordList {
+		wordMap[v] = i
+	}
+}
+
+// GetWordList gets the list of words to use for mnemonics.
+func GetWordList() []string {
+	return wordList
+}
+
+// GetWordIndex gets word index in wordMap.
+func GetWordIndex(word string) (int, bool) {
+	idx, ok := wordMap[word]
+	return idx, ok
+}
+
 // NewEntropy will create random entropy bytes
 // so long as the requested size bitSize is an appropriate size.
+//
+// bitSize has to be a multiple 32 and be within the inclusive range of {128, 256}
 func NewEntropy(bitSize int) ([]byte, error) {
 	err := validateEntropyBitSize(bitSize)
 	if err != nil {
@@ -34,47 +107,98 @@ func NewEntropy(bitSize int) ([]byte, error) {
 	return entropy, err
 }
 
+// EntropyFromMnemonic takes a mnemonic generated by this library,
+// and returns the input entropy used to generate the given mnemonic.
+// An error is returned if the given mnemonic is invalid.
+func EntropyFromMnemonic(mnemonic string) ([]byte, error) {
+	mnemonicSlice, isValid := splitMnemonicWords(mnemonic)
+	if !isValid {
+		return nil, ErrInvalidMnemonic
+	}
+
+	// Decode the words into a big.Int.
+	b := big.NewInt(0)
+	for _, v := range mnemonicSlice {
+		index, found := wordMap[v]
+		if found == false {
+			return nil, fmt.Errorf("word `%v` not found in reverse map", v)
+		}
+		var wordBytes [2]byte
+		binary.BigEndian.PutUint16(wordBytes[:], uint16(index))
+		b = b.Mul(b, shift11BitsMask)
+		b = b.Or(b, big.NewInt(0).SetBytes(wordBytes[:]))
+	}
+
+	// Build and add the checksum to the big.Int.
+	checksum := big.NewInt(0)
+	checksumMask := wordLengthChecksumMasksMapping[len(mnemonicSlice)]
+	checksum = checksum.And(b, checksumMask)
+
+	b.Div(b, big.NewInt(0).Add(checksumMask, bigOne))
+
+	// The entropy is the underlying bytes of the big.Int. Any upper bytes of
+	// all 0's are not returned so we pad the beginning of the slice with empty
+	// bytes if necessary.
+	entropy := b.Bytes()
+	entropy = padByteSlice(entropy, len(mnemonicSlice)/3*4)
+
+	// Generate the checksum and compare with the one we got from the mneomnic.
+	entropyChecksumBytes := computeChecksum(entropy)
+	entropyChecksum := big.NewInt(int64(entropyChecksumBytes[0]))
+	if l := len(mnemonicSlice); l != 24 {
+		checksumShift := wordLengthChecksumShiftMapping[l]
+		entropyChecksum.Div(entropyChecksum, checksumShift)
+	}
+
+	if checksum.Cmp(entropyChecksum) != 0 {
+		return nil, ErrChecksumIncorrect
+	}
+
+	return entropy, nil
+}
+
 // NewMnemonic will return a string consisting of the mnemonic words for
 // the given entropy.
 // If the provide entropy is invalid, an error will be returned.
 func NewMnemonic(entropy []byte) (string, error) {
-	// Compute some lengths for convenience
+	// Compute some lengths for convenience.
 	entropyBitLength := len(entropy) * 8
 	checksumBitLength := entropyBitLength / 32
 	sentenceLength := (entropyBitLength + checksumBitLength) / 11
 
+	// Validate that the requested size is supported.
 	err := validateEntropyBitSize(entropyBitLength)
 	if err != nil {
 		return "", err
 	}
 
-	// Add checksum to entropy
+	// Add checksum to entropy.
 	entropy = addChecksum(entropy)
 
-	// Break entropy up into sentenceLength chunks of 11 bits
-	// For each word AND mask the rightmost 11 bits and find the word at that index
-	// Then bitshift entropy 11 bits right and repeat
-	// Add to the last empty slot so we can work with LSBs instead of MSB
+	// Break entropy up into sentenceLength chunks of 11 bits.
+	// For each word AND mask the rightmost 11 bits and find the word at that index.
+	// Then bitshift entropy 11 bits right and repeat.
+	// Add to the last empty slot so we can work with LSBs instead of MSB.
 
-	// Entropy as an int so we can bitmask without worrying about bytes slices
+	// Entropy as an int so we can bitmask without worrying about bytes slices.
 	entropyInt := new(big.Int).SetBytes(entropy)
 
-	// Slice to hold words in
+	// Slice to hold words in.
 	words := make([]string, sentenceLength)
 
-	// Throw away big int for AND masking
+	// Throw away big.Int for AND masking.
 	word := big.NewInt(0)
 
 	for i := sentenceLength - 1; i >= 0; i-- {
-		// Get 11 right most bits and bitshift 11 to the right for next time
-		word.And(entropyInt, Last11BitsMask)
-		entropyInt.Div(entropyInt, RightShift11BitsDivider)
+		// Get 11 right most bits and bitshift 11 to the right for next time.
+		word.And(entropyInt, last11BitsMask)
+		entropyInt.Div(entropyInt, shift11BitsMask)
 
-		// Get the bytes representing the 11 bits as a 2 byte slice
+		// Get the bytes representing the 11 bits as a 2 byte slice.
 		wordBytes := padByteSlice(word.Bytes(), 2)
 
-		// Convert bytes to an index and add that word to the list
-		words[i] = WordList[binary.BigEndian.Uint16(wordBytes)]
+		// Convert bytes to an index and add that word to the list.
+		words[i] = wordList[binary.BigEndian.Uint16(wordBytes)]
 	}
 
 	return strings.Join(words, " "), nil
@@ -83,71 +207,50 @@ func NewMnemonic(entropy []byte) (string, error) {
 // MnemonicToByteArray takes a mnemonic string and turns it into a byte array
 // suitable for creating another mnemonic.
 // An error is returned if the mnemonic is invalid.
-// FIXME
-// This does not work for all values in
-// the test vectors. Namely
-// Vectors 0, 4, and 8.
-// This is not really important because BIP39 doesnt really define a conversion
-// from string to bytes.
-func MnemonicToByteArray(mnemonic string) ([]byte, error) {
-	if IsMnemonicValid(mnemonic) == false {
-		return nil, fmt.Errorf("Invalid mnemonic")
-	}
-	mnemonicSlice := strings.Split(mnemonic, " ")
+func MnemonicToByteArray(mnemonic string, raw ...bool) ([]byte, error) {
+	var (
+		mnemonicSlice    = strings.Split(mnemonic, " ")
+		entropyBitSize   = len(mnemonicSlice) * 11
+		checksumBitSize  = entropyBitSize % 32
+		fullByteSize     = (entropyBitSize-checksumBitSize)/8 + 1
+		checksumByteSize = fullByteSize - (fullByteSize % 4)
+	)
 
-	bitSize := len(mnemonicSlice) * 11
-	err := validateEntropyWithChecksumBitSize(bitSize)
-	if err != nil {
-		return nil, err
+	// Pre validate that the mnemonic is well formed and only contains words that
+	// are present in the word list.
+	if !IsMnemonicValid(mnemonic) {
+		return nil, ErrInvalidMnemonic
 	}
-	checksumSize := bitSize % 32
 
-	b := big.NewInt(0)
+	// Convert word indices to a big.Int representing the entropy.
+	checksummedEntropy := big.NewInt(0)
 	modulo := big.NewInt(2048)
 	for _, v := range mnemonicSlice {
-		index, found := ReverseWordMap[v]
-		if found == false {
-			return nil, fmt.Errorf("Word `%v` not found in reverse map", v)
-		}
-		add := big.NewInt(int64(index))
-		b = b.Mul(b, modulo)
-		b = b.Add(b, add)
-	}
-	hex := b.Bytes()
-	checksumModulo := big.NewInt(0).Exp(big.NewInt(2), big.NewInt(int64(checksumSize)), nil)
-	entropy, _ := big.NewInt(0).DivMod(b, checksumModulo, big.NewInt(0))
-
-	entropyHex := entropy.Bytes()
-
-	byteSize := bitSize/8 + 1
-	if len(hex) != byteSize {
-		tmp := make([]byte, byteSize)
-		diff := byteSize - len(hex)
-		for i := 0; i < len(hex); i++ {
-			tmp[i+diff] = hex[i]
-		}
-		hex = tmp
+		index := big.NewInt(int64(wordMap[v]))
+		checksummedEntropy.Mul(checksummedEntropy, modulo)
+		checksummedEntropy.Add(checksummedEntropy, index)
+	}
 
-	validationHex := addChecksum(entropyHex)
-	if len(validationHex) != byteSize {
-		tmp2 := make([]byte, byteSize)
-		diff2 := byteSize - len(validationHex)
-		for i := 0; i < len(validationHex); i++ {
-			tmp2[i+diff2] = validationHex[i]
-		}
-		validationHex = tmp2
+	// Calculate the unchecksummed entropy so we can validate that the checksum is
+	// correct.
+	checksumModulo := big.NewInt(0).Exp(bigTwo, big.NewInt(int64(checksumBitSize)), nil)
+	rawEntropy := big.NewInt(0).Div(checksummedEntropy, checksumModulo)
+
+	// Convert big.Ints to byte padded byte slices.
+	rawEntropyBytes := padByteSlice(rawEntropy.Bytes(), checksumByteSize)
+	checksummedEntropyBytes := padByteSlice(checksummedEntropy.Bytes(), fullByteSize)
+
+	// Validate that the checksum is correct.
+	newChecksummedEntropyBytes := padByteSlice(addChecksum(rawEntropyBytes), fullByteSize)
+	if !compareByteSlices(checksummedEntropyBytes, newChecksummedEntropyBytes) {
+		return nil, ErrChecksumIncorrect
 	}
 
-	if len(hex) != len(validationHex) {
-		panic("[]byte len mismatch - it shouldn't happen")
+	if len(raw) > 0 && raw[0] {
+		return rawEntropyBytes, nil
 	}
-	for i := range validationHex {
-		if hex[i] != validationHex[i] {
-			return nil, fmt.Errorf("Invalid byte at position %v", i)
-		}
-	}
-	return hex, nil
+
+	return checksummedEntropyBytes, nil
 }
 
 // NewSeedWithErrorChecking creates a hashed seed output given the mnemonic string and a password.
@@ -166,13 +269,36 @@ func NewSeed(mnemonic string, password string) []byte {
 	return pbkdf2.Key([]byte(mnemonic), []byte("mnemonic"+password), 2048, 64, sha512.New)
 }
 
+// IsMnemonicValid attempts to verify that the provided mnemonic is valid.
+// Validity is determined by both the number of words being appropriate,
+// and that all the words in the mnemonic are present in the word list.
+func IsMnemonicValid(mnemonic string) bool {
+	// Create a list of all the words in the mnemonic sentence
+	words := strings.Fields(mnemonic)
+
+	// Get word count
+	wordCount := len(words)
+
+	// The number of words should be 12, 15, 18, 21 or 24
+	if wordCount%3 != 0 || wordCount < 12 || wordCount > 24 {
+		return false
+	}
+
+	// Check if all words belong in the wordlist
+	for _, word := range words {
+		if _, ok := wordMap[word]; !ok {
+			return false
+		}
+	}
+
+	return true
+}
+
 // Appends to data the first (len(data) / 32)bits of the result of sha256(data)
 // Currently only supports data up to 32 bytes
 func addChecksum(data []byte) []byte {
 	// Get first byte of sha256
-	hasher := sha256.New()
-	hasher.Write(data)
-	hash := hasher.Sum(nil)
+	hash := computeChecksum(data)
 	firstChecksumByte := hash[0]
 
 	// len() is in bytes so we divide by 4
@@ -184,66 +310,68 @@ func addChecksum(data []byte) []byte {
 	dataBigInt := new(big.Int).SetBytes(data)
 	for i := uint(0); i < checksumBitLength; i++ {
 		// Bitshift 1 left
-		dataBigInt.Mul(dataBigInt, BigTwo)
+		dataBigInt.Mul(dataBigInt, bigTwo)
 
 		// Set rightmost bit if leftmost checksum bit is set
 		if uint8(firstChecksumByte&(1<<(7-i))) > 0 {
-			dataBigInt.Or(dataBigInt, BigOne)
+			dataBigInt.Or(dataBigInt, bigOne)
 		}
 	}
 
 	return dataBigInt.Bytes()
 }
 
-func padByteSlice(slice []byte, length int) []byte {
-	newSlice := make([]byte, length-len(slice))
-	return append(newSlice, slice...)
+func computeChecksum(data []byte) []byte {
+	hasher := sha256.New()
+	hasher.Write(data)
+	return hasher.Sum(nil)
 }
 
+// validateEntropyBitSize ensures that entropy is the correct size for being a
+// mnemonic.
 func validateEntropyBitSize(bitSize int) error {
 	if (bitSize%32) != 0 || bitSize < 128 || bitSize > 256 {
-		return errors.New("Entropy length must be [128, 256] and a multiple of 32")
+		return ErrEntropyLengthInvalid
 	}
 	return nil
 }
 
-func validateEntropyWithChecksumBitSize(bitSize int) error {
-	if (bitSize != 128+4) && (bitSize != 160+5) && (bitSize != 192+6) && (bitSize != 224+7) && (bitSize != 256+8) {
-		return fmt.Errorf("Wrong entropy + checksum size - expected %v, got %v", int((bitSize-bitSize%32)+(bitSize-bitSize%32)/32), bitSize)
+// padByteSlice returns a byte slice of the given size with contents of the
+// given slice left padded and any empty spaces filled with 0's.
+func padByteSlice(slice []byte, length int) []byte {
+	offset := length - len(slice)
+	if offset <= 0 {
+		return slice
 	}
-	return nil
+	newSlice := make([]byte, length)
+	copy(newSlice[offset:], slice)
+	return newSlice
 }
 
-// IsMnemonicValid attempts to verify that the provided mnemonic is valid.
-// Validity is determined by both the number of words being appropriate,
-// and that all the words in the mnemonic are present in the word list.
-func IsMnemonicValid(mnemonic string) bool {
+// compareByteSlices returns true of the byte slices have equal contents and
+// returns false otherwise.
+func compareByteSlices(a, b []byte) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i := range a {
+		if a[i] != b[i] {
+			return false
+		}
+	}
+	return true
+}
+
+func splitMnemonicWords(mnemonic string) ([]string, bool) {
 	// Create a list of all the words in the mnemonic sentence
 	words := strings.Fields(mnemonic)
 
-	//Get num of words
+	// Get num of words
 	numOfWords := len(words)
 
 	// The number of words should be 12, 15, 18, 21 or 24
 	if numOfWords%3 != 0 || numOfWords < 12 || numOfWords > 24 {
-		return false
+		return nil, false
 	}
-
-	// Check if all words belong in the wordlist
-	for i := 0; i < numOfWords; i++ {
-		if !contains(WordList, words[i]) {
-			return false
-		}
-	}
-
-	return true
-}
-
-func contains(s []string, e string) bool {
-	for _, a := range s {
-		if a == e {
-			return true
-		}
-	}
-	return false
+	return words, true
 }
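The checksum mask and shift tables introduced above encode how many SHA-256 checksum bits each mnemonic length carries (4 bits for 12 words, up to all 8 bits for 24 words). As a hedged sketch of the new round-trip path (EntropyFromMnemonic reversing NewMnemonic, plus the optional raw flag on MnemonicToByteArray), where the zeroed 128-bit entropy value is purely illustrative:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/tyler-smith/go-bip39"
)

func main() {
	// 16 bytes = 128 bits of entropy, i.e. 12 words carrying a 4-bit checksum
	// (mask 15, shift 16 in the tables added to bip39.go). The value is illustrative.
	entropy := make([]byte, 16)

	mnemonic, err := bip39.NewMnemonic(entropy)
	if err != nil {
		panic(err)
	}

	// EntropyFromMnemonic validates the checksum and recovers the original entropy.
	recovered, err := bip39.EntropyFromMnemonic(mnemonic)
	if err != nil {
		panic(err)
	}
	fmt.Println(bytes.Equal(recovered, entropy)) // true

	// MnemonicToByteArray returns entropy+checksum bytes by default; passing true
	// requests the raw, checksum-stripped entropy instead.
	withChecksum, _ := bip39.MnemonicToByteArray(mnemonic)
	raw, _ := bip39.MnemonicToByteArray(mnemonic, true)
	fmt.Println(len(withChecksum), len(raw)) // 17 16 for a 12-word mnemonic
}
```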
vendor/github.com/tyler-smith/go-bip39/coverage.txt (deleted file, generated, vendored, 15 lines)
@@ -1,15 +0,0 @@
-
-github.com/CrowBits/go-bip39/bip39.go addChecksum 100.00% (11/11)
-github.com/CrowBits/go-bip39/bip39.go IsMnemonicValid 100.00% (8/8)
-github.com/CrowBits/go-bip39/bip39.go NewEntropy 100.00% (6/6)
-github.com/CrowBits/go-bip39/bip39.go contains 100.00% (4/4)
-github.com/CrowBits/go-bip39/bip39.go NewSeedWithErrorChecking 100.00% (4/4)
-github.com/CrowBits/go-bip39/bip39.go validateEntropyBitSize 100.00% (3/3)
-github.com/CrowBits/go-bip39/bip39.go validateEntropyWithChecksumBitSize 100.00% (3/3)
-github.com/CrowBits/go-bip39/bip39.go padByteSlice 100.00% (2/2)
-github.com/CrowBits/go-bip39/wordlist.go init 100.00% (2/2)
-github.com/CrowBits/go-bip39/bip39.go NewSeed 100.00% (1/1)
-github.com/CrowBits/go-bip39/bip39.go NewMnemonic 93.75% (15/16)
-github.com/CrowBits/go-bip39/bip39.go MnemonicToByteArray 90.24% (37/41)
-github.com/CrowBits/go-bip39 ---------------------------------- 95.05% (96/101)
-
vendor/github.com/tyler-smith/go-bip39/wordlist.go (generated, vendored, 2067 changed lines)
(File diff suppressed because it is too large.)
vendor/golang.org/x/text/unicode/norm/triegen.go (new file, generated, vendored, 117 lines)
@@ -0,0 +1,117 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// Trie table generator.
+// Used by make*tables tools to generate a go file with trie data structures
+// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
+// sequence are used to lookup offsets in the index table to be used for the
+// next byte. The last byte is used to index into a table with 16-bit values.
+
+package main
+
+import (
+	"fmt"
+	"io"
+)
+
+const maxSparseEntries = 16
+
+type normCompacter struct {
+	sparseBlocks [][]uint64
+	sparseOffset []uint16
+	sparseCount  int
+	name         string
+}
+
+func mostFrequentStride(a []uint64) int {
+	counts := make(map[int]int)
+	var v int
+	for _, x := range a {
+		if stride := int(x) - v; v != 0 && stride >= 0 {
+			counts[stride]++
+		}
+		v = int(x)
+	}
+	var maxs, maxc int
+	for stride, cnt := range counts {
+		if cnt > maxc || (cnt == maxc && stride < maxs) {
+			maxs, maxc = stride, cnt
+		}
+	}
+	return maxs
+}
+
+func countSparseEntries(a []uint64) int {
+	stride := mostFrequentStride(a)
+	var v, count int
+	for _, tv := range a {
+		if int(tv)-v != stride {
+			if tv != 0 {
+				count++
+			}
+		}
+		v = int(tv)
+	}
+	return count
+}
+
+func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
+	if n := countSparseEntries(v); n <= maxSparseEntries {
+		return (n+1)*4 + 2, true
+	}
+	return 0, false
+}
+
+func (c *normCompacter) Store(v []uint64) uint32 {
+	h := uint32(len(c.sparseOffset))
+	c.sparseBlocks = append(c.sparseBlocks, v)
+	c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
+	c.sparseCount += countSparseEntries(v) + 1
+	return h
+}
+
+func (c *normCompacter) Handler() string {
+	return c.name + "Sparse.lookup"
+}
+
+func (c *normCompacter) Print(w io.Writer) (retErr error) {
+	p := func(f string, x ...interface{}) {
+		if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
+			retErr = err
+		}
+	}
+
+	ls := len(c.sparseBlocks)
+	p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
+	p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)
+
+	ns := c.sparseCount
+	p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
+	p("var %sSparseValues = [%d]valueRange {", c.name, ns)
+	for i, b := range c.sparseBlocks {
+		p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
+		var v int
+		stride := mostFrequentStride(b)
+		n := countSparseEntries(b)
+		p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
+		for i, nv := range b {
+			if int(nv)-v != stride {
+				if v != 0 {
+					p(",hi:%#02x},", 0x80+i-1)
+				}
+				if nv != 0 {
+					p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
+				}
+			}
+			v = int(nv)
+		}
+		if v != 0 {
+			p(",hi:%#02x},", 0x80+len(b)-1)
+		}
+	}
+	p("\n}\n\n")
+	return
+}
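For context on the generator above: a trie block gets the sparse encoding only when few of its values break the block's dominant stride (at most maxSparseEntries of them). The standalone sketch below restates the counting idea with a hypothetical helper named sparseEntries, since triegen.go is build-tagged ignore and not importable; it is an illustration, not part of the vendored file.

```go
package main

import "fmt"

// sparseEntries mirrors the logic of countSparseEntries above: a value that
// continues the dominant stride extends the current valueRange for free,
// while any other non-zero value opens a new entry.
func sparseEntries(a []uint64, stride int) int {
	var v, count int
	for _, tv := range a {
		if int(tv)-v != stride && tv != 0 {
			count++
		}
		v = int(tv)
	}
	return count
}

func main() {
	// A block that is zero except for one run of consecutive values:
	// stride 1 dominates, so only the first value of the run opens an entry.
	block := make([]uint64, 64)
	for i := 20; i < 40; i++ {
		block[i] = uint64(0x1000 + i - 20)
	}
	n := sparseEntries(block, 1)
	fmt.Println(n)           // 1, well under maxSparseEntries (16)
	fmt.Println((n+1)*4 + 2) // 10, the size Size() would report for this block
}
```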
vendor/vendor.json (vendored, 12 changed lines)
@@ -103,12 +103,6 @@
 			"revision": "8e610b2b55bfd1bfa9436ab110d311f5e8a74dcb",
 			"revisionTime": "2018-06-25T18:44:42Z"
 		},
-		{
-			"checksumSHA1": "B1sVd5XOmjuSFYivGhBd+vPlc1w=",
-			"path": "github.com/ebfe/scard",
-			"revision": "0147d7ead790ac8a5ecc91ee6de68beb7b17c4e9",
-			"revisionTime": "2017-12-31T19:32:11Z"
-		},
 		{
 			"checksumSHA1": "zYnPsNAVm1/ViwCkN++dX2JQhBo=",
 			"path": "github.com/edsrzf/mmap-go",
@@ -532,10 +526,10 @@
 			"revisionTime": "2018-11-28T10:09:59Z"
 		},
 		{
-			"checksumSHA1": "vW7IiPtoA4hQQ/ScHlbmRktY89U=",
+			"checksumSHA1": "SsMMqb3xn7hg1ZX5ugwZz5rzpx0=",
 			"path": "github.com/tyler-smith/go-bip39",
-			"revision": "8e7a99b3e716f36d3b080a9a70f9eb45abe4edcc",
-			"revisionTime": "2016-06-29T16:38:56Z"
+			"revision": "dbb3b84ba2ef14e894f5e33d6c6e43641e665738",
+			"revisionTime": "2018-10-17T06:06:43Z"
 		},
 		{
 			"checksumSHA1": "nD6S4KB0S+YHxVMDDE+w3PyXaMk=",
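The vendor.json hunks above edit the govendor-style manifest: each entry pins a package path to a revision and records a checksum. A small, hypothetical sketch of reading that manifest follows; the vendorFile struct, its field tags and the file path are assumptions inferred from the fields visible in the hunks, not an API of this repository.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// vendorFile models only the manifest fields visible in the hunks above.
type vendorFile struct {
	Package []struct {
		ChecksumSHA1 string `json:"checksumSHA1"`
		Path         string `json:"path"`
		Revision     string `json:"revision"`
		RevisionTime string `json:"revisionTime"`
	} `json:"package"`
}

func main() {
	raw, err := os.ReadFile("vendor/vendor.json")
	if err != nil {
		panic(err)
	}
	var vf vendorFile
	if err := json.Unmarshal(raw, &vf); err != nil {
		panic(err)
	}
	// Print each vendored package with its pinned revision.
	for _, p := range vf.Package {
		fmt.Printf("%s @ %s (%s)\n", p.Path, p.Revision, p.RevisionTime)
	}
}
```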