Update bip-39 ref and remove ebfe/scard from vendor

This commit is contained in:
Guillaume Ballet 2019-04-08 19:16:27 +02:00
parent 8ee5bb2289
commit 86806d8b24
10 changed files with 426 additions and 2210 deletions

15
vendor/github.com/tyler-smith/go-bip39/Gopkg.lock generated vendored Normal file
View File

@ -0,0 +1,15 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
packages = ["pbkdf2"]
revision = "a49355c7e3f8fe157a85be2f77e6e269a0f89602"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "d7f1a7207c39125afcb9ca2365832cb83458edfc17f2f7e8d28fd56f19436856"
solver-name = "gps-cdcl"
solver-version = 1

26
vendor/github.com/tyler-smith/go-bip39/Gopkg.toml generated vendored Normal file
View File

@ -0,0 +1,26 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
[[constraint]]
branch = "master"
name = "golang.org/x/crypto"

View File

@ -1,6 +1,6 @@
The MIT License (MIT) The MIT License (MIT)
Copyright (c) 2014 Tyler Smith Copyright (c) 2014-2018 Tyler Smith and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

11
vendor/github.com/tyler-smith/go-bip39/Makefile generated vendored Normal file
View File

@ -0,0 +1,11 @@
# Makefile for go-bip39. Run `make help` to list targets; the text after
# `##` on each target line is picked up by the self-documenting help rule.
.DEFAULT_GOAL := help

tests: ## Run tests with coverage
	go test -v -cover ./...

profile_tests: ## Run tests and output coverage profiling
	go test -v -coverprofile=coverage.out .
	go tool cover -html=coverage.out

# Greps this Makefile for `target: ## description` lines and pretty-prints
# them as a colorized two-column listing.
help:
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

View File

@ -1,13 +1,13 @@
# go-bip39 # go-bip39
[![Build Status](https://travis-ci.org/tyler-smith/go-bip39.svg?branch=master)](https://travis-ci.org/tyler-smith/go-bip39)
[![license](https://img.shields.io/github/license/tyler-smith/go-bip39.svg?maxAge=2592000)](https://github.com/tyler-smith/go-bip39/blob/master/LICENSE)
[![Documentation](https://godoc.org/github.com/tyler-smith/go-bip39?status.svg)](http://godoc.org/github.com/tyler-smith/go-bip39)
[![Go Report Card](https://goreportcard.com/badge/github.com/tyler-smith/go-bip39)](https://goreportcard.com/report/github.com/tyler-smith/go-bip39)
[![GitHub issues](https://img.shields.io/github/issues/tyler-smith/go-bip39.svg)](https://github.com/tyler-smith/go-bip39/issues)
A golang implementation of the BIP0039 spec for mnemonic seeds A golang implementation of the BIP0039 spec for mnemonic seeds
## Credits
English wordlist and test vectors are from the standard Python BIP0039 implementation
from the Trezor guys: [https://github.com/trezor/python-mnemonic](https://github.com/trezor/python-mnemonic)
## Example ## Example
```go ```go
@ -36,3 +36,10 @@ func main(){
fmt.Println("Master public key: ", publicKey) fmt.Println("Master public key: ", publicKey)
} }
``` ```
## Credits
Wordlists are from the [bip39 spec](https://github.com/bitcoin/bips/tree/master/bip-0039).
Test vectors are from the standard Python BIP0039 implementation from the
Trezor team: [https://github.com/trezor/python-mnemonic](https://github.com/trezor/python-mnemonic)

View File

@ -1,3 +1,7 @@
// Package bip39 is the Golang implementation of the BIP39 spec.
//
// The official BIP39 spec can be found at
// https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki
package bip39 package bip39
import ( import (
@ -10,19 +14,88 @@ import (
"math/big" "math/big"
"strings" "strings"
"github.com/tyler-smith/go-bip39/wordlists"
"golang.org/x/crypto/pbkdf2" "golang.org/x/crypto/pbkdf2"
) )
// Some bitwise operands for working with big.Ints
var ( var (
Last11BitsMask = big.NewInt(2047) // Some bitwise operands for working with big.Ints
RightShift11BitsDivider = big.NewInt(2048) last11BitsMask = big.NewInt(2047)
BigOne = big.NewInt(1) shift11BitsMask = big.NewInt(2048)
BigTwo = big.NewInt(2) bigOne = big.NewInt(1)
bigTwo = big.NewInt(2)
// used to isolate the checksum bits from the entropy+checksum byte array
wordLengthChecksumMasksMapping = map[int]*big.Int{
12: big.NewInt(15),
15: big.NewInt(31),
18: big.NewInt(63),
21: big.NewInt(127),
24: big.NewInt(255),
}
// used to use only the desired x of 8 available checksum bits.
// 256 bit (word length 24) requires all 8 bits of the checksum,
// and thus no shifting is needed for it (we would get a divByZero crash if we did)
wordLengthChecksumShiftMapping = map[int]*big.Int{
12: big.NewInt(16),
15: big.NewInt(8),
18: big.NewInt(4),
21: big.NewInt(2),
}
// wordList is the set of words to use
wordList []string
// wordMap is a reverse lookup map for wordList
wordMap map[string]int
) )
// Sentinel errors returned by this package. Callers compare against these
// directly (the package predates errors.Is, but == comparison works).
var (
	// ErrInvalidMnemonic is returned when trying to use a malformed mnemonic.
	// NOTE(review): "mnenomic" is a typo in the released error string; it is
	// left unchanged because callers may match on the exact message.
	ErrInvalidMnemonic = errors.New("Invalid mnenomic")

	// ErrEntropyLengthInvalid is returned when trying to use an entropy set with
	// an invalid size.
	ErrEntropyLengthInvalid = errors.New("Entropy length must be [128, 256] and a multiple of 32")

	// ErrValidatedSeedLengthMismatch is returned when a validated seed is not the
	// same size as the given seed. This should never happen and is present only
	// as a sanity assertion.
	ErrValidatedSeedLengthMismatch = errors.New("Seed length does not match validated seed length")

	// ErrChecksumIncorrect is returned when entropy has the incorrect checksum.
	ErrChecksumIncorrect = errors.New("Checksum incorrect")
)
// init installs the English word list as the package-wide default, so the
// package is usable without an explicit SetWordList call.
func init() {
	SetWordList(wordlists.English)
}
// SetWordList sets the list of words to use for mnemonics. Currently the list
// that is set is used package-wide.
func SetWordList(list []string) {
	wordList = list
	// Rebuild the reverse lookup map, pre-sized to the word count.
	wordMap = make(map[string]int, len(list))
	for position, word := range list {
		wordMap[word] = position
	}
}
// GetWordList gets the list of words to use for mnemonics.
//
// The returned slice is the package's internal list (not a copy); callers
// should treat it as read-only.
func GetWordList() []string {
	return wordList
}
// GetWordIndex gets word index in wordMap.
// The second return value reports whether the word is in the current list.
func GetWordIndex(word string) (int, bool) {
	if position, present := wordMap[word]; present {
		return position, true
	}
	return 0, false
}
// NewEntropy will create random entropy bytes // NewEntropy will create random entropy bytes
// so long as the requested size bitSize is an appropriate size. // so long as the requested size bitSize is an appropriate size.
//
// bitSize has to be a multiple 32 and be within the inclusive range of {128, 256}
func NewEntropy(bitSize int) ([]byte, error) { func NewEntropy(bitSize int) ([]byte, error) {
err := validateEntropyBitSize(bitSize) err := validateEntropyBitSize(bitSize)
if err != nil { if err != nil {
@ -34,47 +107,98 @@ func NewEntropy(bitSize int) ([]byte, error) {
return entropy, err return entropy, err
} }
// EntropyFromMnemonic takes a mnemonic generated by this library,
// and returns the input entropy used to generate the given mnemonic.
// An error is returned if the given mnemonic is invalid.
func EntropyFromMnemonic(mnemonic string) ([]byte, error) {
	mnemonicSlice, isValid := splitMnemonicWords(mnemonic)
	if !isValid {
		return nil, ErrInvalidMnemonic
	}

	// Decode the words into a big.Int: each word contributes 11 bits, so the
	// accumulator is shifted left by 11 bits (multiplied by 2048) per word.
	b := big.NewInt(0)
	for _, v := range mnemonicSlice {
		index, found := wordMap[v]
		if !found {
			return nil, fmt.Errorf("word `%v` not found in reverse map", v)
		}
		var wordBytes [2]byte
		binary.BigEndian.PutUint16(wordBytes[:], uint16(index))
		b = b.Mul(b, shift11BitsMask)
		b = b.Or(b, big.NewInt(0).SetBytes(wordBytes[:]))
	}

	// Isolate the checksum bits (the lowest wordCount/3 bits, per the mask
	// mapping) and strip them from the accumulator, leaving only the entropy.
	checksum := big.NewInt(0)
	checksumMask := wordLengthChecksumMasksMapping[len(mnemonicSlice)]
	checksum = checksum.And(b, checksumMask)
	b.Div(b, big.NewInt(0).Add(checksumMask, bigOne))

	// The entropy is the underlying bytes of the big.Int. Any upper bytes of
	// all 0's are not returned so we pad the beginning of the slice with empty
	// bytes if necessary.
	entropy := b.Bytes()
	entropy = padByteSlice(entropy, len(mnemonicSlice)/3*4)

	// Generate the checksum and compare with the one we got from the mnemonic.
	// For fewer than 24 words only the top checksum bits are used, so shift the
	// freshly computed checksum down to match; 24 words uses all 8 bits and
	// needs no shift (shifting would also divide by a missing map entry).
	entropyChecksumBytes := computeChecksum(entropy)
	entropyChecksum := big.NewInt(int64(entropyChecksumBytes[0]))
	if l := len(mnemonicSlice); l != 24 {
		checksumShift := wordLengthChecksumShiftMapping[l]
		entropyChecksum.Div(entropyChecksum, checksumShift)
	}

	if checksum.Cmp(entropyChecksum) != 0 {
		return nil, ErrChecksumIncorrect
	}

	return entropy, nil
}
// NewMnemonic will return a string consisting of the mnemonic words for // NewMnemonic will return a string consisting of the mnemonic words for
// the given entropy. // the given entropy.
// If the provide entropy is invalid, an error will be returned. // If the provide entropy is invalid, an error will be returned.
func NewMnemonic(entropy []byte) (string, error) { func NewMnemonic(entropy []byte) (string, error) {
// Compute some lengths for convenience // Compute some lengths for convenience.
entropyBitLength := len(entropy) * 8 entropyBitLength := len(entropy) * 8
checksumBitLength := entropyBitLength / 32 checksumBitLength := entropyBitLength / 32
sentenceLength := (entropyBitLength + checksumBitLength) / 11 sentenceLength := (entropyBitLength + checksumBitLength) / 11
// Validate that the requested size is supported.
err := validateEntropyBitSize(entropyBitLength) err := validateEntropyBitSize(entropyBitLength)
if err != nil { if err != nil {
return "", err return "", err
} }
// Add checksum to entropy // Add checksum to entropy.
entropy = addChecksum(entropy) entropy = addChecksum(entropy)
// Break entropy up into sentenceLength chunks of 11 bits // Break entropy up into sentenceLength chunks of 11 bits.
// For each word AND mask the rightmost 11 bits and find the word at that index // For each word AND mask the rightmost 11 bits and find the word at that index.
// Then bitshift entropy 11 bits right and repeat // Then bitshift entropy 11 bits right and repeat.
// Add to the last empty slot so we can work with LSBs instead of MSB // Add to the last empty slot so we can work with LSBs instead of MSB.
// Entropy as an int so we can bitmask without worrying about bytes slices // Entropy as an int so we can bitmask without worrying about bytes slices.
entropyInt := new(big.Int).SetBytes(entropy) entropyInt := new(big.Int).SetBytes(entropy)
// Slice to hold words in // Slice to hold words in.
words := make([]string, sentenceLength) words := make([]string, sentenceLength)
// Throw away big int for AND masking // Throw away big.Int for AND masking.
word := big.NewInt(0) word := big.NewInt(0)
for i := sentenceLength - 1; i >= 0; i-- { for i := sentenceLength - 1; i >= 0; i-- {
// Get 11 right most bits and bitshift 11 to the right for next time // Get 11 right most bits and bitshift 11 to the right for next time.
word.And(entropyInt, Last11BitsMask) word.And(entropyInt, last11BitsMask)
entropyInt.Div(entropyInt, RightShift11BitsDivider) entropyInt.Div(entropyInt, shift11BitsMask)
// Get the bytes representing the 11 bits as a 2 byte slice // Get the bytes representing the 11 bits as a 2 byte slice.
wordBytes := padByteSlice(word.Bytes(), 2) wordBytes := padByteSlice(word.Bytes(), 2)
// Convert bytes to an index and add that word to the list // Convert bytes to an index and add that word to the list.
words[i] = WordList[binary.BigEndian.Uint16(wordBytes)] words[i] = wordList[binary.BigEndian.Uint16(wordBytes)]
} }
return strings.Join(words, " "), nil return strings.Join(words, " "), nil
@ -83,71 +207,50 @@ func NewMnemonic(entropy []byte) (string, error) {
// MnemonicToByteArray takes a mnemonic string and turns it into a byte array // MnemonicToByteArray takes a mnemonic string and turns it into a byte array
// suitable for creating another mnemonic. // suitable for creating another mnemonic.
// An error is returned if the mnemonic is invalid. // An error is returned if the mnemonic is invalid.
// FIXME func MnemonicToByteArray(mnemonic string, raw ...bool) ([]byte, error) {
// This does not work for all values in var (
// the test vectors. Namely mnemonicSlice = strings.Split(mnemonic, " ")
// Vectors 0, 4, and 8. entropyBitSize = len(mnemonicSlice) * 11
// This is not really important because BIP39 doesnt really define a conversion checksumBitSize = entropyBitSize % 32
// from string to bytes. fullByteSize = (entropyBitSize-checksumBitSize)/8 + 1
func MnemonicToByteArray(mnemonic string) ([]byte, error) { checksumByteSize = fullByteSize - (fullByteSize % 4)
if IsMnemonicValid(mnemonic) == false { )
return nil, fmt.Errorf("Invalid mnemonic")
}
mnemonicSlice := strings.Split(mnemonic, " ")
bitSize := len(mnemonicSlice) * 11 // Pre validate that the mnemonic is well formed and only contains words that
err := validateEntropyWithChecksumBitSize(bitSize) // are present in the word list.
if err != nil { if !IsMnemonicValid(mnemonic) {
return nil, err return nil, ErrInvalidMnemonic
} }
checksumSize := bitSize % 32
b := big.NewInt(0) // Convert word indices to a big.Int representing the entropy.
checksummedEntropy := big.NewInt(0)
modulo := big.NewInt(2048) modulo := big.NewInt(2048)
for _, v := range mnemonicSlice { for _, v := range mnemonicSlice {
index, found := ReverseWordMap[v] index := big.NewInt(int64(wordMap[v]))
if found == false { checksummedEntropy.Mul(checksummedEntropy, modulo)
return nil, fmt.Errorf("Word `%v` not found in reverse map", v) checksummedEntropy.Add(checksummedEntropy, index)
}
add := big.NewInt(int64(index))
b = b.Mul(b, modulo)
b = b.Add(b, add)
}
hex := b.Bytes()
checksumModulo := big.NewInt(0).Exp(big.NewInt(2), big.NewInt(int64(checksumSize)), nil)
entropy, _ := big.NewInt(0).DivMod(b, checksumModulo, big.NewInt(0))
entropyHex := entropy.Bytes()
byteSize := bitSize/8 + 1
if len(hex) != byteSize {
tmp := make([]byte, byteSize)
diff := byteSize - len(hex)
for i := 0; i < len(hex); i++ {
tmp[i+diff] = hex[i]
}
hex = tmp
} }
validationHex := addChecksum(entropyHex) // Calculate the unchecksummed entropy so we can validate that the checksum is
if len(validationHex) != byteSize { // correct.
tmp2 := make([]byte, byteSize) checksumModulo := big.NewInt(0).Exp(bigTwo, big.NewInt(int64(checksumBitSize)), nil)
diff2 := byteSize - len(validationHex) rawEntropy := big.NewInt(0).Div(checksummedEntropy, checksumModulo)
for i := 0; i < len(validationHex); i++ {
tmp2[i+diff2] = validationHex[i] // Convert big.Ints to byte padded byte slices.
} rawEntropyBytes := padByteSlice(rawEntropy.Bytes(), checksumByteSize)
validationHex = tmp2 checksummedEntropyBytes := padByteSlice(checksummedEntropy.Bytes(), fullByteSize)
// Validate that the checksum is correct.
newChecksummedEntropyBytes := padByteSlice(addChecksum(rawEntropyBytes), fullByteSize)
if !compareByteSlices(checksummedEntropyBytes, newChecksummedEntropyBytes) {
return nil, ErrChecksumIncorrect
} }
if len(hex) != len(validationHex) { if len(raw) > 0 && raw[0] {
panic("[]byte len mismatch - it shouldn't happen") return rawEntropyBytes, nil
} }
for i := range validationHex {
if hex[i] != validationHex[i] { return checksummedEntropyBytes, nil
return nil, fmt.Errorf("Invalid byte at position %v", i)
}
}
return hex, nil
} }
// NewSeedWithErrorChecking creates a hashed seed output given the mnemonic string and a password. // NewSeedWithErrorChecking creates a hashed seed output given the mnemonic string and a password.
@ -166,13 +269,36 @@ func NewSeed(mnemonic string, password string) []byte {
return pbkdf2.Key([]byte(mnemonic), []byte("mnemonic"+password), 2048, 64, sha512.New) return pbkdf2.Key([]byte(mnemonic), []byte("mnemonic"+password), 2048, 64, sha512.New)
} }
// IsMnemonicValid attempts to verify that the provided mnemonic is valid.
// Validity is determined by both the number of words being appropriate,
// and that all the words in the mnemonic are present in the word list.
func IsMnemonicValid(mnemonic string) bool {
	// Split the sentence into its individual words.
	words := strings.Fields(mnemonic)

	// The number of words should be 12, 15, 18, 21 or 24.
	switch n := len(words); {
	case n < 12, n > 24, n%3 != 0:
		return false
	}

	// Every word must appear in the current word list.
	for _, w := range words {
		if _, known := wordMap[w]; !known {
			return false
		}
	}

	return true
}
// Appends to data the first (len(data) / 32)bits of the result of sha256(data) // Appends to data the first (len(data) / 32)bits of the result of sha256(data)
// Currently only supports data up to 32 bytes // Currently only supports data up to 32 bytes
func addChecksum(data []byte) []byte { func addChecksum(data []byte) []byte {
// Get first byte of sha256 // Get first byte of sha256
hasher := sha256.New() hash := computeChecksum(data)
hasher.Write(data)
hash := hasher.Sum(nil)
firstChecksumByte := hash[0] firstChecksumByte := hash[0]
// len() is in bytes so we divide by 4 // len() is in bytes so we divide by 4
@ -184,66 +310,68 @@ func addChecksum(data []byte) []byte {
dataBigInt := new(big.Int).SetBytes(data) dataBigInt := new(big.Int).SetBytes(data)
for i := uint(0); i < checksumBitLength; i++ { for i := uint(0); i < checksumBitLength; i++ {
// Bitshift 1 left // Bitshift 1 left
dataBigInt.Mul(dataBigInt, BigTwo) dataBigInt.Mul(dataBigInt, bigTwo)
// Set rightmost bit if leftmost checksum bit is set // Set rightmost bit if leftmost checksum bit is set
if uint8(firstChecksumByte&(1<<(7-i))) > 0 { if uint8(firstChecksumByte&(1<<(7-i))) > 0 {
dataBigInt.Or(dataBigInt, BigOne) dataBigInt.Or(dataBigInt, bigOne)
} }
} }
return dataBigInt.Bytes() return dataBigInt.Bytes()
} }
func padByteSlice(slice []byte, length int) []byte { func computeChecksum(data []byte) []byte {
newSlice := make([]byte, length-len(slice)) hasher := sha256.New()
return append(newSlice, slice...) hasher.Write(data)
return hasher.Sum(nil)
} }
// validateEntropyBitSize ensures that entropy is the correct size for being a
// mnemonic.
func validateEntropyBitSize(bitSize int) error { func validateEntropyBitSize(bitSize int) error {
if (bitSize%32) != 0 || bitSize < 128 || bitSize > 256 { if (bitSize%32) != 0 || bitSize < 128 || bitSize > 256 {
return errors.New("Entropy length must be [128, 256] and a multiple of 32") return ErrEntropyLengthInvalid
} }
return nil return nil
} }
func validateEntropyWithChecksumBitSize(bitSize int) error { // padByteSlice returns a byte slice of the given size with contents of the
if (bitSize != 128+4) && (bitSize != 160+5) && (bitSize != 192+6) && (bitSize != 224+7) && (bitSize != 256+8) { // given slice left padded and any empty spaces filled with 0's.
return fmt.Errorf("Wrong entropy + checksum size - expected %v, got %v", int((bitSize-bitSize%32)+(bitSize-bitSize%32)/32), bitSize) func padByteSlice(slice []byte, length int) []byte {
offset := length - len(slice)
if offset <= 0 {
return slice
} }
return nil newSlice := make([]byte, length)
copy(newSlice[offset:], slice)
return newSlice
} }
// IsMnemonicValid attempts to verify that the provided mnemonic is valid. // compareByteSlices returns true if the byte slices have equal contents and
// Validity is determined by both the number of words being appropriate, // returns false otherwise.
// and that all the words in the mnemonic are present in the word list. func compareByteSlices(a, b []byte) bool {
func IsMnemonicValid(mnemonic string) bool { if len(a) != len(b) {
return false
}
for i := range a {
if a[i] != b[i] {
return false
}
}
return true
}
func splitMnemonicWords(mnemonic string) ([]string, bool) {
// Create a list of all the words in the mnemonic sentence // Create a list of all the words in the mnemonic sentence
words := strings.Fields(mnemonic) words := strings.Fields(mnemonic)
//Get num of words // Get num of words
numOfWords := len(words) numOfWords := len(words)
// The number of words should be 12, 15, 18, 21 or 24 // The number of words should be 12, 15, 18, 21 or 24
if numOfWords%3 != 0 || numOfWords < 12 || numOfWords > 24 { if numOfWords%3 != 0 || numOfWords < 12 || numOfWords > 24 {
return false return nil, false
} }
return words, true
// Check if all words belong in the wordlist
for i := 0; i < numOfWords; i++ {
if !contains(WordList, words[i]) {
return false
}
}
return true
}
func contains(s []string, e string) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
} }

View File

@ -1,15 +0,0 @@
github.com/CrowBits/go-bip39/bip39.go addChecksum 100.00% (11/11)
github.com/CrowBits/go-bip39/bip39.go IsMnemonicValid 100.00% (8/8)
github.com/CrowBits/go-bip39/bip39.go NewEntropy 100.00% (6/6)
github.com/CrowBits/go-bip39/bip39.go contains 100.00% (4/4)
github.com/CrowBits/go-bip39/bip39.go NewSeedWithErrorChecking 100.00% (4/4)
github.com/CrowBits/go-bip39/bip39.go validateEntropyBitSize 100.00% (3/3)
github.com/CrowBits/go-bip39/bip39.go validateEntropyWithChecksumBitSize 100.00% (3/3)
github.com/CrowBits/go-bip39/bip39.go padByteSlice 100.00% (2/2)
github.com/CrowBits/go-bip39/wordlist.go init 100.00% (2/2)
github.com/CrowBits/go-bip39/bip39.go NewSeed 100.00% (1/1)
github.com/CrowBits/go-bip39/bip39.go NewMnemonic 93.75% (15/16)
github.com/CrowBits/go-bip39/bip39.go MnemonicToByteArray 90.24% (37/41)
github.com/CrowBits/go-bip39 ---------------------------------- 95.05% (96/101)

File diff suppressed because it is too large Load Diff

117
vendor/golang.org/x/text/unicode/norm/triegen.go generated vendored Normal file
View File

@ -0,0 +1,117 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ignore
// Trie table generator.
// Used by make*tables tools to generate a go file with trie data structures
// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
// sequence are used to lookup offsets in the index table to be used for the
// next byte. The last byte is used to index into a table with 16-bit values.
package main
import (
"fmt"
"io"
)
// maxSparseEntries is the largest number of sparse entries for which the
// sparse block representation is offered (see Size).
const maxSparseEntries = 16

// normCompacter collects trie blocks that are sparse enough to be emitted as
// compact valueRange tables rather than full 64-entry blocks.
type normCompacter struct {
	sparseBlocks [][]uint64 // blocks accepted by Store, emitted by Print
	sparseOffset []uint16   // per-block starting offset into the values table
	sparseCount  int        // running total of sparse entries stored so far
	name         string     // prefix for the generated variable names
}
// mostFrequentStride returns the most common non-negative difference between
// consecutive values of a, considering only pairs whose first value is
// non-zero. Ties are broken in favor of the smaller stride; an input with no
// qualifying pairs yields 0.
func mostFrequentStride(a []uint64) int {
	counts := make(map[int]int)
	prev := 0
	for _, x := range a {
		cur := int(x)
		if d := cur - prev; prev != 0 && d >= 0 {
			counts[d]++
		}
		prev = cur
	}
	bestStride, bestCount := 0, 0
	for d, c := range counts {
		if c > bestCount || (c == bestCount && d < bestStride) {
			bestStride, bestCount = d, c
		}
	}
	return bestStride
}
// countSparseEntries returns how many non-zero values of a break the block's
// dominant stride — i.e. how many valueRange entries the sparse encoding of
// the block would need.
func countSparseEntries(a []uint64) int {
	stride := mostFrequentStride(a)
	entries := 0
	prev := 0
	for _, x := range a {
		if int(x)-prev != stride && x != 0 {
			entries++
		}
		prev = int(x)
	}
	return entries
}
// Size reports the byte cost of storing v sparsely and whether the sparse
// representation is applicable (at most maxSparseEntries entries).
func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
	n := countSparseEntries(v)
	if n > maxSparseEntries {
		return 0, false
	}
	// One header entry plus n value entries at 4 bytes each, plus a 2-byte offset.
	return (n+1)*4 + 2, true
}
// Store records block v for later emission and returns its handle, which is
// the block's index in the sparse offset table.
func (c *normCompacter) Store(v []uint64) uint32 {
	handle := uint32(len(c.sparseOffset))
	c.sparseBlocks = append(c.sparseBlocks, v)
	c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
	// Account for the header entry plus one entry per stride break.
	c.sparseCount += 1 + countSparseEntries(v)
	return handle
}
// Handler returns the name of the generated lookup function for this
// compacter's sparse tables.
func (c *normCompacter) Handler() string {
	return fmt.Sprintf("%sSparse.lookup", c.name)
}
// Print writes Go source for the collected sparse tables to w: a
// <name>SparseOffset array of per-block offsets and a <name>SparseValues
// array of valueRange entries. The first write error, if any, is returned.
func (c *normCompacter) Print(w io.Writer) (retErr error) {
	// p wraps Fprintf, remembering the first write error in retErr so the
	// emission code below can stay unconditional.
	p := func(f string, x ...interface{}) {
		if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
			retErr = err
		}
	}

	ls := len(c.sparseBlocks)
	p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
	p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)

	ns := c.sparseCount
	p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
	p("var %sSparseValues = [%d]valueRange {", c.name, ns)
	for i, b := range c.sparseBlocks {
		p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
		var v int
		stride := mostFrequentStride(b)
		n := countSparseEntries(b)
		// Header entry for the block: encodes the stride and the entry count.
		p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
		for i, nv := range b {
			// A value that breaks the running stride closes the previous open
			// range (emitting its hi bound) and, if non-zero, opens a new one.
			if int(nv)-v != stride {
				if v != 0 {
					p(",hi:%#02x},", 0x80+i-1)
				}
				if nv != 0 {
					p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
				}
			}
			v = int(nv)
		}
		// Close the final open range of the block, if any.
		if v != 0 {
			p(",hi:%#02x},", 0x80+len(b)-1)
		}
	}
	p("\n}\n\n")
	return
}

12
vendor/vendor.json vendored
View File

@ -103,12 +103,6 @@
"revision": "8e610b2b55bfd1bfa9436ab110d311f5e8a74dcb", "revision": "8e610b2b55bfd1bfa9436ab110d311f5e8a74dcb",
"revisionTime": "2018-06-25T18:44:42Z" "revisionTime": "2018-06-25T18:44:42Z"
}, },
{
"checksumSHA1": "B1sVd5XOmjuSFYivGhBd+vPlc1w=",
"path": "github.com/ebfe/scard",
"revision": "0147d7ead790ac8a5ecc91ee6de68beb7b17c4e9",
"revisionTime": "2017-12-31T19:32:11Z"
},
{ {
"checksumSHA1": "zYnPsNAVm1/ViwCkN++dX2JQhBo=", "checksumSHA1": "zYnPsNAVm1/ViwCkN++dX2JQhBo=",
"path": "github.com/edsrzf/mmap-go", "path": "github.com/edsrzf/mmap-go",
@ -532,10 +526,10 @@
"revisionTime": "2018-11-28T10:09:59Z" "revisionTime": "2018-11-28T10:09:59Z"
}, },
{ {
"checksumSHA1": "vW7IiPtoA4hQQ/ScHlbmRktY89U=", "checksumSHA1": "SsMMqb3xn7hg1ZX5ugwZz5rzpx0=",
"path": "github.com/tyler-smith/go-bip39", "path": "github.com/tyler-smith/go-bip39",
"revision": "8e7a99b3e716f36d3b080a9a70f9eb45abe4edcc", "revision": "dbb3b84ba2ef14e894f5e33d6c6e43641e665738",
"revisionTime": "2016-06-29T16:38:56Z" "revisionTime": "2018-10-17T06:06:43Z"
}, },
{ {
"checksumSHA1": "nD6S4KB0S+YHxVMDDE+w3PyXaMk=", "checksumSHA1": "nD6S4KB0S+YHxVMDDE+w3PyXaMk=",