Diffstat (limited to 'consensus/ethash')
-rw-r--r--   consensus/ethash/algorithm.go             | 3
-rw-r--r--   consensus/ethash/algorithm_go1.8_test.go  | 2
-rw-r--r--   consensus/ethash/consensus.go             | 3
-rw-r--r--   consensus/ethash/ethash.go                | 4
4 files changed, 3 insertions, 9 deletions
diff --git a/consensus/ethash/algorithm.go b/consensus/ethash/algorithm.go
index 365fe5520..a737bc636 100644
--- a/consensus/ethash/algorithm.go
+++ b/consensus/ethash/algorithm.go
@@ -53,7 +53,6 @@ type hasher func(dest []byte, data []byte)
 
 // makeHasher creates a repetitive hasher, allowing the same hash data structures
 // to be reused between hash runs instead of requiring new ones to be created.
-//
 // The returned function is not thread safe!
 func makeHasher(h hash.Hash) hasher {
 	return func(dest []byte, data []byte) {
@@ -82,7 +81,6 @@ func seedHash(block uint64) []byte {
 // memory, then performing two passes of Sergio Demian Lerner's RandMemoHash
 // algorithm from Strict Memory Hard Hashing Functions (2014). The output is a
 // set of 524288 64-byte values.
-//
 // This method places the result into dest in machine byte order.
 func generateCache(dest []uint32, epoch uint64, seed []byte) {
 	// Print some debug logs to allow analysis on low end devices
@@ -220,7 +218,6 @@ func generateDatasetItem(cache []uint32, index uint32, keccak512 hasher) []byte
 }
 
 // generateDataset generates the entire ethash dataset for mining.
-//
 // This method places the result into dest in machine byte order.
 func generateDataset(dest []uint32, epoch uint64, cache []uint32) {
 	// Print some debug logs to allow analysis on low end devices
diff --git a/consensus/ethash/algorithm_go1.8_test.go b/consensus/ethash/algorithm_go1.8_test.go
index fdc302318..a822944a6 100644
--- a/consensus/ethash/algorithm_go1.8_test.go
+++ b/consensus/ethash/algorithm_go1.8_test.go
@@ -20,7 +20,7 @@ package ethash
 
 import "testing"
 
-// Tests whether the dataset size calculator work correctly by cross checking the
+// Tests whether the dataset size calculator works correctly by cross checking the
 // hard coded lookup table with the value generated by it.
 func TestSizeCalculations(t *testing.T) {
 	var tests []uint64
diff --git a/consensus/ethash/consensus.go b/consensus/ethash/consensus.go
index a6c1432e3..dd9c81fd4 100644
--- a/consensus/ethash/consensus.go
+++ b/consensus/ethash/consensus.go
@@ -218,7 +218,6 @@ func (ethash *Ethash) VerifyUncles(chain consensus.ChainReader, block *types.Blo
 
 // verifyHeader checks whether a header conforms to the consensus rules of the
 // stock Ethereum ethash engine.
-//
 // See YP section 4.3.4. "Block Header Validity"
 func (ethash *Ethash) verifyHeader(chain consensus.ChainReader, header, parent *types.Header, uncle bool, seal bool) error {
 	// Ensure that the header's extra-data section is of a reasonable size
@@ -286,7 +285,6 @@ func (ethash *Ethash) verifyHeader(chain consensus.ChainReader, header, parent *
 // CalcDifficulty is the difficulty adjustment algorithm. It returns
 // the difficulty that a new block should have when created at time
 // given the parent block's time and difficulty.
-//
 // TODO (karalabe): Move the chain maker into this package and make this private!
 func CalcDifficulty(config *params.ChainConfig, time uint64, parent *types.Header) *big.Int {
 	next := new(big.Int).Add(parent.Number, common.Big1)
@@ -462,7 +460,6 @@ var (
 // AccumulateRewards credits the coinbase of the given block with the mining
 // reward. The total reward consists of the static block reward and rewards for
 // included uncles. The coinbase of each uncle block is also rewarded.
-//
 // TODO (karalabe): Move the chain maker into this package and make this private!
 func AccumulateRewards(state *state.StateDB, header *types.Header, uncles []*types.Header) {
 	reward := new(big.Int).Set(blockReward)
diff --git a/consensus/ethash/ethash.go b/consensus/ethash/ethash.go
index 7067e8643..dd6147072 100644
--- a/consensus/ethash/ethash.go
+++ b/consensus/ethash/ethash.go
@@ -355,7 +355,7 @@ type Ethash struct {
 // New creates a full sized ethash PoW scheme.
 func New(cachedir string, cachesinmem, cachesondisk int, dagdir string, dagsinmem, dagsondisk int) *Ethash {
 	if cachesinmem <= 0 {
-		log.Warn("One ethash cache must alwast be in memory", "requested", cachesinmem)
+		log.Warn("One ethash cache must always be in memory", "requested", cachesinmem)
		cachesinmem = 1
 	}
 	if cachedir != "" && cachesondisk > 0 {
@@ -412,7 +412,7 @@ func NewFakeDelayer(delay time.Duration) *Ethash {
 	return &Ethash{fakeMode: true, fakeDelay: delay}
 }
 
-// NewFullFaker creates a ethash consensus engine with a full fake scheme that
+// NewFullFaker creates an ethash consensus engine with a full fake scheme that
 // accepts all blocks as valid, without checking any consensus rules whatsoever.
 func NewFullFaker() *Ethash {
 	return &Ethash{fakeMode: true, fakeFull: true}
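For context on the makeHasher comment touched in the first hunk: the context lines show the signature func makeHasher(h hash.Hash) hasher and the returned closure func(dest []byte, data []byte), but not the closure body. Below is a minimal sketch of the reuse pattern that comment describes; the Write/Sum/Reset body is an assumption, and crypto/sha256 stands in for the Keccak hashers ethash actually uses, so this illustrates the pattern rather than reproducing the package's implementation.

package main

import (
	"crypto/sha256"
	"fmt"
	"hash"
)

// hasher mirrors the callback type from consensus/ethash/algorithm.go.
type hasher func(dest []byte, data []byte)

// makeHasher wraps a single hash.Hash in a closure so the same hashing state
// is reused on every call instead of being reallocated. The body shown here
// (Write, Sum into dest, Reset) is an assumed, plausible implementation.
func makeHasher(h hash.Hash) hasher {
	return func(dest []byte, data []byte) {
		h.Write(data)
		h.Sum(dest[:0]) // append the digest into dest's backing array
		h.Reset()
	}
}

func main() {
	sum := make([]byte, sha256.Size) // output buffer reused across runs
	hashFn := makeHasher(sha256.New())

	hashFn(sum, []byte("first input"))
	fmt.Printf("%x\n", sum)

	hashFn(sum, []byte("second input, same sha256 object reused"))
	fmt.Printf("%x\n", sum)
}

Because the closure keeps mutating the one hash.Hash it captured, calling it from multiple goroutines would race on that shared state, which is what the (now single-paragraph) doc comment means by "The returned function is not thread safe!".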