 eth/downloader/downloader.go      | 23
 eth/downloader/downloader_test.go | 43
 eth/downloader/peer.go            |  2
 eth/downloader/queue.go           |  2
 eth/handler.go                    |  4
 eth/peer.go                       |  2
 6 files changed, 64 insertions(+), 12 deletions(-)
diff --git a/eth/downloader/downloader.go b/eth/downloader/downloader.go
index cd2fd81f1..7eda49186 100644
--- a/eth/downloader/downloader.go
+++ b/eth/downloader/downloader.go
@@ -17,18 +17,17 @@ import (
"gopkg.in/fatih/set.v0"
)
-const (
+var (
MinHashFetch = 512 // Minimum amount of hashes to not consider a peer stalling
MaxHashFetch = 2048 // Amount of hashes to be fetched per retrieval request
MaxBlockFetch = 128 // Amount of blocks to be fetched per retrieval request
- hashTTL = 5 * time.Second // Time it takes for a hash request to time out
-)
-
-var (
+ hashTTL = 5 * time.Second // Time it takes for a hash request to time out
blockSoftTTL = 3 * time.Second // Request completion threshold for increasing or decreasing a peer's bandwidth
blockHardTTL = 3 * blockSoftTTL // Maximum time allowance before a block request is considered expired
crossCheckCycle = time.Second // Period after which to check for expired cross checks
+
+ maxBannedHashes = 4096 // Number of bannable hashes before phasing old ones out
)
var (
@@ -602,9 +601,19 @@ func (d *Downloader) banBlocks(peerId string, head common.Hash) error {
}
index++
}
+ // Ban the head hash and phase out any excess
d.banned.Add(blocks[index].Hash())
-
- glog.V(logger.Debug).Infof("Banned %d blocks from: %s\n", index+1, peerId)
+ for d.banned.Size() > maxBannedHashes {
+ d.banned.Each(func(item interface{}) bool {
+ // Skip any hard coded bans
+ if core.BadHashes[item.(common.Hash)] {
+ return true
+ }
+ d.banned.Remove(item)
+ return false
+ })
+ }
+ glog.V(logger.Debug).Infof("Banned %d blocks from: %s", index+1, peerId)
return nil
}
}
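
The eviction loop added above keeps the ban set bounded: once it grows past maxBannedHashes, Each walks the set and drops one arbitrary entry per pass, skipping anything listed in core.BadHashes so the hard coded bans always survive. The standalone sketch below reproduces that behaviour outside the downloader; it substitutes plain strings for common.Hash and a local map for core.BadHashes, so maxBanned, hardCoded and ban are illustrative names only, not go-ethereum code.

    package main

    import (
    	"fmt"

    	"gopkg.in/fatih/set.v0"
    )

    const maxBanned = 4 // stand-in for maxBannedHashes

    // hardCoded plays the role of core.BadHashes: bans that must never be evicted.
    var hardCoded = map[string]bool{"hard-coded-bad-block": true}

    // ban adds a hash to the set, then phases out arbitrary soft bans until the
    // cap is respected again, always skipping the hard coded entries.
    func ban(banned *set.Set, hash string) {
    	banned.Add(hash)
    	for banned.Size() > maxBanned {
    		banned.Each(func(item interface{}) bool {
    			if hardCoded[item.(string)] {
    				return true // keep iterating, never drop a hard coded ban
    			}
    			banned.Remove(item)
    			return false // one eviction is enough for this pass
    		})
    	}
    }

    func main() {
    	banned := set.New()
    	banned.Add("hard-coded-bad-block")
    	for i := 0; i < 10; i++ {
    		ban(banned, fmt.Sprintf("bad-%d", i))
    	}
    	fmt.Println(banned.Size(), banned.Has("hard-coded-bad-block")) // 4 true
    }
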
diff --git a/eth/downloader/downloader_test.go b/eth/downloader/downloader_test.go
index 4219c2788..c9e84371c 100644
--- a/eth/downloader/downloader_test.go
+++ b/eth/downloader/downloader_test.go
@@ -7,6 +7,7 @@ import (
"time"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/event"
)
@@ -559,3 +560,45 @@ func TestBannedChainStarvationAttack(t *testing.T) {
banned = bans
}
}
+
+// Tests that if a peer sends excessively many/large invalid chains that are
+// gradually banned, the memory consumed by the ban list stays bounded and the
+// originally hard coded bad hashes are never evicted from it.
+func TestBannedChainMemoryExhaustionAttack(t *testing.T) {
+ // Reduce the test size a bit
+ MaxBlockFetch = 4
+ maxBannedHashes = 256
+
+ // Construct a banned chain with more chunks than the ban limit
+ hashes := createHashes(0, maxBannedHashes*MaxBlockFetch)
+ hashes[len(hashes)-1] = bannedHash // weird index to have non multiple of ban chunk size
+
+ blocks := createBlocksFromHashes(hashes)
+
+ // Create the tester and ban the selected hash
+ tester := newTester(t, hashes, blocks)
+ tester.downloader.banned.Add(bannedHash)
+
+ // Iteratively try to sync, and verify that the banned hash list grows until
+ // the head of the invalid chain is blocked too.
+ tester.newPeer("attack", big.NewInt(10000), hashes[0])
+ for {
+ // Try to sync with the attacker, check hash chain failure
+ if _, err := tester.syncTake("attack", hashes[0]); err != ErrInvalidChain {
+ t.Fatalf("synchronisation error mismatch: have %v, want %v", err, ErrInvalidChain)
+ }
+ // Short circuit if the entire chain was banned
+ if tester.downloader.banned.Has(hashes[0]) {
+ break
+ }
+ // Otherwise ensure we never exceed the memory allowance and the hard coded bans are untouched
+ if bans := tester.downloader.banned.Size(); bans > maxBannedHashes {
+ t.Fatalf("ban cap exceeded: have %v, want max %v", bans, maxBannedHashes)
+ }
+ for hash := range core.BadHashes {
+ if !tester.downloader.banned.Has(hash) {
+ t.Fatalf("hard coded ban evacuated: %x", hash)
+ }
+ }
+ }
+}
diff --git a/eth/downloader/peer.go b/eth/downloader/peer.go
index 5fbc64648..9614a6951 100644
--- a/eth/downloader/peer.go
+++ b/eth/downloader/peer.go
@@ -94,7 +94,7 @@ func (p *peer) SetIdle() {
for {
// Calculate the new download bandwidth allowance
prev := atomic.LoadInt32(&p.capacity)
- next := int32(math.Max(1, math.Min(MaxBlockFetch, float64(prev)*scale)))
+ next := int32(math.Max(1, math.Min(float64(MaxBlockFetch), float64(prev)*scale)))
// Try to update the old value
if atomic.CompareAndSwapInt32(&p.capacity, prev, next) {
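
Because MaxBlockFetch is now a typed int variable rather than an untyped constant, it no longer converts implicitly inside math.Min, hence the explicit float64 conversion above. A minimal, self-contained sketch of the same compare-and-swap scaling loop follows; the limit and scale factor are illustrative values, not the downloader's tuned parameters.

    package main

    import (
    	"fmt"
    	"math"
    	"sync/atomic"
    )

    var maxBlockFetch = 128 // typed int variable, like MaxBlockFetch after this change

    // scaleCapacity multiplies the capacity by scale, clamps it to [1, maxBlockFetch]
    // and retries the atomic swap until no concurrent update interferes.
    func scaleCapacity(capacity *int32, scale float64) int32 {
    	for {
    		prev := atomic.LoadInt32(capacity)
    		// As an int variable the limit needs an explicit float64 conversion;
    		// as an untyped constant it used to convert implicitly.
    		next := int32(math.Max(1, math.Min(float64(maxBlockFetch), float64(prev)*scale)))
    		if atomic.CompareAndSwapInt32(capacity, prev, next) {
    			return next
    		}
    	}
    }

    func main() {
    	c := int32(10)
    	fmt.Println(scaleCapacity(&c, 2.0)) // 20
    	fmt.Println(scaleCapacity(&c, 0.1)) // 2
    }
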
diff --git a/eth/downloader/queue.go b/eth/downloader/queue.go
index 3c99efb81..7abbd42fd 100644
--- a/eth/downloader/queue.go
+++ b/eth/downloader/queue.go
@@ -16,7 +16,7 @@ import (
"gopkg.in/karalabe/cookiejar.v2/collections/prque"
)
-const (
+var (
blockCacheLimit = 8 * MaxBlockFetch // Maximum number of blocks to cache before throttling the download
)
diff --git a/eth/handler.go b/eth/handler.go
index aea33452c..37bbd3691 100644
--- a/eth/handler.go
+++ b/eth/handler.go
@@ -213,8 +213,8 @@ func (self *ProtocolManager) handleMsg(p *peer) error {
return errResp(ErrDecode, "->msg %v: %v", msg, err)
}
- if request.Amount > downloader.MaxHashFetch {
- request.Amount = downloader.MaxHashFetch
+ if request.Amount > uint64(downloader.MaxHashFetch) {
+ request.Amount = uint64(downloader.MaxHashFetch)
}
hashes := self.chainman.GetBlockHashesFromHash(request.Hash, request.Amount)
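
The uint64 conversions in this hunk (and in eth/peer.go below) follow from the same const-to-var switch: request.Amount is a uint64, and a typed int variable no longer mixes with it implicitly the way an untyped constant did. A throwaway illustration of that rule, not go-ethereum code:

    package main

    import "fmt"

    const maxConst = 2048 // untyped constant: adapts to uint64 implicitly
    var maxVar int = 2048 // typed int variable: needs an explicit conversion

    func clamp(amount uint64) uint64 {
    	if amount > maxConst { // compiles: the untyped constant takes on uint64
    		amount = maxConst
    	}
    	if amount > uint64(maxVar) { // int and uint64 never mix without a conversion
    		amount = uint64(maxVar)
    	}
    	return amount
    }

    func main() {
    	fmt.Println(clamp(100000)) // 2048
    }
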
diff --git a/eth/peer.go b/eth/peer.go
index bb6a20349..b2fa20ebb 100644
--- a/eth/peer.go
+++ b/eth/peer.go
@@ -102,7 +102,7 @@ func (p *peer) sendTransaction(tx *types.Transaction) error {
func (p *peer) requestHashes(from common.Hash) error {
glog.V(logger.Debug).Infof("[%s] fetching hashes (%d) %x...\n", p.id, downloader.MaxHashFetch, from[:4])
- return p2p.Send(p.rw, GetBlockHashesMsg, getBlockHashesMsgData{from, downloader.MaxHashFetch})
+ return p2p.Send(p.rw, GetBlockHashesMsg, getBlockHashesMsgData{from, uint64(downloader.MaxHashFetch)})
}
func (p *peer) requestBlocks(hashes []common.Hash) error {