author    Felix Lange <fjl@twurst.com>  2016-12-05 02:07:24 +0800
committer Felix Lange <fjl@twurst.com>  2016-12-05 17:57:11 +0800
commit    f52a1ae849492480b6505ab7805305640518a568 (patch)
tree      95918d77278e15d7dfd52ad24a967d291f6cfdcb /core
parent    3bc0fe1ee3183311efe851aca8fd10d5a5433929 (diff)
core, core/vm, eth/filters: move Removed field into vm.Log
This field used to be assigned by the filter system and returned through the RPC API. Now that we have a Go client that uses the underlying type, the field needs to move. It is now assigned to true when the RemovedLogs event is generated so the filter system doesn't need to care about the field at all.

While here, remove the log list from ChainSideEvent. There are no users of this field right now and any potential users could subscribe to RemovedLogsEvent instead.
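For context (not part of this patch): with Removed living on vm.Log, a client that receives logs through a filter can check the flag directly instead of relying on the filter system. The following is a minimal sketch assuming only the vm.Log type shown in this diff; splitRemoved is a hypothetical helper, not something the commit adds.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/core/vm"
)

// splitRemoved separates logs that are still part of the canonical chain from
// logs that were reverted by a chain reorganisation (Removed == true).
func splitRemoved(logs vm.Logs) (live, reverted vm.Logs) {
	for _, l := range logs {
		if l.Removed {
			reverted = append(reverted, l)
		} else {
			live = append(live, l)
		}
	}
	return live, reverted
}

func main() {
	logs := vm.Logs{{Removed: false}, {Removed: true}}
	live, reverted := splitRemoved(logs)
	fmt.Printf("live=%d reverted=%d\n", len(live), len(reverted))
}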
Diffstat (limited to 'core')
-rw-r--r--  core/blockchain.go   |  29
-rw-r--r--  core/events.go       |   1
-rw-r--r--  core/vm/log.go       | 142
-rw-r--r--  core/vm/log_test.go  |  73
4 files changed, 189 insertions(+), 56 deletions(-)
diff --git a/core/blockchain.go b/core/blockchain.go
index 2eb207d39..1f762d147 100644
--- a/core/blockchain.go
+++ b/core/blockchain.go
@@ -988,7 +988,7 @@ func (self *BlockChain) InsertChain(chain types.Blocks) (int, error) {
glog.Infof("inserted forked block #%d [%x…] (TD=%v) in %9v: %3d txs %d uncles.", block.Number(), block.Hash().Bytes()[0:4], block.Difficulty(), common.PrettyDuration(time.Since(bstart)), len(block.Transactions()), len(block.Uncles()))
}
blockInsertTimer.UpdateSince(bstart)
- events = append(events, ChainSideEvent{block, logs})
+ events = append(events, ChainSideEvent{block})
case SplitStatTy:
events = append(events, ChainSplitEvent{block, logs})
@@ -1062,24 +1062,25 @@ func countTransactions(chain []*types.Block) (c int) {
// event about them
func (self *BlockChain) reorg(oldBlock, newBlock *types.Block) error {
var (
- newChain types.Blocks
- oldChain types.Blocks
- commonBlock *types.Block
- oldStart = oldBlock
- newStart = newBlock
- deletedTxs types.Transactions
- deletedLogs vm.Logs
- deletedLogsByHash = make(map[common.Hash]vm.Logs)
+ newChain types.Blocks
+ oldChain types.Blocks
+ commonBlock *types.Block
+ oldStart = oldBlock
+ newStart = newBlock
+ deletedTxs types.Transactions
+ deletedLogs vm.Logs
// collectLogs collects the logs that were generated during the
// processing of the block that corresponds with the given hash.
// These logs are later announced as deleted.
collectLogs = func(h common.Hash) {
- // Coalesce logs
+ // Coalesce logs and set 'Removed'.
receipts := GetBlockReceipts(self.chainDb, h, self.hc.GetBlockNumber(h))
for _, receipt := range receipts {
- deletedLogs = append(deletedLogs, receipt.Logs...)
-
- deletedLogsByHash[h] = receipt.Logs
+ for _, log := range receipt.Logs {
+ del := *log
+ del.Removed = true
+ deletedLogs = append(deletedLogs, &del)
+ }
}
}
)
@@ -1173,7 +1174,7 @@ func (self *BlockChain) reorg(oldBlock, newBlock *types.Block) error {
if len(oldChain) > 0 {
go func() {
for _, block := range oldChain {
- self.eventMux.Post(ChainSideEvent{Block: block, Logs: deletedLogsByHash[block.Hash()]})
+ self.eventMux.Post(ChainSideEvent{Block: block})
}
}()
}
diff --git a/core/events.go b/core/events.go
index 322bcb769..414493fbf 100644
--- a/core/events.go
+++ b/core/events.go
@@ -61,7 +61,6 @@ type ChainEvent struct {
type ChainSideEvent struct {
Block *types.Block
- Logs vm.Logs
}
type PendingBlockEvent struct {
diff --git a/core/vm/log.go b/core/vm/log.go
index 06f941703..347bd6e5d 100644
--- a/core/vm/log.go
+++ b/core/vm/log.go
@@ -29,20 +29,42 @@ import (
var errMissingLogFields = errors.New("missing required JSON log fields")
-// Log represents a contract log event. These events are generated by the LOG
-// opcode and stored/indexed by the node.
+// Log represents a contract log event. These events are generated by the LOG opcode and
+// stored/indexed by the node.
type Log struct {
// Consensus fields.
Address common.Address // address of the contract that generated the event
Topics []common.Hash // list of topics provided by the contract.
Data []byte // supplied by the contract, usually ABI-encoded
- // Derived fields (don't reorder!).
+ // Derived fields. These fields are filled in by the node
+ // but not secured by consensus.
BlockNumber uint64 // block in which the transaction was included
TxHash common.Hash // hash of the transaction
TxIndex uint // index of the transaction in the block
BlockHash common.Hash // hash of the block in which the transaction was included
Index uint // index of the log in the receipt
+
+ // The Removed field is true if this log was reverted due to a chain reorganisation.
+ // You must pay attention to this field if you receive logs through a filter query.
+ Removed bool
+}
+
+type rlpLog struct {
+ Address common.Address
+ Topics []common.Hash
+ Data []byte
+}
+
+type rlpStorageLog struct {
+ Address common.Address
+ Topics []common.Hash
+ Data []byte
+ BlockNumber uint64
+ TxHash common.Hash
+ TxIndex uint
+ BlockHash common.Hash
+ Index uint
}
type jsonLog struct {
@@ -54,27 +76,26 @@ type jsonLog struct {
TxHash *common.Hash `json:"transactionHash"`
BlockHash *common.Hash `json:"blockHash"`
Index *hexutil.Uint `json:"logIndex"`
+ Removed bool `json:"removed"`
}
func NewLog(address common.Address, topics []common.Hash, data []byte, number uint64) *Log {
return &Log{Address: address, Topics: topics, Data: data, BlockNumber: number}
}
+// EncodeRLP implements rlp.Encoder.
func (l *Log) EncodeRLP(w io.Writer) error {
- return rlp.Encode(w, []interface{}{l.Address, l.Topics, l.Data})
+ return rlp.Encode(w, rlpLog{Address: l.Address, Topics: l.Topics, Data: l.Data})
}
+// DecodeRLP implements rlp.Decoder.
func (l *Log) DecodeRLP(s *rlp.Stream) error {
- var log struct {
- Address common.Address
- Topics []common.Hash
- Data []byte
+ var dec rlpLog
+ err := s.Decode(&dec)
+ if err == nil {
+ l.Address, l.Topics, l.Data = dec.Address, dec.Topics, dec.Data
}
- if err := s.Decode(&log); err != nil {
- return err
- }
- l.Address, l.Topics, l.Data = log.Address, log.Topics, log.Data
- return nil
+ return err
}
func (l *Log) String() string {
@@ -82,45 +103,88 @@ func (l *Log) String() string {
}
// MarshalJSON implements json.Marshaler.
-func (r *Log) MarshalJSON() ([]byte, error) {
- return json.Marshal(&jsonLog{
- Address: &r.Address,
- Topics: &r.Topics,
- Data: (*hexutil.Bytes)(&r.Data),
- BlockNumber: (*hexutil.Uint64)(&r.BlockNumber),
- TxIndex: (*hexutil.Uint)(&r.TxIndex),
- TxHash: &r.TxHash,
- BlockHash: &r.BlockHash,
- Index: (*hexutil.Uint)(&r.Index),
- })
+func (l *Log) MarshalJSON() ([]byte, error) {
+ jslog := &jsonLog{
+ Address: &l.Address,
+ Topics: &l.Topics,
+ Data: (*hexutil.Bytes)(&l.Data),
+ TxIndex: (*hexutil.Uint)(&l.TxIndex),
+ TxHash: &l.TxHash,
+ Index: (*hexutil.Uint)(&l.Index),
+ Removed: l.Removed,
+ }
+ // Set block information for mined logs.
+ if (l.BlockHash != common.Hash{}) {
+ jslog.BlockHash = &l.BlockHash
+ jslog.BlockNumber = (*hexutil.Uint64)(&l.BlockNumber)
+ }
+ return json.Marshal(jslog)
}
// UnmarshalJSON implements json.Unmarshaler.
-func (r *Log) UnmarshalJSON(input []byte) error {
+func (l *Log) UnmarshalJSON(input []byte) error {
var dec jsonLog
if err := json.Unmarshal(input, &dec); err != nil {
return err
}
- if dec.Address == nil || dec.Topics == nil || dec.Data == nil || dec.BlockNumber == nil ||
- dec.TxIndex == nil || dec.TxHash == nil || dec.BlockHash == nil || dec.Index == nil {
+ if dec.Address == nil || dec.Topics == nil || dec.Data == nil ||
+ dec.TxIndex == nil || dec.TxHash == nil || dec.Index == nil {
return errMissingLogFields
}
- *r = Log{
- Address: *dec.Address,
- Topics: *dec.Topics,
- Data: *dec.Data,
- BlockNumber: uint64(*dec.BlockNumber),
- TxHash: *dec.TxHash,
- TxIndex: uint(*dec.TxIndex),
- BlockHash: *dec.BlockHash,
- Index: uint(*dec.Index),
+ declog := Log{
+ Address: *dec.Address,
+ Topics: *dec.Topics,
+ Data: *dec.Data,
+ TxHash: *dec.TxHash,
+ TxIndex: uint(*dec.TxIndex),
+ Index: uint(*dec.Index),
+ Removed: dec.Removed,
+ }
+ // Block information may be missing if the log is received through
+ // the pending log filter, so it's handled specially here.
+ if dec.BlockHash != nil && dec.BlockNumber != nil {
+ declog.BlockHash = *dec.BlockHash
+ declog.BlockNumber = uint64(*dec.BlockNumber)
}
+ *l = declog
return nil
}
type Logs []*Log
-// LogForStorage is a wrapper around a Log that flattens and parses the entire
-// content of a log, as opposed to only the consensus fields originally (by hiding
-// the rlp interface methods).
+// LogForStorage is a wrapper around a Log that flattens and parses the entire content of
+// a log including non-consensus fields.
type LogForStorage Log
+
+// EncodeRLP implements rlp.Encoder.
+func (l *LogForStorage) EncodeRLP(w io.Writer) error {
+ return rlp.Encode(w, rlpStorageLog{
+ Address: l.Address,
+ Topics: l.Topics,
+ Data: l.Data,
+ BlockNumber: l.BlockNumber,
+ TxHash: l.TxHash,
+ TxIndex: l.TxIndex,
+ BlockHash: l.BlockHash,
+ Index: l.Index,
+ })
+}
+
+// DecodeRLP implements rlp.Decoder.
+func (l *LogForStorage) DecodeRLP(s *rlp.Stream) error {
+ var dec rlpStorageLog
+ err := s.Decode(&dec)
+ if err == nil {
+ *l = LogForStorage{
+ Address: dec.Address,
+ Topics: dec.Topics,
+ Data: dec.Data,
+ BlockNumber: dec.BlockNumber,
+ TxHash: dec.TxHash,
+ TxIndex: dec.TxIndex,
+ BlockHash: dec.BlockHash,
+ Index: dec.Index,
+ }
+ }
+ return err
+}
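Illustrative usage (not part of this diff): the same log value encodes differently depending on the wrapper type. Encoding a *vm.Log keeps only the consensus fields, while LogForStorage also round-trips the derived fields for the receipt database. The sketch below uses rlp.EncodeToBytes and rlp.DecodeBytes from the rlp package; the literal values are made up for illustration.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/vm"
	"github.com/ethereum/go-ethereum/rlp"
)

func main() {
	log := &vm.Log{
		Address:     common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
		Topics:      []common.Hash{common.HexToHash("0x01")},
		Data:        []byte{0x02},
		BlockNumber: 2019236,
		Index:       2,
	}

	// Consensus encoding: Address, Topics and Data only.
	consensus, err := rlp.EncodeToBytes(log)
	if err != nil {
		panic(err)
	}
	// Storage encoding: all fields, including BlockNumber and Index.
	full, err := rlp.EncodeToBytes((*vm.LogForStorage)(log))
	if err != nil {
		panic(err)
	}

	var stored vm.LogForStorage
	if err := rlp.DecodeBytes(full, &stored); err != nil {
		panic(err)
	}
	fmt.Printf("consensus=%d bytes storage=%d bytes restored block=%d\n",
		len(consensus), len(full), stored.BlockNumber)
}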
diff --git a/core/vm/log_test.go b/core/vm/log_test.go
index 4d3189558..994753c62 100644
--- a/core/vm/log_test.go
+++ b/core/vm/log_test.go
@@ -18,18 +18,81 @@ package vm
import (
"encoding/json"
+ "reflect"
"testing"
+
+ "github.com/davecgh/go-spew/spew"
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/common/hexutil"
)
var unmarshalLogTests = map[string]struct {
input string
+ want *Log
wantError error
}{
"ok": {
- input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","data":"0x000000000000000000000000000000000000000000000001a055690d9db80000","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615","0x000000000000000000000000f9dff387dcb5cc4cca5b91adb07a95f54e9f1bb6"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
+ input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","data":"0x000000000000000000000000000000000000000000000001a055690d9db80000","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
+ want: &Log{
+ Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
+ BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"),
+ BlockNumber: 2019236,
+ Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000001a055690d9db80000"),
+ Index: 2,
+ TxIndex: 3,
+ TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"),
+ Topics: []common.Hash{
+ common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
+ common.HexToHash("0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"),
+ },
+ },
},
"empty data": {
- input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","data":"0x","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615","0x000000000000000000000000f9dff387dcb5cc4cca5b91adb07a95f54e9f1bb6"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
+ input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","data":"0x","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
+ want: &Log{
+ Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
+ BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"),
+ BlockNumber: 2019236,
+ Data: []byte{},
+ Index: 2,
+ TxIndex: 3,
+ TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"),
+ Topics: []common.Hash{
+ common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
+ common.HexToHash("0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"),
+ },
+ },
+ },
+ "missing block fields (pending logs)": {
+ input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","data":"0x","logIndex":"0x0","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
+ want: &Log{
+ Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
+ BlockHash: common.Hash{},
+ BlockNumber: 0,
+ Data: []byte{},
+ Index: 0,
+ TxIndex: 3,
+ TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"),
+ Topics: []common.Hash{
+ common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
+ },
+ },
+ },
+ "Removed: true": {
+ input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","data":"0x","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3","removed":true}`,
+ want: &Log{
+ Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
+ BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"),
+ BlockNumber: 2019236,
+ Data: []byte{},
+ Index: 2,
+ TxIndex: 3,
+ TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"),
+ Topics: []common.Hash{
+ common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
+ },
+ Removed: true,
+ },
},
"missing data": {
input: `{"address":"0xecf8f87f810ecf450940c9f60066b4a7a501d6a7","blockHash":"0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056","blockNumber":"0x1ecfa4","logIndex":"0x2","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615","0x000000000000000000000000f9dff387dcb5cc4cca5b91adb07a95f54e9f1bb6"],"transactionHash":"0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e","transactionIndex":"0x3"}`,
@@ -38,10 +101,16 @@ var unmarshalLogTests = map[string]struct {
}
func TestUnmarshalLog(t *testing.T) {
+ dumper := spew.ConfigState{DisableMethods: true, Indent: " "}
for name, test := range unmarshalLogTests {
var log *Log
err := json.Unmarshal([]byte(test.input), &log)
checkError(t, name, err, test.wantError)
+ if test.wantError == nil && err == nil {
+ if !reflect.DeepEqual(log, test.want) {
+ t.Errorf("test %q:\nGOT %sWANT %s", name, dumper.Sdump(log), dumper.Sdump(test.want))
+ }
+ }
}
}