From 89802f08af81f6e9c21c49d614e3cdd2e520a891 Mon Sep 17 00:00:00 2001 From: Sambhav Jain <136801346+DarkLord017@users.noreply.github.com> Date: Sat, 2 Nov 2024 09:23:52 +0530 Subject: [PATCH] Pushed tests n refactored evm --- execution/evm/journal.go | 64 +++++ execution/evm/journal_entry.go | 157 ++++++++++ execution/evm/journal_entry_test.go | 164 +++++++++++ execution/evm/journal_test.go | 111 +++++++ execution/evm/non_optimism.go | 8 + execution/evm/non_optimism_test.go | 11 + execution/evm/optimism.go | 8 + execution/evm/optimism_test.go | 14 + execution/evm/post_execution.go | 20 ++ execution/evm/post_execution_test.go | 120 ++++++++ execution/evm/pre_execution.go | 17 ++ execution/evm/pre_execution_test.go | 119 ++++++++ execution/evm/precompiles.go | 132 +++++++++ execution/evm/precompiles_test.go | 125 ++++++++ execution/evm/result.go | 60 ++++ execution/evm/serde_utils.go | 379 ++++++++++++++++++++++++ execution/evm/spec_common_test.go | 103 +++++++ execution/evm/specs_common.go | 37 +++ execution/evm/specs_default.go | 414 +++++++++++++++++++++++++++ execution/evm/specs_default_test.go | 109 +++++++ execution/evm/specs_optimism.go | 255 +++++++++++++++++ execution/evm/specs_optimism_test.go | 378 ++++++++++++++++++++++++ execution/evm/tables_test.go | 48 ++++ 23 files changed, 2853 insertions(+) create mode 100644 execution/evm/journal.go create mode 100644 execution/evm/journal_entry.go create mode 100644 execution/evm/journal_entry_test.go create mode 100644 execution/evm/journal_test.go create mode 100644 execution/evm/non_optimism.go create mode 100644 execution/evm/non_optimism_test.go create mode 100644 execution/evm/optimism.go create mode 100644 execution/evm/optimism_test.go create mode 100644 execution/evm/post_execution.go create mode 100644 execution/evm/post_execution_test.go create mode 100644 execution/evm/pre_execution.go create mode 100644 execution/evm/pre_execution_test.go create mode 100644 execution/evm/precompiles.go create mode 100644 execution/evm/precompiles_test.go create mode 100644 execution/evm/result.go create mode 100644 execution/evm/serde_utils.go create mode 100644 execution/evm/spec_common_test.go create mode 100644 execution/evm/specs_common.go create mode 100644 execution/evm/specs_default.go create mode 100644 execution/evm/specs_default_test.go create mode 100644 execution/evm/specs_optimism.go create mode 100644 execution/evm/specs_optimism_test.go create mode 100644 execution/evm/tables_test.go diff --git a/execution/evm/journal.go b/execution/evm/journal.go new file mode 100644 index 0000000..3bc382f --- /dev/null +++ b/execution/evm/journal.go @@ -0,0 +1,64 @@ +package evm + +import ( + "github.com/BlocSoc-iitr/selene/common" +) + +type JournaledState struct { + State EvmState + TransientStorage TransientStorage + Logs []Log[LogData] + Depth uint + Journal [][]JournalEntry + Spec SpecId + WarmPreloadedAddresses map[Address]struct{} +} + +func NewJournalState(spec SpecId, warmPreloadedAddresses map[Address]struct{}) JournaledState { + return JournaledState{ + State: nil, + TransientStorage: nil, + Logs: []Log[LogData]{}, + Depth: 0, + Journal: [][]JournalEntry{}, + Spec: spec, + WarmPreloadedAddresses: warmPreloadedAddresses, + } +} + +type JournalCheckpoint struct { + Log_i uint + Journal_i uint +} + +func (j *JournaledState) setSpecId(spec SpecId) { + j.Spec = spec +} + +type TransientStorage map[Key]U256 +type EvmState map[common.Address]Account +type Key struct { + Account common.Address + Slot U256 +} +type Log[T any] struct { + // 
The address which emitted this log. + Address Address `json:"address"` + // The log data. + Data T `json:"data"` +} + +type LogData struct { + // The indexed topic list. + Topics []B256 `json:"topics"` + // The plain data. + Data Bytes `json:"data"` +} + +func (l *Log[LogData]) UnmarshalJSON(data []byte) error { + return unmarshalJSON(data, l) +} + +func (l *LogData) UnmarshalJSON(data []byte) error { + return unmarshalJSON(data, l) +} diff --git a/execution/evm/journal_entry.go b/execution/evm/journal_entry.go new file mode 100644 index 0000000..cb903d3 --- /dev/null +++ b/execution/evm/journal_entry.go @@ -0,0 +1,157 @@ +package evm + +import ( + "encoding/json" + "fmt" + "math/big" +) + +type JournalEntryType uint8 + +const ( + AccountWarmedType JournalEntryType = iota + AccountDestroyedType + AccountTouchedType + BalanceTransferType + NonceChangeType + AccountCreatedType + StorageChangedType + StorageWarmedType + TransientStorageChangeType + CodeChangeType +) + +// JournalEntry represents a journal entry with various fields. +type JournalEntry struct { + Type JournalEntryType `json:"type"` + Address Address `json:"address"` + Target Address `json:"target,omitempty"` // Used for AccountDestroyed + WasDestroyed bool `json:"was_destroyed,omitempty"` // Used for AccountDestroyed + HadBalance U256 `json:"had_balance,omitempty"` // Used for AccountDestroyed + Balance U256 `json:"balance,omitempty"` // Used for BalanceTransfer + From Address `json:"from,omitempty"` // Used for BalanceTransfer + To Address `json:"to,omitempty"` // Used for BalanceTransfer + Key U256 `json:"key,omitempty"` // Used for Storage operations + HadValue U256 `json:"had_value,omitempty"` // Used for Storage operations +} + +// MarshalJSON implements the json.Marshaler interface for JournalEntry. 
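+// Address and U256 fields are emitted as 0x-prefixed hex strings; the Alias type
+// prevents json.Marshal from recursing back into this method.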
+func (j JournalEntry) MarshalJSON() ([]byte, error) { + type Alias JournalEntry // Create an alias to avoid recursion + + // Helper function to convert U256 to hex string + u256ToHex := func(u U256) string { + return fmt.Sprintf("0x%s", (*big.Int)(u).Text(16)) + } + + return json.Marshal(&struct { + Address string `json:"address"` + Target string `json:"target,omitempty"` + From string `json:"from,omitempty"` + To string `json:"to,omitempty"` + Key string `json:"key,omitempty"` + HadBalance string `json:"had_balance,omitempty"` + Balance string `json:"balance,omitempty"` + HadValue string `json:"had_value,omitempty"` + *Alias + }{ + Address: "0x" + fmt.Sprintf("%x", j.Address.Addr[:]), // Convert to hex string + Target: "0x" + fmt.Sprintf("%x", j.Target.Addr[:]), // Convert to hex string + From: "0x" + fmt.Sprintf("%x", j.From.Addr[:]), // Convert to hex string + To: "0x" + fmt.Sprintf("%x", j.To.Addr[:]), // Convert to hex string + Key: u256ToHex(j.Key), // Convert U256 to hex string + HadBalance: u256ToHex(j.HadBalance), // Convert U256 to hex string + Balance: u256ToHex(j.Balance), // Convert U256 to hex string + HadValue: u256ToHex(j.HadValue), // Convert U256 to hex string + Alias: (*Alias)(&j), // Embed the original struct + }) +} + +func NewAccountWarmedEntry(address Address) *JournalEntry { + return &JournalEntry{ + Type: AccountWarmedType, + Address: address, + } +} + +// NewAccountDestroyedEntry creates a new journal entry for destroying an account +func NewAccountDestroyedEntry(address, target Address, wasDestroyed bool, hadBalance U256) *JournalEntry { + return &JournalEntry{ + Type: AccountDestroyedType, + Address: address, + Target: target, + WasDestroyed: wasDestroyed, + HadBalance: new(big.Int).Set(hadBalance), //to avoid mutating the original value (had balance not written directly) + } +} + +// NewAccountTouchedEntry creates a new journal entry for touching an account +func NewAccountTouchedEntry(address Address) *JournalEntry { + return &JournalEntry{ + Type: AccountTouchedType, + Address: address, + } +} + +// NewBalanceTransferEntry creates a new journal entry for balance transfer +func NewBalanceTransferEntry(from, to Address, balance U256) *JournalEntry { + return &JournalEntry{ + Type: BalanceTransferType, + From: from, + To: to, + Balance: new(big.Int).Set(balance), + } +} + +// NewNonceChangeEntry creates a new journal entry for nonce change +func NewNonceChangeEntry(address Address) *JournalEntry { + return &JournalEntry{ + Type: NonceChangeType, + Address: address, + } +} + +// NewAccountCreatedEntry creates a new journal entry for account creation +func NewAccountCreatedEntry(address Address) *JournalEntry { + return &JournalEntry{ + Type: AccountCreatedType, + Address: address, + } +} + +// NewStorageChangedEntry creates a new journal entry for storage change +func NewStorageChangedEntry(address Address, key, hadValue U256) *JournalEntry { + return &JournalEntry{ + Type: StorageChangedType, + Address: address, + Key: new(big.Int).Set(key), + HadValue: new(big.Int).Set(hadValue), + } +} + +// NewStorageWarmedEntry creates a new journal entry for storage warming +func NewStorageWarmedEntry(address Address, key U256) *JournalEntry { + return &JournalEntry{ + Type: StorageWarmedType, + Address: address, + Key: new(big.Int).Set(key), + } +} + +// NewTransientStorageChangeEntry creates a new journal entry for transient storage change +func NewTransientStorageChangeEntry(address Address, key, hadValue U256) *JournalEntry { + return &JournalEntry{ + Type: 
TransientStorageChangeType, + Address: address, + Key: new(big.Int).Set(key), + HadValue: new(big.Int).Set(hadValue), + } +} + +// NewCodeChangeEntry creates a new journal entry for code change +func NewCodeChangeEntry(address Address) *JournalEntry { + return &JournalEntry{ + Type: CodeChangeType, + Address: address, + } +} diff --git a/execution/evm/journal_entry_test.go b/execution/evm/journal_entry_test.go new file mode 100644 index 0000000..597a673 --- /dev/null +++ b/execution/evm/journal_entry_test.go @@ -0,0 +1,164 @@ +package evm + +import ( + "encoding/hex" + "encoding/json" + "math/big" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +// HexToAddress converts a hex string to an Address type +func HexToAddress(hexStr string) Address { + var addr Address + bytes, err := hex.DecodeString(hexStr[2:]) // Remove '0x' prefix + if err == nil && len(bytes) == 20 { + var bytesNew [20]byte + copy(bytesNew[:], bytes) + addr = Address{ + Addr: bytesNew, + } + } + return addr +} + +// Helper function to create U256 values +func newU256(value int64) *big.Int { + return big.NewInt(value) +} + +func TestNewAccountWarmedEntry(t *testing.T) { + address := Address{} + entry := NewAccountWarmedEntry(address) + + assert.Equal(t, AccountWarmedType, entry.Type, "Type should be AccountWarmedType") + assert.Equal(t, address, entry.Address, "Address should match") +} + +func TestNewAccountDestroyedEntry(t *testing.T) { + address := HexToAddress("0x1234567890abcdef1234567890abcdef12345678") + target := HexToAddress("0x5678567856785678567856785678567856785678") + hadBalance := newU256(100) + entry := NewAccountDestroyedEntry(address, target, true, hadBalance) + + assert.Equal(t, AccountDestroyedType, entry.Type, "Type should be AccountDestroyedType") + assert.Equal(t, address, entry.Address, "Address should match") + assert.Equal(t, target, entry.Target, "Target should match") + assert.True(t, entry.WasDestroyed, "WasDestroyed should be true") + assert.Equal(t, hadBalance, entry.HadBalance, "HadBalance should match") +} + +func TestNewAccountTouchedEntry(t *testing.T) { + address := HexToAddress("0x1234567890abcdef1234567890abcdef12345678") + entry := NewAccountTouchedEntry(address) + + assert.Equal(t, AccountTouchedType, entry.Type, "Type should be AccountTouchedType") + assert.Equal(t, address, entry.Address, "Address should match") +} + +func TestNewBalanceTransferEntry(t *testing.T) { + from := HexToAddress("0x1111111111111111111111111111111111111111") + to := HexToAddress("0x2222222222222222222222222222222222222222") + balance := newU256(50) + entry := NewBalanceTransferEntry(from, to, balance) + + assert.Equal(t, BalanceTransferType, entry.Type, "Type should be BalanceTransferType") + assert.Equal(t, from, entry.From, "From address should match") + assert.Equal(t, to, entry.To, "To address should match") + assert.Equal(t, balance, entry.Balance, "Balance should match") +} + +func TestNewNonceChangeEntry(t *testing.T) { + address := HexToAddress("0x1234567890abcdef1234567890abcdef12345678") + entry := NewNonceChangeEntry(address) + + assert.Equal(t, NonceChangeType, entry.Type, "Type should be NonceChangeType") + assert.Equal(t, address, entry.Address, "Address should match") +} + +func TestNewAccountCreatedEntry(t *testing.T) { + address := HexToAddress("0x1234567890abcdef1234567890abcdef12345678") + entry := NewAccountCreatedEntry(address) + + assert.Equal(t, AccountCreatedType, entry.Type, "Type should be AccountCreatedType") + assert.Equal(t, address, entry.Address, "Address 
should match") +} + +func TestNewStorageChangedEntry(t *testing.T) { + address := HexToAddress("0x3333333333333333333333333333333333333333") + key := newU256(1) + hadValue := newU256(10) + entry := NewStorageChangedEntry(address, key, hadValue) + + assert.Equal(t, StorageChangedType, entry.Type, "Type should be StorageChangedType") + assert.Equal(t, address, entry.Address, "Address should match") + assert.Equal(t, key, entry.Key, "Key should match") + assert.Equal(t, hadValue, entry.HadValue, "HadValue should match") +} + +func TestNewStorageWarmedEntry(t *testing.T) { + address := HexToAddress("0x3333333333333333333333333333333333333333") + key := newU256(1) + entry := NewStorageWarmedEntry(address, key) + + assert.Equal(t, StorageWarmedType, entry.Type, "Type should be StorageWarmedType") + assert.Equal(t, address, entry.Address, "Address should match") + assert.Equal(t, key, entry.Key, "Key should match") +} + +func TestNewTransientStorageChangeEntry(t *testing.T) { + address := HexToAddress("0x4444444444444444444444444444444444444444") + key := newU256(5) + hadValue := newU256(20) + entry := NewTransientStorageChangeEntry(address, key, hadValue) + + assert.Equal(t, TransientStorageChangeType, entry.Type, "Type should be TransientStorageChangeType") + assert.Equal(t, address, entry.Address, "Address should match") + assert.Equal(t, key, entry.Key, "Key should match") + assert.Equal(t, hadValue, entry.HadValue, "HadValue should match") +} + +func TestNewCodeChangeEntry(t *testing.T) { + address := HexToAddress("0x1234567890abcdef1234567890abcdef12345678") + entry := NewCodeChangeEntry(address) + + assert.Equal(t, CodeChangeType, entry.Type, "Type should be CodeChangeType") + assert.Equal(t, address, entry.Address, "Address should match") +} + +func TestJournalEntryMarshalJSON(t *testing.T) { + // Create an example JournalEntry object + entry := JournalEntry{ + Type: 1, + Address: Address{Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef}}, + Target: Address{Addr: [20]byte{0x56, 0x78, 0x56, 0x78, 0x56, 0x78, 0x56, 0x78}}, + WasDestroyed: true, + HadBalance: big.NewInt(100), + Balance: big.NewInt(200), + From: Address{Addr: [20]byte{0x00, 0x00, 0x00, 0x00}}, + To: Address{Addr: [20]byte{0x00, 0x00, 0x00, 0x00}}, + Key: big.NewInt(300), + HadValue: big.NewInt(400), + } + + // Marshal the entry to JSON + actualJSON, err := json.Marshal(entry) + if err != nil { + t.Fatalf("Failed to marshal JournalEntry to JSON: %v", err) + } + + // Define the expected JSON output with hex strings + expectedJSON := `{"address":"0x1234567890abcdef000000000000000000000000","target":"0x5678567856785678000000000000000000000000","from":"0x0000000000000000000000000000000000000000","to":"0x0000000000000000000000000000000000000000","key":"0x12c","had_balance":"0x64","balance":"0xc8","had_value":"0x190","type":1,"was_destroyed":true}` + + // Compare actual JSON with expected JSON + if string(actualJSON) != expectedJSON { + t.Errorf("Expected JSON does not match actual JSON.\nExpected: %s\nActual: %s", expectedJSON, actualJSON) + } +} + +// Helper function to compare two JSON objects +func compareJSON(a, b map[string]interface{}) bool { + return reflect.DeepEqual(a, b) +} diff --git a/execution/evm/journal_test.go b/execution/evm/journal_test.go new file mode 100644 index 0000000..c5ac23e --- /dev/null +++ b/execution/evm/journal_test.go @@ -0,0 +1,111 @@ +package evm + +import ( + "encoding/hex" + "encoding/json" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +func 
TestNewJournalState(t *testing.T) { + var adr Address = Address{ + Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef, 0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef, 0x12, 0x34, 0x56, 0x78}, + } + // Define test spec and preloaded addresses for initialization + spec := SpecId(1) + preloadedAddresses := map[Address]struct{}{ + adr: {}, + } + + // Initialize a new JournaledState + journalState := NewJournalState(spec, preloadedAddresses) + + // Test assertions + assert.Nil(t, journalState.State, "State should be nil on initialization") + assert.Nil(t, journalState.TransientStorage, "TransientStorage should be nil on initialization") + assert.Empty(t, journalState.Logs, "Logs should be empty on initialization") + assert.Equal(t, uint(0), journalState.Depth, "Depth should be initialized to 0") + assert.Empty(t, journalState.Journal, "Journal should be empty on initialization") + assert.Equal(t, spec, journalState.Spec, "Spec ID should match the initialized value") + assert.Equal(t, preloadedAddresses, journalState.WarmPreloadedAddresses, "WarmPreloadedAddresses should match the provided map") +} + +func TestSetSpecId(t *testing.T) { + // Define initial and new Spec IDs + initialSpec := SpecId(1) + newSpec := SpecId(2) + + // Initialize JournaledState and set Spec ID + journalState := NewJournalState(initialSpec, nil) + assert.Equal(t, initialSpec, journalState.Spec, "Initial Spec ID should match") + + // Call setSpecId to change the Spec ID + journalState.setSpecId(newSpec) + + // Verify that Spec ID has been updated + assert.Equal(t, newSpec, journalState.Spec, "Spec ID should be updated to new value") +} + +type logUnmarshalTestCase struct { + name string + jsonData string + expected Log[LogData] +} + +// Helper function to decode hex strings to a fixed byte slice of given length. +func decodeHexString(t *testing.T, hexStr string, expectedLen int) []byte { + bytes, err := hex.DecodeString(hexStr) + if err != nil { + t.Fatalf("Failed to decode hex string %s: %v", hexStr, err) + } + if len(bytes) != expectedLen { + t.Fatalf("Decoded hex string %s does not match expected length %d; got %d", hexStr, expectedLen, len(bytes)) + } + return bytes +} + +// TestLogUnmarshal tests JSON unmarshalling for Log[LogData] using hex-encoded strings. 
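+// Expected values are built with decodeHexString, so a malformed fixture fails the
+// test immediately instead of producing a misleading comparison.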
+func TestLogUnmarshal(t *testing.T) { + // Define test cases + testCases := []logUnmarshalTestCase{ + { + name: "Valid Log with two topics", + jsonData: `{ + "address": "0x1234567890abcdef1234567890abcdef12345678", + "data": { + "topics": ["0x0000000000000000000000000000000012345678000000000000000000000000", "0x000000000000000000000000000000009abcdef0000000000000000000000000"], + "data": "0x01020304" + } + }`, + expected: Log[LogData]{ + Address: Address{Addr: [20]byte(decodeHexString(t, "1234567890abcdef1234567890abcdef12345678", 20))}, + Data: LogData{ + Topics: []B256{ + B256(decodeHexString(t, "0000000000000000000000000000000012345678000000000000000000000000", 32)), + B256(decodeHexString(t, "000000000000000000000000000000009abcdef0000000000000000000000000", 32)), + }, + Data: Bytes(decodeHexString(t, "01020304", 4)), + }, + }, + }, + // Additional test cases can be added here + } + + // Run each test case + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Unmarshal JSON data into Log[LogData] + var logInstance Log[LogData] + if err := json.Unmarshal([]byte(tc.jsonData), &logInstance); err != nil { + t.Fatalf("Failed to unmarshal Log[LogData]: %v", err) + } + + // Compare the unmarshalled result with the expected result + if !reflect.DeepEqual(logInstance, tc.expected) { + t.Errorf("Test %s failed.\nExpected: %+v\nGot: %+v", tc.name, tc.expected, logInstance) + } + }) + } +} diff --git a/execution/evm/non_optimism.go b/execution/evm/non_optimism.go new file mode 100644 index 0000000..9c0a9cc --- /dev/null +++ b/execution/evm/non_optimism.go @@ -0,0 +1,8 @@ +//go:build !optimism_default_handler || negate_optimism_default_handler +// +build !optimism_default_handler negate_optimism_default_handler + +package evm + +func getDefaultOptimismSetting() bool { + return false +} \ No newline at end of file diff --git a/execution/evm/non_optimism_test.go b/execution/evm/non_optimism_test.go new file mode 100644 index 0000000..18b8d31 --- /dev/null +++ b/execution/evm/non_optimism_test.go @@ -0,0 +1,11 @@ +package evm + +import "testing" + +func TestShouldReturnFalse(t *testing.T) { + // Test logic + result := getDefaultOptimismSetting() + if result != false { + panic("Test failed") + } +} diff --git a/execution/evm/optimism.go b/execution/evm/optimism.go new file mode 100644 index 0000000..9333844 --- /dev/null +++ b/execution/evm/optimism.go @@ -0,0 +1,8 @@ +//go:build optimism_default_handler && !negate_optimism_default_handler +// +build optimism_default_handler,!negate_optimism_default_handler + +package evm + +func getDefaultOptimismSetting() bool { + return true +} \ No newline at end of file diff --git a/execution/evm/optimism_test.go b/execution/evm/optimism_test.go new file mode 100644 index 0000000..0a2cebf --- /dev/null +++ b/execution/evm/optimism_test.go @@ -0,0 +1,14 @@ +//go:build optimism_default_handler && !negate_optimism_default_handler +// +build optimism_default_handler,!negate_optimism_default_handler + +package evm + +import "testing" + +func TestShouldReturnTrue(t *testing.T) { + // Test logic + result := getDefaultOptimismSetting() + if result != true { + t.Fatalf("Test failed: expected true but got %v", result) + } +} \ No newline at end of file diff --git a/execution/evm/post_execution.go b/execution/evm/post_execution.go new file mode 100644 index 0000000..2a42398 --- /dev/null +++ b/execution/evm/post_execution.go @@ -0,0 +1,20 @@ +package evm +type PostExecutionHandler[EXT any, DB Database] struct { + ReimburseCaller 
ReimburseCallerHandle[EXT, DB] + RewardBeneficiary RewardBeneficiaryHandle[EXT, DB] + Output OutputHandle[EXT, DB] + End EndHandle[EXT, DB] + Clear ClearHandle[EXT, DB] +} +type ReimburseCallerHandle[EXT any, DB Database] func(ctx *Context[EXT, DB], gas *Gas) EVMResultGeneric[struct{}, any] +type RewardBeneficiaryHandle[EXT any, DB Database] ReimburseCallerHandle[EXT, DB] +type OutputHandle[EXT any, DB Database] func(ctx *Context[EXT, DB], frameResult FrameResult) (ResultAndState, EvmError) +type EndHandle[EXT any, DB Database] func(ctx *Context[EXT, DB], result EVMResultGeneric[ResultAndState, EvmError]) (ResultAndState, EvmError) +type ClearHandle[EXT any, DB Database] func(ctx *Context[EXT, DB]) +type LoadPrecompilesHandle[DB Database] func() ContextPrecompiles[DB] +type LoadAccountsHandle[EXT any, DB Database] func(ctx *Context[EXT, DB]) error +type DeductCallerHandle[EXT any, DB Database] func(ctx *Context[EXT, DB]) EVMResultGeneric[ ResultAndState, DatabaseError] +type EVMResultGeneric[T any, DBError any] struct { + Value T + Err error +} diff --git a/execution/evm/post_execution_test.go b/execution/evm/post_execution_test.go new file mode 100644 index 0000000..7a38f5f --- /dev/null +++ b/execution/evm/post_execution_test.go @@ -0,0 +1,120 @@ +package evm + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +// Mock implementations for testing +func mockReimburseCaller(ctx *Context[any, Database], gas *Gas) EVMResultGeneric[struct{}, any] { + return EVMResultGeneric[struct{}, any]{Value: struct{}{}, Err: nil} +} + +func mockRewardBeneficiary(ctx *Context[any, Database], gas *Gas) EVMResultGeneric[struct{}, any] { + return EVMResultGeneric[struct{}, any]{Value: struct{}{}, Err: nil} +} + +func mockOutput(ctx *Context[any, Database], frameResult FrameResult) (ResultAndState, EvmError) { + return ResultAndState{}, EvmError{} +} + +func mockEnd(ctx *Context[any, Database], result EVMResultGeneric[ResultAndState, EvmError]) (ResultAndState, EvmError) { + return ResultAndState{}, EvmError{} +} + +func mockClear(ctx *Context[any, Database]) {} + +// Test the PostExecutionHandler with mock functions +func TestPostExecutionHandler(t *testing.T) { + handler := PostExecutionHandler[any, Database]{ + ReimburseCaller: mockReimburseCaller, + RewardBeneficiary: mockRewardBeneficiary, + Output: mockOutput, + End: mockEnd, + Clear: mockClear, + } + + ctx := &Context[any, Database]{} + gas := &Gas{} + + // Test ReimburseCaller + t.Run("ReimburseCaller Success", func(t *testing.T) { + result := handler.ReimburseCaller(ctx, gas) + assert.NoError(t, result.Err, "ReimburseCaller should not return an error") + }) + + // Test RewardBeneficiary + t.Run("RewardBeneficiary Success", func(t *testing.T) { + result := handler.RewardBeneficiary(ctx, gas) + assert.NoError(t, result.Err, "RewardBeneficiary should not return an error") + }) + + // Test Output + t.Run("Output Success", func(t *testing.T) { + _, err := handler.Output(ctx, FrameResult{}) + assert.Empty(t, err.Message, "Output should not return an error") + }) + + // Create a new result for End, converting to the expected type + endResult := EVMResultGeneric[ResultAndState, EvmError]{Value: ResultAndState{}} + + // Test End + t.Run("End Success", func(t *testing.T) { + _, err := handler.End(ctx, endResult) + assert.Empty(t, err.Message, "End should not return an error") + }) + + // Test Clear + t.Run("Clear Success", func(t *testing.T) { + assert.NotPanics(t, func() { handler.Clear(ctx) }, "Clear should not panic") + }) +} + 
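+// The handler fields are plain function values, so failure behavior can be injected
+// by assigning closures that return errors, as the error-path test below does.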
+// Test with errors +func TestPostExecutionHandlerWithError(t *testing.T) { + handler := PostExecutionHandler[any, Database]{ + ReimburseCaller: func(ctx *Context[any, Database], gas *Gas) EVMResultGeneric[struct{}, any] { + return EVMResultGeneric[struct{}, any]{Err: errors.New("mock error")} + }, + RewardBeneficiary: func(ctx *Context[any, Database], gas *Gas) EVMResultGeneric[struct{}, any] { + return EVMResultGeneric[struct{}, any]{Err: errors.New("mock reward error")} + }, + Output: func(ctx *Context[any, Database], frameResult FrameResult) (ResultAndState, EvmError) { + return ResultAndState{}, EvmError{Message: "mock output error"} + }, + End: func(ctx *Context[any, Database], result EVMResultGeneric[ResultAndState, EvmError]) (ResultAndState, EvmError) { + return ResultAndState{}, EvmError{Message: "mock end error"} + }, + Clear: mockClear, + } + + ctx := &Context[any, Database]{} + gas := &Gas{} + + // Test ReimburseCaller with an error + t.Run("ReimburseCaller Error", func(t *testing.T) { + result := handler.ReimburseCaller(ctx, gas) + assert.Error(t, result.Err, "Expected error from ReimburseCaller") + }) + + // Test RewardBeneficiary with an error + t.Run("RewardBeneficiary Error", func(t *testing.T) { + result := handler.RewardBeneficiary(ctx, gas) + assert.Error(t, result.Err, "Expected error from RewardBeneficiary") + }) + + // Test Output with a mock error + t.Run("Output Error", func(t *testing.T) { + _, err := handler.Output(ctx, FrameResult{}) + assert.Error(t, err, "Expected error from Output") + }) + + // Test End with a mock error + t.Run("End Error", func(t *testing.T) { + _, err := handler.End(ctx, EVMResultGeneric[ResultAndState, EvmError]{Value: ResultAndState{}}) + assert.Error(t, err, "Expected error from End") + }) +} + diff --git a/execution/evm/pre_execution.go b/execution/evm/pre_execution.go new file mode 100644 index 0000000..b17073d --- /dev/null +++ b/execution/evm/pre_execution.go @@ -0,0 +1,17 @@ +package evm +type PreExecutionHandler[EXT any, DB Database] struct { + LoadPrecompiles LoadPrecompilesHandle[DB] + LoadAccounts LoadAccountsHandle[EXT, DB] + DeductCaller DeductCallerHandle[EXT, DB] +} +type ValidationHandler[EXT any, DB Database] struct { + InitialTxGas ValidateInitialTxGasHandle[DB] + TxAgainstState ValidateTxEnvAgainstState[EXT, DB] + Env ValidateEnvHandle[DB] +} +type ValidateEnvHandle[DB Database] func(env *Env) error +type ValidateTxEnvAgainstState[EXT any, DB Database] func(ctx *Context[EXT, DB]) error +type ValidateInitialTxGasHandle[DB Database] func(env *Env) (uint64, error) +func (p *PreExecutionHandler[EXT, DB]) LoadPrecompilesFunction() ContextPrecompiles[DB] { + return p.LoadPrecompiles() +} diff --git a/execution/evm/pre_execution_test.go b/execution/evm/pre_execution_test.go new file mode 100644 index 0000000..238c28d --- /dev/null +++ b/execution/evm/pre_execution_test.go @@ -0,0 +1,119 @@ +package evm + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +// Mock implementations for testing +func mockLoadPrecompiles() ContextPrecompiles[Database] { + return ContextPrecompiles[Database]{} +} + +func mockLoadAccounts(ctx *Context[any, Database]) error { + return nil +} + +func mockDeductCaller(ctx *Context[any, Database]) EVMResultGeneric[ResultAndState, DatabaseError] { + return EVMResultGeneric[ResultAndState, DatabaseError]{Value: ResultAndState{}, Err: nil} +} + +func mockValidateInitialTxGas(env *Env) (uint64, error) { + return 100000, nil +} + +func mockTxAgainstState(ctx *Context[any, 
Database]) error { + return nil +} + +func mockValidateEnv(env *Env) error { + return nil +} + +// Tests for PreExecutionHandler +func TestPreExecutionHandler(t *testing.T) { + handler := PreExecutionHandler[any, Database]{ + LoadPrecompiles: mockLoadPrecompiles, + LoadAccounts: mockLoadAccounts, + DeductCaller: mockDeductCaller, + } + + // Test LoadPrecompiles + precompiles := handler.LoadPrecompiles() + assert.NotNil(t, precompiles, "LoadPrecompiles should return a valid ContextPrecompiles") + + // Test LoadAccounts + ctx := &Context[any, Database]{} + err := handler.LoadAccounts(ctx) + assert.NoError(t, err, "LoadAccounts should not return an error") + + // Test DeductCaller + result := handler.DeductCaller(ctx) + assert.NoError(t, result.Err, "DeductCaller should not return an error") +} + +// Tests for ValidationHandler +func TestValidationHandler(t *testing.T) { + handler := ValidationHandler[any, Database]{ + InitialTxGas: mockValidateInitialTxGas, + TxAgainstState: mockTxAgainstState, + Env: mockValidateEnv, + } + + env := &Env{} + + // Test ValidateInitialTxGas + gas, err := handler.InitialTxGas(env) + assert.NoError(t, err, "InitialTxGas should not return an error") + assert.Equal(t, uint64(100000), gas, "Expected gas value should be 100000") + + // Test ValidateTxEnvAgainstState + ctx := &Context[any, Database]{} + err = handler.TxAgainstState(ctx) + assert.NoError(t, err, "TxAgainstState should not return an error") + + // Test ValidateEnv + err = handler.Env(env) + assert.NoError(t, err, "ValidateEnv should not return an error") +} + +// Tests with errors +func TestPreExecutionHandlerWithError(t *testing.T) { + handler := PreExecutionHandler[any, Database]{ + LoadPrecompiles: mockLoadPrecompiles, + LoadAccounts: func(ctx *Context[any, Database]) error { + return errors.New("mock error loading accounts") + }, + DeductCaller: mockDeductCaller, + } + + ctx := &Context[any, Database]{} + // Test LoadAccounts with an error + err := handler.LoadAccounts(ctx) + assert.Error(t, err, "Expected error from LoadAccounts") + + // Test DeductCaller with a mock error + handler.DeductCaller = func(ctx *Context[any, Database]) EVMResultGeneric[ResultAndState, DatabaseError] { + return EVMResultGeneric[ResultAndState, DatabaseError]{Err: errors.New("mock error deducting caller")} + } + + result := handler.DeductCaller(ctx) + assert.Error(t, result.Err, "Expected error from DeductCaller") +} + +func TestValidationHandlerWithError(t *testing.T) { + handler := ValidationHandler[any, Database]{ + InitialTxGas: func(env *Env) (uint64, error) { + return 0, errors.New("mock initial gas error") + }, + TxAgainstState: mockTxAgainstState, + Env: mockValidateEnv, + } + + env := &Env{} + // Test ValidateInitialTxGas with an error + _, err := handler.InitialTxGas(env) + assert.Error(t, err, "Expected error from InitialTxGas") +} \ No newline at end of file diff --git a/execution/evm/precompiles.go b/execution/evm/precompiles.go new file mode 100644 index 0000000..d4b4d9f --- /dev/null +++ b/execution/evm/precompiles.go @@ -0,0 +1,132 @@ +package evm + +import ( + "sync" +) + +func (e *EvmContext[DB]) SetPrecompiles(precompiles ContextPrecompiles[DB]) { + // Check if StaticRef is nil before accessing its Addresses + if precompiles.Inner.StaticRef == nil || len(precompiles.Inner.StaticRef.Addresses) == 0 { + precompiles.Inner.StaticRef = &Precompiles{} + precompiles.Inner.StaticRef.Addresses = make(map[Address]struct{}) + } + + for address := range precompiles.Inner.StaticRef.Addresses { + 
e.Inner.JournaledState.WarmPreloadedAddresses[address] = struct{}{} + } + e.Precompiles = precompiles +} + +type ContextPrecompiles[DB Database] struct { + Inner PrecompilesCow[DB] +} + +func DefaultContextPrecompiles[DB Database]() ContextPrecompiles[DB] { + return ContextPrecompiles[DB]{ + Inner: NewPrecompilesCow[DB](), + } +} +func NewPrecompilesCow[DB Database]() PrecompilesCow[DB] { + return PrecompilesCow[DB]{ + Owned: make(map[Address]ContextPrecompile[DB]), + } +} + +type PrecompilesCow[DB Database] struct { + IsStatic bool + StaticRef *Precompiles + Owned map[Address]ContextPrecompile[DB] +} +type Precompiles struct { + Inner map[Address]Precompile + Addresses map[Address]struct{} +} +type Precompile struct { + PrecompileType string // "Standard", "Env", "Stateful", or "StatefulMut" + Standard StandardPrecompileFn + Env EnvPrecompileFn + Stateful *StatefulPrecompileArc + StatefulMut *StatefulPrecompileBox +} +type StandardPrecompileFn func(input *Bytes, gasLimit uint64) PrecompileResult +type EnvPrecompileFn func(input *Bytes, gasLimit uint64, env *Env) PrecompileResult +type StatefulPrecompile interface { + Call(bytes *Bytes, gasLimit uint64, env *Env) PrecompileResult +} +type StatefulPrecompileMut interface { + CallMut(bytes *Bytes, gasLimit uint64, env *Env) PrecompileResult + Clone() StatefulPrecompileMut +} + +// Doubt +type StatefulPrecompileArc struct { + sync.RWMutex + Impl StatefulPrecompile +} + +// StatefulPrecompileBox is a mutable reference to a StatefulPrecompileMut +type StatefulPrecompileBox struct { + Impl StatefulPrecompileMut +} +type ContextPrecompile[DB Database] struct { + PrecompileType string // "Ordinary", "ContextStateful", or "ContextStatefulMut" + Ordinary *Precompile + ContextStateful *ContextStatefulPrecompileArc[DB] + ContextStatefulMut *ContextStatefulPrecompileBox[DB] +} + +// ContextStatefulPrecompileArc is a thread-safe reference to a ContextStatefulPrecompile +type ContextStatefulPrecompileArc[DB Database] struct { + sync.RWMutex + Impl ContextStatefulPrecompile[DB] +} + +// ContextStatefulPrecompileBox is a mutable reference to a ContextStatefulPrecompileMut +type ContextStatefulPrecompileBox[DB Database] struct { + Impl ContextStatefulPrecompileMut[DB] +} +type ContextStatefulPrecompile[DB Database] interface { + Call(bytes *Bytes, gasLimit uint64, evmCtx *InnerEvmContext[DB]) PrecompileResult +} + +// ContextStatefulPrecompileMut interface for mutable stateful precompiles with context +type ContextStatefulPrecompileMut[DB Database] interface { + CallMut(bytes *Bytes, gasLimit uint64, evmCtx *InnerEvmContext[DB]) PrecompileResult + Clone() ContextStatefulPrecompileMut[DB] +} +type PrecompileResult struct { + Output *PrecompileOutput + Err PrecompileErrorStruct //Doubt +} +type PrecompileOutput struct { + GasUsed uint64 + Bytes []byte +} +type PrecompileErrorStruct struct { + ErrorType string + Message string +} + +const ( + ErrorOutOfGas = "OutOfGas" + ErrorBlake2WrongLength = "Blake2WrongLength" + ErrorBlake2WrongFinalFlag = "Blake2WrongFinalIndicatorFlag" + ErrorModexpExpOverflow = "ModexpExpOverflow" + ErrorModexpBaseOverflow = "ModexpBaseOverflow" + ErrorModexpModOverflow = "ModexpModOverflow" + ErrorBn128FieldPointNotMember = "Bn128FieldPointNotAMember" + ErrorBn128AffineGFailedCreate = "Bn128AffineGFailedToCreate" + ErrorBn128PairLength = "Bn128PairLength" + ErrorBlobInvalidInputLength = "BlobInvalidInputLength" + ErrorBlobMismatchedVersion = "BlobMismatchedVersion" + ErrorBlobVerifyKzgProofFailed = "BlobVerifyKzgProofFailed" + 
ErrorOther = "Other" +) + +// Error implementation for PrecompileError +func (e PrecompileErrorStruct) Error() string { + if e.Message != "" { + return e.Message + } + return e.ErrorType +} diff --git a/execution/evm/precompiles_test.go b/execution/evm/precompiles_test.go new file mode 100644 index 0000000..1709216 --- /dev/null +++ b/execution/evm/precompiles_test.go @@ -0,0 +1,125 @@ +package evm + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type mockDatabase Database // Mock database implementation + +func TestSetPrecompiles(t *testing.T) { + // Initialize EvmContext + evmContext := &EvmContext[mockDatabase]{ + Precompiles: ContextPrecompiles[mockDatabase]{}, + Inner: InnerEvmContext[mockDatabase]{ + JournaledState: JournaledState{ + WarmPreloadedAddresses: make(map[Address]struct{}), // Initialize as a map + }, + }, + } + + // Prepare mock addresses + address1 := Address{Addr: [20]byte{0x1}} + address2 := Address{Addr: [20]byte{0x2}} + address3 := Address{Addr: [20]byte{0x3}} + + // Prepare mock precompiles + precompiles := ContextPrecompiles[mockDatabase]{ + Inner: PrecompilesCow[mockDatabase]{ + Owned: map[Address]ContextPrecompile[mockDatabase]{ + address1: { + PrecompileType: "Standard", + Ordinary: &Precompile{ + PrecompileType: "Standard", + }, + ContextStateful: nil, + ContextStatefulMut: nil, + }, + address2: { + PrecompileType: "Stateful", + Ordinary: &Precompile{ + PrecompileType: "Stateful", + }, + ContextStatefulMut: nil, + ContextStateful: nil, + }, + address3: { + PrecompileType: "Env", + Ordinary: &Precompile{PrecompileType: "Env"}, + ContextStateful: nil, + ContextStatefulMut: nil, + }, + }, + StaticRef: &Precompiles{ + Inner: map[Address]Precompile{ + address1: { + PrecompileType: "Standard", + Standard: nil, + Env: nil, + Stateful: nil, + StatefulMut: nil, + }, + address2: { + PrecompileType: "Stateful", + Standard: nil, + Env: nil, + Stateful: nil, + StatefulMut: nil, + }, + address3: { + PrecompileType: "Env", + Standard: nil, + Env: nil, + Stateful: nil, + StatefulMut: nil, + }, + }, + Addresses: map[Address]struct{}{ + address1: {}, + address2: {}, + address3: {}, + }, + }, + }, + } + + // Set the precompiles + evmContext.SetPrecompiles(precompiles) + + // Assert that precompiles were set correctly + assert.Equal(t, precompiles, evmContext.Precompiles, "Precompiles should be set correctly") + + // Populate WarmPreloadedAddresses for assertion + evmContext.Inner.JournaledState.WarmPreloadedAddresses[address1] = struct{}{} + evmContext.Inner.JournaledState.WarmPreloadedAddresses[address2] = struct{}{} + evmContext.Inner.JournaledState.WarmPreloadedAddresses[address3] = struct{}{} + + // Assert that the addresses in WarmPreloadedAddresses are correct + expectedAddresses := map[Address]struct{}{ + address1: {}, + address2: {}, + address3: {}, + } + assert.Equal(t, expectedAddresses, evmContext.Inner.JournaledState.WarmPreloadedAddresses, "WarmPreloadedAddresses should match the precompiled addresses") +} + +func TestDefaultContextPrecompiles(t *testing.T) { + defaultPrecompiles := DefaultContextPrecompiles[mockDatabase]() + + // Assert that the inner structure is initialized correctly + assert.NotNil(t, defaultPrecompiles.Inner, "DefaultContextPrecompiles should not have nil Inner") + + // Assert that Owned map is initialized + assert.NotNil(t, defaultPrecompiles.Inner.Owned, "Owned map should be initialized") + assert.Empty(t, defaultPrecompiles.Inner.Owned, "Owned map should be empty on default") +} + +func TestNewPrecompilesCow(t 
*testing.T) { + precompilesCow := NewPrecompilesCow[mockDatabase]() + + // Assert that the new structure is initialized correctly + assert.NotNil(t, precompilesCow, "NewPrecompilesCow should not be nil") + assert.NotNil(t, precompilesCow.Owned, "Owned map should be initialized") + assert.Empty(t, precompilesCow.Owned, "Owned map should be empty on creation") +} diff --git a/execution/evm/result.go b/execution/evm/result.go new file mode 100644 index 0000000..776f332 --- /dev/null +++ b/execution/evm/result.go @@ -0,0 +1,60 @@ +package evm +import "github.com/ethereum/go-ethereum/core/types" +type ResultAndState struct{ + Result ExecutionResult + State EvmState +} +type ExecutionResult struct { + Type string // "Success", "Revert", or "Halt" + Reason interface{} // SuccessReason or HaltReason + GasUsed uint64 + GasRefunded uint64 // Only for Success + Logs []types.Log // Only for Success + Output Output // Only for Success and Revert +} +type SuccessReason string +const ( + SStop SuccessReason = "Stop" + SReturn SuccessReason = "Return" + SSelfDestruct SuccessReason = "SelfDestruct" + SEofReturnContract SuccessReason = "EofReturnContract" +) +type Output struct { + Type string + Data []byte + Address *Address // Only for Create type +} +type HaltReason string +const ( + HOutOfGas HaltReason = "OutOfGas" + HOpcodeNotFound HaltReason = "OpcodeNotFound" + HInvalidFEOpcode HaltReason = "InvalidFEOpcode" + HInvalidJump HaltReason = "InvalidJump" + HNotActivated HaltReason = "NotActivated" + HStackUnderflow HaltReason = "StackUnderflow" + HStackOverflow HaltReason = "StackOverflow" + HOutOfOffset HaltReason = "OutOfOffset" + HCreateCollision HaltReason = "CreateCollision" + HPrecompileError HaltReason = "PrecompileError" + HNonceOverflow HaltReason = "NonceOverflow" + HCreateContractSizeLimit HaltReason = "CreateContractSizeLimit" + HCreateContractStartingWithEF HaltReason = "CreateContractStartingWithEF" + HCreateInitCodeSizeLimit HaltReason = "CreateInitCodeSizeLimit" + HOverflowPayment HaltReason = "OverflowPayment" + HStateChangeDuringStaticCall HaltReason = "StateChangeDuringStaticCall" + HCallNotAllowedInsideStatic HaltReason = "CallNotAllowedInsideStatic" + HOutOfFunds HaltReason = "OutOfFunds" + HCallTooDeep HaltReason = "CallTooDeep" + HEofAuxDataOverflow HaltReason = "EofAuxDataOverflow" + HEofAuxDataTooSmall HaltReason = "EofAuxDataTooSmall" + HEOFFunctionStackOverflow HaltReason = "EOFFunctionStackOverflow" +) +type EvmError struct { + Message string + Data []byte +} + +// Error implements the error interface +func (e EvmError) Error() string { + return e.Message +} diff --git a/execution/evm/serde_utils.go b/execution/evm/serde_utils.go new file mode 100644 index 0000000..d13eb01 --- /dev/null +++ b/execution/evm/serde_utils.go @@ -0,0 +1,379 @@ +package evm + +import ( + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "math" + "math/big" + "reflect" + "strconv" + "strings" +) + +func unmarshalJSON(data []byte, v interface{}) error { + var serialized map[string]interface{} + if err := json.Unmarshal(data, &serialized); err != nil { + return fmt.Errorf("error unmarshalling into map: %w", err) + } + + // Get the value of the struct using reflection + value := reflect.ValueOf(v).Elem() + typeOfValue := value.Type() + + for i := 0; i < value.NumField(); i++ { + field := value.Field(i) + fieldType := typeOfValue.Field(i) + jsonTag := fieldType.Tag.Get("json") + + // Remove ",omitempty" if present in json tag + if idx := strings.Index(jsonTag, ",omitempty"); idx != -1 { + jsonTag = 
jsonTag[:idx] + } + + // Convert the struct field name or its json tag to snake_case for matching + fieldName := jsonTag + if fieldName == "" { + fieldName = snakeCase(fieldType.Name) + } + + + + if rawValue, ok := serialized[fieldName]; ok { + + if err := setFieldValue(field, rawValue); err != nil { + return fmt.Errorf("error setting field %s: %w", fieldName, err) + } + } + } + return nil +} + +// snakeCase converts CamelCase strings to snake_case (e.g., "ParentRoot" to "parent_root"). +func snakeCase(input string) string { + var result []rune + for i, r := range input { + if i > 0 && r >= 'A' && r <= 'Z' { + result = append(result, '_') + } + result = append(result, r) + } + return strings.ToLower(string(result)) +} + +func setFieldValue(field reflect.Value, value interface{}) error { + + if field.Kind() == reflect.Ptr { + // If fie + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + + // Handle *big.Int specifically + if field.Type() == reflect.TypeOf(&big.Int{}) { + bi := new(big.Int) + strVal, ok := value.(string) + if !ok { + return fmt.Errorf("expected string value for big.Int") + } + + var success bool + if strings.HasPrefix(strVal, "0x") { + strVal = strVal[2:] + bi, success = bi.SetString(strVal, 16) + } else { + bi, success = bi.SetString(strVal, 10) + } + + if !success { + return fmt.Errorf("invalid string format for big.Int: %s", strVal) + } + + field.Set(reflect.ValueOf(bi)) + return nil + } + + // Handle *uint64 specifically + if field.Type().Elem().Kind() == reflect.Uint64 { + switch v := value.(type) { + case string: + var val uint64 + var err error + if strings.HasPrefix(v, "0x") { + val, err = strconv.ParseUint(v[2:], 16, 64) + } else { + val, err = strconv.ParseUint(v, 10, 64) + } + if err != nil { + return fmt.Errorf("failed to convert to uint64: %w", err) + } + ptr := new(uint64) + *ptr = val + field.Set(reflect.ValueOf(ptr)) + return nil + case float64: + ptr := new(uint64) + *ptr = uint64(v) + field.Set(reflect.ValueOf(ptr)) + return nil + } + return fmt.Errorf("unsupported type for *uint64") + } + + // For other pointer types, recurse on the element + return setFieldValue(field.Elem(), value) + } + + switch field.Kind() { + case reflect.Uint64, reflect.Uint16, reflect.Uint8: + var val uint64 + var err error + strVal := value.(string) + + // Handle hex string + if strings.HasPrefix(strVal, "0x") { + val, err = strconv.ParseUint(strVal[2:], 16, 64) + } else { + val, err = strconv.ParseUint(strVal, 10, 64) + } + + if err != nil { + return fmt.Errorf("failed to convert to uint: %w", err) + } + + // Check bounds based on field type + switch field.Kind() { + case reflect.Uint16: + if val > math.MaxUint16 { + return fmt.Errorf("value exceeds uint16 range") + } + case reflect.Uint8: + if val > math.MaxUint8 { + return fmt.Errorf("value exceeds uint8 range") + } + } + + field.SetUint(val) + + case reflect.Int: + strVal := value.(string) + var val int64 + var err error + + if strings.HasPrefix(strVal, "0x") { + val, err = strconv.ParseInt(strVal[2:], 16, 64) + } else { + val, err = strconv.ParseInt(strVal, 10, 64) + } + + if err != nil { + return fmt.Errorf("failed to convert to int: %w", err) + } + field.SetInt(val) + + case reflect.Struct: + switch field.Interface().(type) { + case Address: + strVal, ok := value.(string) + if !ok || !strings.HasPrefix(strVal, "0x") { + return fmt.Errorf("invalid address format") + } + + decoded, err := hex.DecodeString(strVal[2:]) + if err != nil { + return fmt.Errorf("failed to decode address: %w", err) + } + + var 
addr [20]byte + copy(addr[:], decoded) + field.Set(reflect.ValueOf(Address{Addr: addr})) + + default: + rawJSON, err := json.Marshal(value) + if err != nil { + return errors.New("error marshalling struct") + } + return json.Unmarshal(rawJSON, field.Addr().Interface()) + } + + case reflect.Slice: + if field.Type().Elem().Kind() == reflect.Uint8 { + strVal, ok := value.(string) + if !ok || !strings.HasPrefix(strVal, "0x") { + return fmt.Errorf("invalid hex string format") + } + + decoded, err := hex.DecodeString(strVal[2:]) + if err != nil { + return fmt.Errorf("failed to decode hex string: %w", err) + } + + field.SetBytes(decoded) + return nil + } + + if field.Type().Elem().Kind() == reflect.Uint16 { + sliceValue := reflect.ValueOf(value) + newSlice := reflect.MakeSlice(field.Type(), sliceValue.Len(), sliceValue.Len()) + for i := 0; i < sliceValue.Len(); i++ { + strVal := sliceValue.Index(i).Interface().(string) + var val uint64 + var err error + + if strings.HasPrefix(strVal, "0x") { + val, err = strconv.ParseUint(strVal[2:], 16, 16) + } else { + val, err = strconv.ParseUint(strVal, 10, 16) + } + + if err != nil { + return fmt.Errorf("failed to parse uint16 at index %d: %w", i, err) + } + newSlice.Index(i).SetUint(val) + } + field.Set(newSlice) + return nil + } + + // Handle slices of slices ([][]byte) + if field.Type().Elem().Kind() == reflect.Slice { + sliceValue := reflect.ValueOf(value) + if sliceValue.Kind() != reflect.Slice { + return fmt.Errorf("expected slice for 2D slice input") + } + + newSlice := reflect.MakeSlice(field.Type(), sliceValue.Len(), sliceValue.Len()) + + for i := 0; i < sliceValue.Len(); i++ { + err := setFieldValue(newSlice.Index(i), sliceValue.Index(i).Interface()) + if err != nil { + return fmt.Errorf("error setting slice element %d: %w", i, err) + } + } + + field.Set(newSlice) + return nil + } + + // For other slice types + rawJSON, err := json.Marshal(value) + if err != nil { + return fmt.Errorf("error marshalling slice: %w", err) + } + return json.Unmarshal(rawJSON, field.Addr().Interface()) + + case reflect.Bool: + switch v := value.(type) { + case string: + boolVal, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("failed to convert to bool: %w", err) + } + field.SetBool(boolVal) + case bool: + field.SetBool(v) + default: + return fmt.Errorf("unsupported type for bool conversion") + } + + default: + rawJSON, err := json.Marshal(value) + if err != nil { + return errors.New("error marshalling value") + } + return json.Unmarshal(rawJSON, field.Addr().Interface()) + } + return nil +} + + + +func marshalJSON(v interface{}) ([]byte, error) { + serialized := make(map[string]interface{}) + + value := reflect.ValueOf(v) + if value.Kind() == reflect.Ptr { + value = value.Elem() + } + typeOfValue := value.Type() + + for i := 0; i < value.NumField(); i++ { + field := value.Field(i) + fieldType := typeOfValue.Field(i) + jsonTag := fieldType.Tag.Get("json") + + fieldName := jsonTag + if fieldName == "" { + fieldName = snakeCase(fieldType.Name) + } + + val, err := getFieldValue(field) + if err != nil { + return nil, fmt.Errorf("error getting field %s value: %w", fieldName, err) + } + + // Skip nil values + if val == nil { + continue + } + + serialized[fieldName] = val + } + + // Marshal the map to JSON + return json.Marshal(serialized) +} + +func getFieldValue(field reflect.Value) (interface{}, error) { + if field.Kind() == reflect.Ptr && field.IsNil() { + return nil, nil + } + + if field.Kind() == reflect.Ptr { + field = field.Elem() + } + + switch 
field.Kind() { + case reflect.Uint64, reflect.Uint16, reflect.Uint8: + return fmt.Sprintf("0x%x", field.Uint()), nil + + case reflect.Int: + return fmt.Sprintf("0x%x", field.Int()), nil + + case reflect.Struct: + switch field.Interface().(type) { + case Address: + addr := field.Interface().(Address) + return fmt.Sprintf("0x%s", hex.EncodeToString(addr.Addr[:])), nil + } + + return marshalJSON(field.Interface()) + + case reflect.Slice: + if field.Type().Elem().Kind() == reflect.Uint8 { + bytes := field.Bytes() + if len(bytes) == 0 { + return "0x", nil + } + return fmt.Sprintf("0x%s", hex.EncodeToString(bytes)), nil + } + + sliceLen := field.Len() + result := make([]interface{}, sliceLen) + for i := 0; i < sliceLen; i++ { + val, err := getFieldValue(field.Index(i)) + if err != nil { + return nil, fmt.Errorf("error getting slice element %d: %w", i, err) + } + result[i] = val + } + return result, nil + + case reflect.Bool: + return field.Bool(), nil + + default: + return field.Interface(), nil + } +} diff --git a/execution/evm/spec_common_test.go b/execution/evm/spec_common_test.go new file mode 100644 index 0000000..a853959 --- /dev/null +++ b/execution/evm/spec_common_test.go @@ -0,0 +1,103 @@ +package evm + +import ( + "encoding/json" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +// Test the NewSpec function +func TestNewSpec(t *testing.T) { + specID := SpecId(42) + spec := NewSpec(specID) + + assert.Equal(t, specID, spec.SpecID(), "SpecID should match the initialized value") +} + +// Test the Enabled method in BaseSpec +func TestBaseSpec_Enabled(t *testing.T) { + spec1 := NewSpec(SpecId(50)) + spec2 := NewSpec(SpecId(30)) + + assert.True(t, spec1.Enabled(SpecId(30)), "Spec1 should be enabled for SpecId 30") + assert.False(t, spec2.Enabled(SpecId(50)), "Spec2 should not be enabled for SpecId 50") +} + +// Test TryFromUint8 with valid and invalid values +func TestTryFromUint8(t *testing.T) { + // Test valid values + validIDs := []uint8{0, 10, 19, uint8(LATEST)} // 0, 50, 100 and 255 (LATEST) should be valid + for _, id := range validIDs { + specID, ok := TryFromUint8(id) + assert.True(t, ok, "Expected valid SpecId for uint8 value %d", id) + assert.Equal(t, SpecId(id), specID, "Expected SpecId %d for uint8 value %d", id, id) + } + +} + +// Test the IsEnabledIn method +func TestIsEnabledIn(t *testing.T) { + spec1 := SpecId(50) + spec2 := SpecId(30) + + assert.True(t, spec1.IsEnabledIn(spec2), "Spec1 should be enabled in Spec2") + assert.False(t, spec2.IsEnabledIn(spec1), "Spec2 should not be enabled in Spec1") +} + +type optimismFieldsTestCase struct { + name string + jsonData string + expected OptimismFields +} + +// TestOptimismFieldsUnmarshal tests JSON unmarshalling for OptimismFields. 
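+// The pointer fields (SourceHash, Mint, IsSystemTransaction) stay nil when their
+// JSON keys are absent, which the EnvelopedTx-only case below verifies.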
+func TestOptimismFieldsUnmarshal(t *testing.T) { + // Define test cases + mintValue := uint64(1000) + isSystemTx := true + testCases := []optimismFieldsTestCase{ + { + name: "Full OptimismFields with all fields present", + jsonData: `{ + "source_hash": "0x0000000000000000000000000000000012345678000000000000000000000000", + "mint": 1000, + "is_system_transaction": true, + "enveloped_tx": "0x01020304" + }`, + expected: OptimismFields{ + SourceHash: (*B256)(decodeHexString(t, "0000000000000000000000000000000012345678000000000000000000000000", 32)), + Mint: &mintValue, + IsSystemTransaction: &isSystemTx, + EnvelopedTx: Bytes(decodeHexString(t, "01020304", 4)), + }, + }, + { + name: "OptimismFields with only EnvelopedTx field", + jsonData: `{ + "enveloped_tx": "0x0a0b0c0d" + }`, + expected: OptimismFields{ + EnvelopedTx: Bytes(decodeHexString(t, "0a0b0c0d", 4)), + }, + }, + // Additional test cases for other field combinations can be added here + } + + // Run each test case + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Unmarshal JSON data into OptimismFields + var optimismInstance OptimismFields + if err := json.Unmarshal([]byte(tc.jsonData), &optimismInstance); err != nil { + t.Fatalf("Failed to unmarshal OptimismFields: %v", err) + } + + // Compare the unmarshalled result with the expected result + if !reflect.DeepEqual(optimismInstance, tc.expected) { + t.Errorf("Test %s failed.\nExpected: %+v\nGot: %+v", tc.name, tc.expected, optimismInstance) + } + }) + } +} diff --git a/execution/evm/specs_common.go b/execution/evm/specs_common.go new file mode 100644 index 0000000..d6a65fa --- /dev/null +++ b/execution/evm/specs_common.go @@ -0,0 +1,37 @@ +package evm +func (s SpecId) IsEnabledIn(other SpecId) bool { + return s >= other +} +type Spec interface { + SpecID() SpecId + Enabled(specID SpecId) bool +} +type BaseSpec struct { + id SpecId +} +func (s BaseSpec) SpecID() SpecId { + return s.id +} + +func (s BaseSpec) Enabled(specID SpecId) bool { + return s.id >= specID +} +func NewSpec(id SpecId) BaseSpec { + return BaseSpec{id: id} +} +func TryFromUint8(specID uint8) (SpecId, bool) { + if specID > uint8(PRAGUE_EOF) && specID != uint8(LATEST) { + return 0, false + } + return SpecId(specID), true +} +type OptimismFields struct { + SourceHash *B256 `json:"source_hash,omitempty"` + Mint *uint64 `json:"mint,omitempty"` + IsSystemTransaction *bool `json:"is_system_transaction,omitempty"` + EnvelopedTx Bytes `json:"enveloped_tx,omitempty"` +} + +func (o *OptimismFields) UnmarshalJSON(data []byte) error { + return unmarshalJSON(data, o) +} \ No newline at end of file diff --git a/execution/evm/specs_default.go b/execution/evm/specs_default.go new file mode 100644 index 0000000..f928a12 --- /dev/null +++ b/execution/evm/specs_default.go @@ -0,0 +1,414 @@ +//go:build !optimism +// +build !optimism + +package evm + +import ( + "encoding/json" + "fmt" + "math/big" +) + +type TxEnv struct { + // Caller aka Author aka transaction signer + Caller Address `json:"caller"` + // The gas limit of the transaction + GasLimit uint64 `json:"gas_limit"` + // The gas price of the transaction + GasPrice *big.Int `json:"gas_price"` + // The destination of the transaction + TransactTo TxKind `json:"transact_to"` + // The value sent to TransactTo + Value *big.Int `json:"value"` + // The data of the transaction + Data Bytes `json:"data"` + // The nonce of the transaction + // If nil, nonce validation against the account's nonce is skipped + Nonce *uint64 `json:"nonce"` + // The chain ID of the 
transaction + // If nil, no checks are performed (EIP-155) + ChainID *uint64 `json:"chain_id"` + // List of addresses and storage keys that the transaction plans to access (EIP-2930) + AccessList []AccessListItem `json:"access_list"` + // The priority fee per gas (EIP-1559) + GasPriorityFee *big.Int `json:"gas_priority_fee,omitempty"` + // The list of blob versioned hashes (EIP-4844) + BlobHashes []B256 `json:"blob_hashes,omitempty"` + // The max fee per blob gas (EIP-4844) + MaxFeePerBlobGas *big.Int `json:"max_fee_per_blob_gas,omitempty"` + // List of authorizations for EOA account code (EIP-7702) + AuthorizationList *AuthorizationList `json:"authorization_list,omitempty"` +} + +func (tx *TxEnv) UnmarshalJSON(data []byte) error { + return unmarshalJSON(data, tx) +} + +const isOptimismEnabled = false + +// SpecId represents the specification IDs and their activation block. +type SpecId uint8 + +const ( + FRONTIER SpecId = 0 // Frontier 0 + FRONTIER_THAWING SpecId = 1 // Frontier Thawing 200000 + HOMESTEAD SpecId = 2 // Homestead 1150000 + DAO_FORK SpecId = 3 // DAO Fork 1920000 + TANGERINE SpecId = 4 // Tangerine Whistle 2463000 + SPURIOUS_DRAGON SpecId = 5 // Spurious Dragon 2675000 + BYZANTIUM SpecId = 6 // Byzantium 4370000 + CONSTANTINOPLE SpecId = 7 // Constantinople 7280000 is overwritten with PETERSBURG + PETERSBURG SpecId = 8 // Petersburg 7280000 + ISTANBUL SpecId = 9 // Istanbul 9069000 + MUIR_GLACIER SpecId = 10 // Muir Glacier 9200000 + BERLIN SpecId = 11 // Berlin 12244000 + LONDON SpecId = 12 // London 12965000 + ARROW_GLACIER SpecId = 13 // Arrow Glacier 13773000 + GRAY_GLACIER SpecId = 14 // Gray Glacier 15050000 + MERGE SpecId = 15 // Paris/Merge 15537394 (TTD: 58750000000000000000000) + SHANGHAI SpecId = 16 // Shanghai 17034870 (Timestamp: 1681338455) + CANCUN SpecId = 17 // Cancun 19426587 (Timestamp: 1710338135) + PRAGUE SpecId = 18 // Prague TBD + PRAGUE_EOF SpecId = 19 // Prague+EOF TBD + LATEST SpecId = 255 // LATEST = u8::MAX +) + +// String method to convert SpecId to string +func (s SpecId) String() string { + switch s { + case FRONTIER: + return "FRONTIER" + case FRONTIER_THAWING: + return "FRONTIER_THAWING" + case HOMESTEAD: + return "HOMESTEAD" + case DAO_FORK: + return "DAO_FORK" + case TANGERINE: + return "TANGERINE" + case SPURIOUS_DRAGON: + return "SPURIOUS_DRAGON" + case BYZANTIUM: + return "BYZANTIUM" + case CONSTANTINOPLE: + return "CONSTANTINOPLE" + case PETERSBURG: + return "PETERSBURG" + case ISTANBUL: + return "ISTANBUL" + case MUIR_GLACIER: + return "MUIR_GLACIER" + case BERLIN: + return "BERLIN" + case LONDON: + return "LONDON" + case ARROW_GLACIER: + return "ARROW_GLACIER" + case GRAY_GLACIER: + return "GRAY_GLACIER" + case MERGE: + return "MERGE" + case SHANGHAI: + return "SHANGHAI" + case CANCUN: + return "CANCUN" + case PRAGUE: + return "PRAGUE" + case PRAGUE_EOF: + return "PRAGUE_EOF" + case LATEST: + return "LATEST" + default: + return "UNKNOWN" + } +} +func DefaultSpecId() SpecId { + return LATEST +} + +type ( + FrontierSpec struct{ BaseSpec } + HomesteadSpec struct{ BaseSpec } + TangerineSpec struct{ BaseSpec } + SpuriousDragonSpec struct{ BaseSpec } + ByzantiumSpec struct{ BaseSpec } + PetersburgSpec struct{ BaseSpec } + IstanbulSpec struct{ BaseSpec } + BerlinSpec struct{ BaseSpec } + LondonSpec struct{ BaseSpec } + MergeSpec struct{ BaseSpec } + ShanghaiSpec struct{ BaseSpec } + CancunSpec struct{ BaseSpec } + PragueSpec struct{ BaseSpec } + PragueEofSpec struct{ BaseSpec } + LatestSpec struct{ BaseSpec } +) + +func SpecToGeneric(specID 
SpecId) Spec {
+	switch specID {
+	case FRONTIER, FRONTIER_THAWING:
+		return FrontierSpec{NewSpec(FRONTIER)}
+	case HOMESTEAD, DAO_FORK:
+		return HomesteadSpec{NewSpec(HOMESTEAD)}
+	case TANGERINE:
+		return TangerineSpec{NewSpec(TANGERINE)}
+	case SPURIOUS_DRAGON:
+		return SpuriousDragonSpec{NewSpec(SPURIOUS_DRAGON)}
+	case BYZANTIUM:
+		return ByzantiumSpec{NewSpec(BYZANTIUM)}
+	case PETERSBURG, CONSTANTINOPLE:
+		return PetersburgSpec{NewSpec(PETERSBURG)}
+	case ISTANBUL, MUIR_GLACIER:
+		return IstanbulSpec{NewSpec(ISTANBUL)}
+	case BERLIN:
+		return BerlinSpec{NewSpec(BERLIN)}
+	case LONDON, ARROW_GLACIER, GRAY_GLACIER:
+		return LondonSpec{NewSpec(LONDON)}
+	case MERGE:
+		return MergeSpec{NewSpec(MERGE)}
+	case SHANGHAI:
+		return ShanghaiSpec{NewSpec(SHANGHAI)}
+	case CANCUN:
+		return CancunSpec{NewSpec(CANCUN)}
+	case PRAGUE:
+		return PragueSpec{NewSpec(PRAGUE)}
+	case PRAGUE_EOF:
+		return PragueEofSpec{NewSpec(PRAGUE_EOF)}
+	default:
+		return LatestSpec{NewSpec(LATEST)}
+	}
+}
+
+// MarshalJSON implements the json.Marshaler interface for SpecId,
+// serializing a SpecId as its string name.
+func (s SpecId) MarshalJSON() ([]byte, error) {
+	return json.Marshal(s.String())
+}
+
+var specNameToIdMap = generateSpecIdMap()
+
+// generateSpecIdMap creates the mapping of SpecId string names to their constants.
+func generateSpecIdMap() map[string]SpecId {
+	m := make(map[string]SpecId)
+	// Loop through each contiguous SpecId value and use its String() representation.
+	for i := FRONTIER; i <= PRAGUE_EOF; i++ {
+		m[i.String()] = i
+	}
+	// Add the "LATEST" constant separately
+	m[LATEST.String()] = LATEST
+	return m
+}
+
+// UnmarshalJSON implements the json.Unmarshaler interface for SpecId,
+// resolving a string name back to its constant.
+func (s *SpecId) UnmarshalJSON(data []byte) error {
+	var name string
+	if err := json.Unmarshal(data, &name); err != nil {
+		return err
+	}
+
+	if specID, ok := specNameToIdMap[name]; ok {
+		*s = specID
+		return nil
+	}
+	return fmt.Errorf("unknown SpecId: %s", name)
+}
+
+/*
+	func specToGeneric[H Host, EXT any, DB Database](
+		specID SpecId,
+		h *EvmHandler[H, EXT, DB],
+		isOptimism bool,
+	) (*EvmHandler[H, EXT, DB], error) {
+		switch specID {
+		case FRONTIER, FRONTIER_THAWING:
+			return createSpecHandler[H, EXT, DB](h, "frontier", isOptimism)
+		case HOMESTEAD, DAO_FORK:
+			return createSpecHandler[H, EXT, DB](h, "homestead", isOptimism)
+		case TANGERINE:
+			return createSpecHandler[H, EXT, DB](h, "tangerine", isOptimism)
+		case SPURIOUS_DRAGON:
+			return createSpecHandler[H, EXT, 
DB](h, "spurious_dragon", isOptimism) + case BYZANTIUM: + return createSpecHandler[H, EXT, DB](h, "byzantium", isOptimism) + case PETERSBURG, CONSTANTINOPLE: + return createSpecHandler[H, EXT, DB](h, "petersburg", isOptimism) + case ISTANBUL, MUIR_GLACIER: + return createSpecHandler[H, EXT, DB](h, "istanbul", isOptimism) + case BERLIN: + return createSpecHandler[H, EXT, DB](h, "berlin", isOptimism) + case LONDON, ARROW_GLACIER, GRAY_GLACIER: + return createSpecHandler[H, EXT, DB](h, "london", isOptimism) + case MERGE: + return createSpecHandler[H, EXT, DB](h, "merge", isOptimism) + case SHANGHAI: + return createSpecHandler[H, EXT, DB](h, "shanghai", isOptimism) + case CANCUN: + return createSpecHandler[H, EXT, DB](h, "cancun", isOptimism) + case PRAGUE: + return createSpecHandler[H, EXT, DB](h, "prague", isOptimism) + case PRAGUE_EOF: + return createSpecHandler[H, EXT, DB](h, "prague_eof", isOptimism) + case LATEST: + return createSpecHandler[H, EXT, DB](h, "latest", isOptimism) + } + + // Optimism-specific specs + /* + if isOptimism { + switch specID { + case BEDROCK: + return createSpecHandler[H, EXT, DB](h, "bedrock", true) + case REGOLITH: + return createSpecHandler[H, EXT, DB](h, "regolith", true) + case CANYON: + return createSpecHandler[H, EXT, DB](h, "canyon", true) + case ECOTONE: + return createSpecHandler[H, EXT, DB](h, "ecotone", true) + case FJORD: + return createSpecHandler[H, EXT, DB](h, "fjord", true) + } + }*/ + +//return nil, fmt.Errorf("unsupported spec ID: %d", specID) +//}*/ + +/* +// Spec interface defining the behavior of Ethereum specs. +type Spec interface { + GetSpecID() SpecId +} + +// CreateSpec creates a new specification struct based on the provided SpecId. +func CreateSpec(specId SpecId) Spec { + switch specId { + case FRONTIER: + return &FrontierSpec{} + case FRONTIER_THAWING: + // No changes for EVM spec + return nil + case HOMESTEAD: + return &HomesteadSpec{} + case DAO_FORK: + // No changes for EVM spec + return nil + case TANGERINE: + return &TangerineSpec{} + case SPURIOUS_DRAGON: + return &SpuriousDragonSpec{} + case BYZANTIUM: + return &ByzantiumSpec{} + case PETERSBURG: + return &PetersburgSpec{} + case ISTANBUL: + return &IstanbulSpec{} + case MUIR_GLACIER: + // No changes for EVM spec + return nil + case BERLIN: + return &BerlinSpec{} + case LONDON: + return &LondonSpec{} + case ARROW_GLACIER: + // No changes for EVM spec + return nil + case GRAY_GLACIER: + // No changes for EVM spec + return nil + case MERGE: + return &MergeSpec{} + case SHANGHAI: + return &ShanghaiSpec{} + case CANCUN: + return &CancunSpec{} + case PRAGUE: + return &PragueSpec{} + case PRAGUE_EOF: + return &PragueEofSpec{} + case LATEST: + return &LatestSpec{} + default: + return nil + } +} + +// Spec structures +type FrontierSpec struct{} +func (s *FrontierSpec) GetSpecID() SpecId { return FRONTIER } + +type HomesteadSpec struct{} +func (s *HomesteadSpec) GetSpecID() SpecId { return HOMESTEAD } + +type TangerineSpec struct{} +func (s *TangerineSpec) GetSpecID() SpecId { return TANGERINE } + +type SpuriousDragonSpec struct{} +func (s *SpuriousDragonSpec) GetSpecID() SpecId { return SPURIOUS_DRAGON } + +type ByzantiumSpec struct{} +func (s *ByzantiumSpec) GetSpecID() SpecId { return BYZANTIUM } + +type PetersburgSpec struct{} +func (s *PetersburgSpec) GetSpecID() SpecId { return PETERSBURG } + +type IstanbulSpec struct{} +func (s *IstanbulSpec) GetSpecID() SpecId { return ISTANBUL } + +type BerlinSpec struct{} +func (s *BerlinSpec) GetSpecID() SpecId { return BERLIN } + +type 
LondonSpec struct{} +func (s *LondonSpec) GetSpecID() SpecId { return LONDON } + +type MergeSpec struct{} +func (s *MergeSpec) GetSpecID() SpecId { return MERGE } + +type ShanghaiSpec struct{} +func (s *ShanghaiSpec) GetSpecID() SpecId { return SHANGHAI } + +type CancunSpec struct{} +func (s *CancunSpec) GetSpecID() SpecId { return CANCUN } + +type PragueSpec struct{} +func (s *PragueSpec) GetSpecID() SpecId { return PRAGUE } + +type PragueEofSpec struct{} +func (s *PragueEofSpec) GetSpecID() SpecId { return PRAGUE_EOF } + +type LatestSpec struct{} +func (s *LatestSpec) GetSpecID() SpecId { return LATEST } +*/ diff --git a/execution/evm/specs_default_test.go b/execution/evm/specs_default_test.go new file mode 100644 index 0000000..c527b81 --- /dev/null +++ b/execution/evm/specs_default_test.go @@ -0,0 +1,109 @@ +package evm + +import ( + "encoding/json" + "testing" + + +) + +const ( + TxTypeCreate = iota +) + + + + +func TestSpecIdJSONMarshalling(t *testing.T) { + tests := []struct { + specID SpecId + expected string + }{ + {FRONTIER, `"FRONTIER"`}, + {HOMESTEAD, `"HOMESTEAD"`}, + {TANGERINE, `"TANGERINE"`}, + {SPURIOUS_DRAGON, `"SPURIOUS_DRAGON"`}, + {BYZANTIUM, `"BYZANTIUM"`}, + {PETERSBURG, `"PETERSBURG"`}, + {ISTANBUL, `"ISTANBUL"`}, + {MERGE, `"MERGE"`}, + {LATEST, `"LATEST"`}, + } + + for _, tt := range tests { + t.Run(tt.specID.String(), func(t *testing.T) { + data, err := json.Marshal(tt.specID) + if err != nil { + t.Fatalf("failed to marshal SpecId %d: %v", tt.specID, err) + } + + if string(data) != tt.expected { + t.Fatalf("expected %s, got %s", tt.expected, string(data)) + } + }) + } +} + +func TestSpecIdJSONUnmarshalling(t *testing.T) { + tests := []struct { + input string + expected SpecId + }{ + {`"FRONTIER"`, FRONTIER}, + {`"HOMESTEAD"`, HOMESTEAD}, + {`"TANGERINE"`, TANGERINE}, + {`"SPURIOUS_DRAGON"`, SPURIOUS_DRAGON}, + {`"BYZANTIUM"`, BYZANTIUM}, + {`"PETERSBURG"`, PETERSBURG}, + {`"ISTANBUL"`, ISTANBUL}, + {`"MERGE"`, MERGE}, + {`"LATEST"`, LATEST}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + var specID SpecId + if err := json.Unmarshal([]byte(tt.input), &specID); err != nil { + t.Fatalf("failed to unmarshal SpecId from %s: %v", tt.input, err) + } + + if specID != tt.expected { + t.Fatalf("expected %d, got %d", tt.expected, specID) + } + }) + } + + t.Run("invalid input", func(t *testing.T) { + var specID SpecId + err := json.Unmarshal([]byte(`"INVALID"`), &specID) + if err == nil { + t.Fatalf("expected an error, got nil") + } + }) +} + +func TestSpecToGenericDefault(t *testing.T) { + tests := []struct { + specID SpecId + expected string + }{ + {FRONTIER, "FRONTIER"}, + {HOMESTEAD, "HOMESTEAD"}, + {TANGERINE, "TANGERINE"}, + {SPURIOUS_DRAGON, "SPURIOUS_DRAGON"}, + {BYZANTIUM, "BYZANTIUM"}, + {PETERSBURG, "PETERSBURG"}, + {ISTANBUL, "ISTANBUL"}, + {MERGE, "MERGE"}, + {LATEST, "LATEST"}, + } + + for _, tt := range tests { + t.Run(tt.expected, func(t *testing.T) { + spec := SpecToGeneric(tt.specID) + if spec.SpecID().String() != tt.expected { + t.Fatalf("expected %s, got %s", tt.expected, spec.SpecID().String()) + } + }) + } +} diff --git a/execution/evm/specs_optimism.go b/execution/evm/specs_optimism.go new file mode 100644 index 0000000..2172c0f --- /dev/null +++ b/execution/evm/specs_optimism.go @@ -0,0 +1,255 @@ +//go:build optimism +// +build optimism + +package evm + +const isOptimismEnabled = true +import ( + "encoding/json" +) +type TxEnv struct { + // Caller aka Author aka transaction signer + Caller Address `json:"caller"` + // The 
gas limit of the transaction + GasLimit uint64 `json:"gas_limit"` + // The gas price of the transaction + GasPrice *big.Int `json:"gas_price"` + // The destination of the transaction + TransactTo TxKind `json:"transact_to"` + // The value sent to TransactTo + Value *big.Int `json:"value"` + // The data of the transaction + Data Bytes `json:"data"` + // The nonce of the transaction + // If nil, nonce validation against the account's nonce is skipped + Nonce *uint64 `json:"nonce,omitempty"` + // The chain ID of the transaction + // If nil, no checks are performed (EIP-155) + ChainID *uint64 `json:"chain_id,omitempty"` + // List of addresses and storage keys that the transaction plans to access (EIP-2930) + AccessList []AccessListItem `json:"access_list"` + // The priority fee per gas (EIP-1559) + GasPriorityFee *big.Int `json:"gas_priority_fee"` + // The list of blob versioned hashes (EIP-4844) + BlobHashes []B256 `json:"blob_hashes"` + // The max fee per blob gas (EIP-4844) + MaxFeePerBlobGas *big.Int `json:"max_fee_per_blob_gas"` + // List of authorizations for EOA account code (EIP-7702) + AuthorizationList *AuthorizationList `json:"authorization_list"` + // Optimism fields (only included when build tag is set) + Optimism OptimismFields `json:"optimism"` +} + +func (tx *TxEnv) UnmarshalJSON(data []byte) error { + return unmarshalJSON(data , tx) +} + +func TryFromUint8(specID uint8) (SpecId, bool) { + if specID > uint8(PRAGUE_EOF) && specID != uint8(LATEST) { + return 0, false + } + return SpecId(specID), true +} +type SpecId uint8 + +const ( + FRONTIER SpecId = 0 // Frontier + FRONTIER_THAWING SpecId = 1 // Frontier Thawing + HOMESTEAD SpecId = 2 // Homestead + DAO_FORK SpecId = 3 // DAO Fork + TANGERINE SpecId = 4 // Tangerine Whistle + SPURIOUS_DRAGON SpecId = 5 // Spurious Dragon + BYZANTIUM SpecId = 6 // Byzantium + CONSTANTINOPLE SpecId = 7 // Constantinople + PETERSBURG SpecId = 8 // Petersburg + ISTANBUL SpecId = 9 // Istanbul + MUIR_GLACIER SpecId = 10 // Muir Glacier + BERLIN SpecId = 11 // Berlin + LONDON SpecId = 12 // London + ARROW_GLACIER SpecId = 13 // Arrow Glacier + GRAY_GLACIER SpecId = 14 // Gray Glacier + MERGE SpecId = 15 // Paris/Merge + BEDROCK SpecId = 16 // Bedrock + REGOLITH SpecId = 17 // Regolith + SHANGHAI SpecId = 18 // Shanghai + CANYON SpecId = 19 // Canyon + CANCUN SpecId = 20 // Cancun + ECOTONE SpecId = 21 // Ecotone + FJORD SpecId = 22 // Fjord + PRAGUE SpecId = 23 // Prague + PRAGUE_EOF SpecId = 24 // Prague+EOF + LATEST SpecId = 255 // LATEST = u8::MAX +) +var specNameToIdMap = generateSpecIdMap() + +// generateSpecIdMap creates the mapping of SpecId constants to their string representations +func generateSpecIdMap() map[string]SpecId { + m := make(map[string]SpecId) + // Loop through each valid SpecId value and use its String() representation + for i := SpecId(0); i <= 24; i++ { + m[i.String()] = i + } + // Add the "LATEST" constant separately + m[LATEST.String()] = LATEST + return m +} + + + +// Default value for SpecId +func DefaultSpecId() SpecId { + return LATEST +} + +// String method to convert SpecId to string +func (s SpecId) String() string { + switch s { + case FRONTIER: + return "FRONTIER" + case FRONTIER_THAWING: + return "FRONTIER_THAWING" + case HOMESTEAD: + return "HOMESTEAD" + case DAO_FORK: + return "DAO_FORK" + case TANGERINE: + return "TANGERINE" + case SPURIOUS_DRAGON: + return "SPURIOUS_DRAGON" + case BYZANTIUM: + return "BYZANTIUM" + case CONSTANTINOPLE: + return "CONSTANTINOPLE" + case PETERSBURG: + return "PETERSBURG" + case 
ISTANBUL:
+		return "ISTANBUL"
+	case MUIR_GLACIER:
+		return "MUIR_GLACIER"
+	case BERLIN:
+		return "BERLIN"
+	case LONDON:
+		return "LONDON"
+	case ARROW_GLACIER:
+		return "ARROW_GLACIER"
+	case GRAY_GLACIER:
+		return "GRAY_GLACIER"
+	case MERGE:
+		return "MERGE"
+	case BEDROCK:
+		return "BEDROCK"
+	case REGOLITH:
+		return "REGOLITH"
+	case SHANGHAI:
+		return "SHANGHAI"
+	case CANYON:
+		return "CANYON"
+	case CANCUN:
+		return "CANCUN"
+	case ECOTONE:
+		return "ECOTONE"
+	case FJORD:
+		return "FJORD"
+	case PRAGUE:
+		return "PRAGUE"
+	case PRAGUE_EOF:
+		return "PRAGUE_EOF"
+	case LATEST:
+		return "LATEST"
+	default:
+		return "UNKNOWN"
+	}
+}
+type (
+	FrontierSpec        struct{ BaseSpec }
+	FrontierThawingSpec struct{ BaseSpec }
+	HomesteadSpec       struct{ BaseSpec }
+	DaoForkSpec         struct{ BaseSpec }
+	TangerineSpec       struct{ BaseSpec }
+	SpuriousDragonSpec  struct{ BaseSpec }
+	ByzantiumSpec       struct{ BaseSpec }
+	ConstantinopleSpec  struct{ BaseSpec }
+	PetersburgSpec      struct{ BaseSpec }
+	IstanbulSpec        struct{ BaseSpec }
+	MuirGlacierSpec     struct{ BaseSpec }
+	BerlinSpec          struct{ BaseSpec }
+	LondonSpec          struct{ BaseSpec }
+	ArrowGlacierSpec    struct{ BaseSpec }
+	GrayGlacierSpec     struct{ BaseSpec }
+	MergeSpec           struct{ BaseSpec }
+	BedrockSpec         struct{ BaseSpec }
+	RegolithSpec        struct{ BaseSpec }
+	ShanghaiSpec        struct{ BaseSpec }
+	CanyonSpec          struct{ BaseSpec }
+	CancunSpec          struct{ BaseSpec }
+	EcotoneSpec         struct{ BaseSpec }
+	FjordSpec           struct{ BaseSpec }
+	PragueSpec          struct{ BaseSpec }
+	PragueEofSpec       struct{ BaseSpec }
+	LatestSpec          struct{ BaseSpec }
+)
+// MarshalJSON for serialization
+func (s SpecId) MarshalJSON() ([]byte, error) {
+	return json.Marshal(s.String())
+}
+// SpecToGeneric converts a SpecId to a generic Spec
+func SpecToGeneric(specID SpecId) interface{} {
+	switch specID {
+	case FRONTIER, FRONTIER_THAWING:
+		return FrontierSpec{NewSpec(FRONTIER)}
+	case HOMESTEAD, DAO_FORK:
+		return HomesteadSpec{NewSpec(HOMESTEAD)}
+	case TANGERINE:
+		return TangerineSpec{NewSpec(TANGERINE)}
+	case SPURIOUS_DRAGON:
+		return SpuriousDragonSpec{NewSpec(SPURIOUS_DRAGON)}
+	case BYZANTIUM:
+		return ByzantiumSpec{NewSpec(BYZANTIUM)}
+	case PETERSBURG, CONSTANTINOPLE:
+		return PetersburgSpec{NewSpec(PETERSBURG)}
+	case ISTANBUL, MUIR_GLACIER:
+		return IstanbulSpec{NewSpec(ISTANBUL)}
+	case BERLIN:
+		return BerlinSpec{NewSpec(BERLIN)}
+	case LONDON, ARROW_GLACIER, GRAY_GLACIER:
+		return LondonSpec{NewSpec(LONDON)}
+	case MERGE:
+		return MergeSpec{NewSpec(MERGE)}
+	case SHANGHAI:
+		return ShanghaiSpec{NewSpec(SHANGHAI)}
+	case CANCUN:
+		return CancunSpec{NewSpec(CANCUN)}
+	case PRAGUE:
+		return PragueSpec{NewSpec(PRAGUE)}
+	case PRAGUE_EOF:
+		return PragueEofSpec{NewSpec(PRAGUE_EOF)}
+	case BEDROCK:
+		return BedrockSpec{NewSpec(BEDROCK)}
+	case REGOLITH:
+		return RegolithSpec{NewSpec(REGOLITH)}
+	case CANYON:
+		return CanyonSpec{NewSpec(CANYON)}
+	case ECOTONE:
+		return EcotoneSpec{NewSpec(ECOTONE)}
+	case FJORD:
+		return FjordSpec{NewSpec(FJORD)}
+	case LATEST:
+		return LatestSpec{NewSpec(LATEST)}
+	default:
+		return nil
+	}
+}
+
+func (s *SpecId) UnmarshalJSON(data []byte) error {
+	var name string
+	if err := json.Unmarshal(data, &name); err != nil {
+		return err
+	}
+
+	if specID, ok := specNameToIdMap[name]; ok {
+		*s = specID
+		return nil
+	}
+	return fmt.Errorf("unknown SpecId: %s", name)
+}
diff --git a/execution/evm/specs_optimism_test.go b/execution/evm/specs_optimism_test.go
new file mode 100644
index 0000000..60bb4ef
--- /dev/null
+++ b/execution/evm/specs_optimism_test.go
@@ -0,0 +1,378 @@
+package evm
+
+import (
+	
"encoding/json" + "fmt" + "math/big" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSpecIdIsEnabledIn(t *testing.T) { + tests := []struct { + name string + specId SpecId + other SpecId + want bool + }{ + {"FRONTIER >= FRONTIER", FRONTIER, FRONTIER, true}, + {"HOMESTEAD >= FRONTIER", HOMESTEAD, FRONTIER, true}, + {"FRONTIER < HOMESTEAD", FRONTIER, HOMESTEAD, false}, + {"LATEST >= FRONTIER", LATEST, FRONTIER, true}, + {"FRONTIER < LATEST", FRONTIER, LATEST, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.specId.IsEnabledIn(tt.other)) + }) + } +} + +func TestTryFromUint8Optimism(t *testing.T) { + tests := []struct { + specID uint8 + want SpecId + ok bool + }{ + {0, FRONTIER, true}, + {1, FRONTIER_THAWING, true}, + {19, PRAGUE_EOF, true}, + {20, 0, false}, + {255, LATEST, true}, + } + + for _, tt := range tests { + got, ok := TryFromUint8(tt.specID) + assert.Equal(t, tt.ok, ok) + assert.Equal(t, tt.want, got) + } + +} + +func TestSpecToGeneric(t *testing.T) { + tests := []struct { + specID SpecId + want string + wantType string + }{ + {FRONTIER, "FRONTIER", "evm.FrontierSpec"}, + {HOMESTEAD, "HOMESTEAD", "evm.HomesteadSpec"}, + {TANGERINE, "TANGERINE", "evm.TangerineSpec"}, + {SPURIOUS_DRAGON, "SPURIOUS_DRAGON", "evm.SpuriousDragonSpec"}, + {BYZANTIUM, "BYZANTIUM", "evm.ByzantiumSpec"}, + {PETERSBURG, "PETERSBURG", "evm.PetersburgSpec"}, + {ISTANBUL, "ISTANBUL", "evm.IstanbulSpec"}, + {BERLIN, "BERLIN", "evm.BerlinSpec"}, + {LONDON, "LONDON", "evm.LondonSpec"}, + {MERGE, "MERGE", "evm.MergeSpec"}, + {SHANGHAI, "SHANGHAI", "evm.ShanghaiSpec"}, + {CANCUN, "CANCUN", "evm.CancunSpec"}, + {PRAGUE, "PRAGUE", "evm.PragueSpec"}, + {PRAGUE_EOF, "PRAGUE_EOF", "evm.PragueEofSpec"}, + {LATEST, "LATEST", "evm.LatestSpec"}, + } + + for _, tt := range tests { + t.Run(tt.want, func(t *testing.T) { + spec := SpecToGeneric(tt.specID) + + // Check if the type matches + if typeName := fmt.Sprintf("%T", spec); typeName != tt.wantType { + t.Errorf("expected type %s, got %s", tt.wantType, typeName) + } + + // Type assertion for SpecID method + switch specTyped := spec.(type) { + case interface{ SpecID() SpecId }: + if got := specTyped.SpecID().String(); got != tt.want { + t.Errorf("expected %s, got %s", tt.want, got) + } + default: + t.Errorf("spec does not implement SpecID method for specID %d", tt.specID) + } + }) + } +} + +func TestSpecIdMarshalJSON(t *testing.T) { + tests := []struct { + specId SpecId + want string + }{ + {FRONTIER, `"FRONTIER"`}, + {HOMESTEAD, `"HOMESTEAD"`}, + {LATEST, `"LATEST"`}, + } + + for _, tt := range tests { + t.Run(tt.want, func(t *testing.T) { + data, err := json.Marshal(tt.specId) + assert.NoError(t, err) + assert.JSONEq(t, tt.want, string(data)) + }) + } +} + +func TestSpecIdUnmarshalJSON(t *testing.T) { + tests := []struct { + name string + input string + want SpecId + wantErr bool + }{ + {"Valid FRONTIER", `"FRONTIER"`, FRONTIER, false}, + {"Valid LATEST", `"LATEST"`, LATEST, false}, + {"Invalid SpecId", `"INVALID"`, 0, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var specId SpecId + err := json.Unmarshal([]byte(tt.input), &specId) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.want, specId) + } + }) + } +} +func TestTxEnvUnmarshalJSON(t *testing.T) { + tests := []struct { + name string + input string + want TxEnv + wantErr bool + }{ + { + name: "basic transaction", + input: `{ + "caller": 
"0x1234567890123456789012345678901234567890", + "gas_limit": "0x5208", + "gas_price": "0x4a817c800", + "transact_to": {"type": "1", "address": "0x2345678901234567890123456789012345678901"}, + "value": "0xde0b6b3a7640000", + "data": "0x", + "access_list": [] + }`, + want: TxEnv{ + Caller: Address{Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90}}, + GasLimit: 21000, + GasPrice: big.NewInt(20000000000), + TransactTo: TxKind{ + Type: Call2, + Address: &Address{Addr: [20]byte{0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01}}, + }, + Value: big.NewInt(1000000000000000000), + Data: []byte{}, + // ChainID: pointer(uint64(1)), + AccessList: []AccessListItem{}, + }, + wantErr: false, + }, + { + name: "full transaction with all fields", + input: `{ + "caller": "0x1234567890123456789012345678901234567890", + "gas_limit": "0x5208", + "gas_price": "0x4a817c800", + "transact_to": {"type": "0"}, + "value": "0x0", + "data": "0x1234", + + "gas_priority_fee": "0x1234", + "blob_hashes": [ + "0x1234567890123456789012345678901234567890123456789012345678901234" + ], + "max_fee_per_blob_gas": "0x5678", + "authorization_list": {}, + "access_list": [ + { + "address": "0x3456789012345678901234567890123456789012", + "storage_keys": [ + "0x1234567890123456789012345678901234567890123456789012345678901234" + ] + } + ] + }`, + want: TxEnv{ + Caller: Address{Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90}}, + GasLimit: 21000, + GasPrice: big.NewInt(20000000000), + TransactTo: TxKind{ + Type: Create2, + }, + Value: big.NewInt(0), + Data: []byte{0x12, 0x34}, + // Nonce: pointer(uint64(1)), + // ChainID: pointer(uint64(1)), + + GasPriorityFee: big.NewInt(4660), + BlobHashes: []B256{{0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34}}, + MaxFeePerBlobGas: big.NewInt(22136), + AuthorizationList: &AuthorizationList{}, + AccessList: []AccessListItem{ + { + Address: Address{Addr: [20]byte{0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12}}, + StorageKeys: []B256{ + {0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34}, + }, + }, + }, + }, + wantErr: false, + }, + { + name: "invalid caller address", + input: `{ + "caller": "invalid", + "gas_limit": "0x5208", + "gas_price": "0x4a817c800", + "transact_to": {"type": "1", "address": "0x2345678901234567890123456789012345678901"}, + "value": "0x0", + "data": "0x", + "access_list": [] + }`, + wantErr: true, + }, + { + name: "invalid gas limit", + input: `{ + "caller": "0x1234567890123456789012345678901234567890", + "gas_limit": "invalid", + "gas_price": "0x4a817c800", + "transact_to": {"type": "1", "address": "0x2345678901234567890123456789012345678901"}, + "value": "0x0", + "data": "0x", + "access_list": [] + }`, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got TxEnv + err := json.Unmarshal([]byte(tt.input), &got) + + if (err != nil) != tt.wantErr { + t.Errorf("TxEnv.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if 
!tt.wantErr { + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("TxEnv.UnmarshalJSON() = %v, want %v", got, tt.want) + } + } + }) + } +} + + +func TestTxEnvUnmarshalJSONEdgeCases(t *testing.T) { + tests := []struct { + name string + input string + want TxEnv + wantErr bool + }{ + { + name: "empty access list", + input: `{ + "caller": "0x1234567890123456789012345678901234567890", + "gas_limit": "0x5208", + "gas_price": "0x4a817c800", + "transact_to": {"type": "1", "address": "0x2345678901234567890123456789012345678901"}, + "value": "0x0", + "data": "0x", + "access_list": [] + }`, + want: TxEnv{ + Caller: Address{Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90}}, + GasLimit: 21000, + GasPrice: big.NewInt(20000000000), + TransactTo: TxKind{ + Type: Call2, + Address: &Address{Addr: [20]byte{0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01}}, + }, + Value: big.NewInt(0), + Data: []byte{}, + AccessList: []AccessListItem{}, + }, + wantErr: false, + }, + { + name: "zero values", + input: `{ + "caller": "0x0000000000000000000000000000000000000000", + "gas_limit": "0x0", + "gas_price": "0x0", + "transact_to": {"type": "1", "address": "0x0000000000000000000000000000000000000000"}, + "value": "0x0", + "data": "0x", + "access_list": [] + }`, + want: TxEnv{ + Caller: Address{}, + GasLimit: 0, + GasPrice: big.NewInt(0), + TransactTo: TxKind{ + Type: Call2, + Address: &Address{}, + }, + Value: big.NewInt(0), + Data: []byte{}, + AccessList: []AccessListItem{}, + }, + wantErr: false, + }, + { + name: "very large numbers", + input: `{ + "caller": "0x1234567890123456789012345678901234567890", + "gas_limit": "0xffffffffffffffff", + "gas_price": "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "transact_to": {"type": "1", "address": "0x2345678901234567890123456789012345678901"}, + "value": "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "data": "0x", + "access_list": [] + }`, + want: TxEnv{ + Caller: Address{Addr: [20]byte{0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90}}, + GasLimit: ^uint64(0), + GasPrice: new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 256), big.NewInt(1)), + TransactTo: TxKind{ + Type: Call2, + Address: &Address{Addr: [20]byte{0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01, 0x23, 0x45, 0x67, 0x89, 0x01}}, + }, + Value: new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 256), big.NewInt(1)), + Data: []byte{}, + AccessList: []AccessListItem{}, + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got TxEnv + err := json.Unmarshal([]byte(tt.input), &got) + + if (err != nil) != tt.wantErr { + t.Errorf("TxEnv.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr { + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("TxEnv.UnmarshalJSON() = %v, want %v", got, tt.want) + } + } + }) + } +} diff --git a/execution/evm/tables_test.go b/execution/evm/tables_test.go new file mode 100644 index 0000000..a42f9e5 --- /dev/null +++ b/execution/evm/tables_test.go @@ -0,0 +1,48 @@ +package evm + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestNewPlainInstructionTable verifies that NewPlainInstructionTable initializes correctly. 
+func TestNewPlainInstructionTable(t *testing.T) { + // Define a sample InstructionTable + var sampleTable InstructionTable[any] + + // Initialize with NewPlainInstructionTable + instructionTables := NewPlainInstructionTable(sampleTable) + + // Ensure PlainTable is set and BoxedTable is nil + assert.NotNil(t, instructionTables.PlainTable) + assert.Nil(t, instructionTables.BoxedTable) + assert.Equal(t, PlainTableMode, instructionTables.Mode) +} + +// TestNewBoxedInstructionTable verifies that NewBoxedInstructionTable initializes correctly. +func TestNewBoxedInstructionTable(t *testing.T) { + // Define a sample BoxedInstructionTable + var sampleBoxedTable BoxedInstructionTable[any] + + // Initialize with NewBoxedInstructionTable + instructionTables := NewBoxedInstructionTable(sampleBoxedTable) + + // Ensure BoxedTable is set and PlainTable is nil + assert.NotNil(t, instructionTables.BoxedTable) + assert.Nil(t, instructionTables.PlainTable) + assert.Equal(t, BoxedTableMode, instructionTables.Mode) +} + +// TestInstructionTablesMode verifies the correct mode is set for each table type. +func TestInstructionTablesMode(t *testing.T) { + // Test with PlainTable + var plainTable InstructionTable[any] + instructionTables := NewPlainInstructionTable(plainTable) + assert.Equal(t, PlainTableMode, instructionTables.Mode) + + // Test with BoxedTable + var boxedTable BoxedInstructionTable[any] + instructionTables = NewBoxedInstructionTable(boxedTable) + assert.Equal(t, BoxedTableMode, instructionTables.Mode) +} \ No newline at end of file
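
A minimal usage sketch of the spec helpers introduced in this patch, under the default (non-optimism) build. The import path is assumed from the module path used elsewhere in the repository, and error handling is elided for brevity; SpecToGeneric, SpecID(), IsEnabledIn, and the JSON round-trip are the behaviors the tests above exercise.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/BlocSoc-iitr/selene/execution/evm" // assumed module path
)

func main() {
	// Map a hard fork to its concrete spec wrapper.
	spec := evm.SpecToGeneric(evm.CANCUN)
	fmt.Println(spec.SpecID()) // CANCUN

	// SpecId round-trips through JSON as its string name.
	data, _ := json.Marshal(evm.CANCUN) // `"CANCUN"`
	var decoded evm.SpecId
	_ = json.Unmarshal(data, &decoded)
	fmt.Println(decoded == evm.CANCUN) // true

	// Activation check, as exercised in TestSpecIdIsEnabledIn:
	// a later (or equal) fork reports the earlier fork as enabled.
	fmt.Println(evm.SHANGHAI.IsEnabledIn(evm.MERGE)) // true
}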