Skip to content

Commit

Permalink
fix: database migrations and hardhat security
Browse files Browse the repository at this point in the history
- Added migration for ghost_contract_address column in transactions table
- Fixed security issues in hardhat docker configuration
- Ensured proper migration sequence (0d9538be0318 -> cb34b6b353ed)

Migration changes:
- Added new column ghost_contract_address (String, nullable) to transactions table
- Provided proper upgrade and downgrade paths
- Verified migration execution and current head state

TODO: Frontend issues to fix:
- Methods panel not opening after transaction acceptance
- Need to investigate UI response after successful transactions
  • Loading branch information
mpaya5 committed Nov 28, 2024
2 parents aa10179 + 663a89e commit 02df53c
Show file tree
Hide file tree
Showing 134 changed files with 3,069 additions and 2,809 deletions.
114 changes: 56 additions & 58 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,77 +1,75 @@
# Location and credentials of the postgres database
DBHOST = 'postgres'
DBNAME = 'genlayer_state'
DBUSER = 'postgres'
DBPASSWORD = 'postgres'
DBPORT = '5432'
# Postgres Database Configuration
DBHOST = 'postgres'
DBNAME = 'genlayer_state'
DBUSER = 'postgres'
DBPASSWORD = 'postgres'
DBPORT = '5432'

# JSON array of initial validators to be created on startup.
# Example: [{"stake": 100, "provider": "openai", "model": "gpt-4o", "amount": 2}, {"stake": 200, "provider": "anthropic", "model": "claude-3-haiku-20240307", "amount": 1}]
VALIDATORS_CONFIG_JSON = ''

LOGCONFIG = 'dev' # dev/prod
FLASK_LOG_LEVEL = 'ERROR' # DEBUG/INFO/WARNING/ERROR/CRITICAL
# Logging Configuration
LOGCONFIG = 'dev' # dev/prod
FLASK_LOG_LEVEL = 'ERROR' # DEBUG/INFO/WARNING/ERROR/CRITICAL
DISABLE_INFO_LOGS_ENDPOINTS = '["ping", "eth_getTransactionByHash","gen_getContractSchemaForCode","gen_getContractSchema"]'

# JsonRPC server details
# JsonRPC Server Configuration
RPCPROTOCOL = 'http'
RPCHOST = 'jsonrpc'
RPCPORT = '4000'
# (the port debugpy is listening on)
RPCDEBUGPORT = '4678'

# GenVM server details
GENVMPROTOCOL = 'http'
GENVMHOST = 'genvm'
GENVMPORT = '6000'
# Location of the file executed inside the GenVM
GENVMCONLOC = '/tmp'
# TODO: Will be removed with the new logging
GENVMDEBUG = 1
# (the port debugpy is listening on)
GENVMDEBUGPORT = '6678'

# (enables debugging in VS Code)
VSCODEDEBUG = "false" # "true" or "false"

# Ollama server details
OLAMAPROTOCOL = 'http'
OLAMAHOST = 'ollama'
OLAMAPORT = '11434'
RPCDEBUGPORT = '4678' # debugpy listening port
JSONRPC_REPLICAS = '1' # number of JsonRPC container replicas to run, used to scale up for production

# WebRequest server details
# (container for getting web pages)
WEBREQUESTPROTOCOL = 'http'
WEBREQUESTHOST = 'webrequest'
WEBREQUESTPORT = '5000'
# GenVM Configuration
GENVM_BIN = "/genvm/bin"

# If you want to use OpenAI add your key here
OPENAIKEY = '<add_your_openai_api_key_here>'
# VSCode Debug Configuration
VSCODEDEBUG = "false" # "true" or "false"

# Heurist AI Details
HEURISTAIURL = 'https://llm-gateway.heurist.xyz'
HEURISTAIMODELSURL = 'https://raw.githubusercontent.com/heurist-network/heurist-models/main/models.json'
# If you want to use Heurist AI add your key here
HEURISTAIAPIKEY = '<add_your_heuristai_api_key_here>'
# Ollama Server Configuration
OLAMAPROTOCOL = 'http'
OLAMAHOST = 'ollama'
OLAMAPORT = '11434'

# If you want to use Anthropic (Claude AI) add your key here and uncomment the line
# ANTHROPIC_API_KEY = '<add_your_anthropic_api_key_here>'
# WebRequest Server Configuration
WEBREQUESTPROTOCOL = 'http'
WEBREQUESTHOST = 'webrequest'
WEBREQUESTPORT = '5000'
WEBREQUESTSELENIUMPORT = '5001'

# Front end container details
VITE_JSON_RPC_SERVER_URL = 'http://127.0.0.1:4000/api' # if VITE_PROXY_ENABLED = 'true' change to '/api'
VITE_WS_SERVER_URL = 'ws://127.0.0.1:4000' # if VITE_PROXY_ENABLED = 'true' change to '/'
VITE_PLAUSIBLE_DOMAIN = 'studio.genlayer.com'
VITE_IS_HOSTED=false
# NGINX Server Configuration
SERVER_NAME = 'studio.genlayer.com'

FRONTEND_PORT = '8080'
FRONTEND_BUILD_TARGET = 'final' # change to 'dev' to run in dev mode
# Frontend Configuration
# If you want to run the frontend in production, change http to https and ws to wss
VITE_JSON_RPC_SERVER_URL = 'http://127.0.0.1:4000/api' # if VITE_PROXY_ENABLED = 'true' change to '/api'
VITE_WS_SERVER_URL = 'ws://127.0.0.1:4000' # if VITE_PROXY_ENABLED = 'true' change to '/'
VITE_PLAUSIBLE_DOMAIN = 'studio.genlayer.com'
FRONTEND_PORT = '8080'
FRONTEND_BUILD_TARGET = 'final' # change to 'dev' to run in dev mode

VITE_PROXY_ENABLED = 'false'
# Vite Proxy Configuration (for local development)
VITE_PROXY_ENABLED = 'false'
VITE_PROXY_JSON_RPC_SERVER_URL = 'http://jsonrpc:4000'
VITE_PROXY_WS_SERVER_URL = 'ws://jsonrpc:4000'
VITE_PROXY_WS_SERVER_URL = 'ws://jsonrpc:4000'
VITE_IS_HOSTED = 'false'

FRONTEND_BUILD_TARGET = 'final' # change to 'dev' to run in dev mode

# Hardhat port
HARDHAT_URL = 'http://hardhat'
HARDHATPORT = '8545'
HARDHAT_PRIVATE_KEY = '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
HARDHAT_PRIVATE_KEY = '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
# LLM Providers Configuration
# If you want to use OpenAI LLMs, add your key here
OPENAIKEY = '<add_your_openai_api_key_here>'

# If you want to use Anthropic AI LLMs, add your key here
ANTHROPIC_API_KEY = '<add_your_anthropic_api_key_here>'

# If you want to use Heurist AI LLMs, add your key here
HEURISTAIURL = 'https://llm-gateway.heurist.xyz'
HEURISTAIMODELSURL = 'https://raw.githubusercontent.com/heurist-network/heurist-models/main/models.json'
HEURISTAIAPIKEY = '<add_your_heuristai_api_key_here>'

# Validator Configuration
# JSON array of initial validators to be created on startup.
# Example: VALIDATORS_CONFIG_JSON = '[{"stake": 100, "provider": "openai", "model": "gpt-4o", "amount": 2}, {"stake": 200, "provider": "anthropic", "model": "claude-3-haiku-20240307", "amount": 1}]'
VALIDATORS_CONFIG_JSON = ''
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
text=auto eol=lf
1 change: 1 addition & 0 deletions .github/workflows/docker-build-and-push-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ jobs:
with:
context: ${{ inputs.docker_build_context }}
file: ${{ inputs.dockerfile }}
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/release-from-main.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
name: Create releases from main every day
name: Create releases from main
on:
schedule:
- cron: "00 08 * * 1-4" # 08:00 AM (UTC) from Monday to Thursday

workflow_dispatch:
permissions:
contents: read # for checkout

Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/unit-tests-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.11
python-version: 3.12.4
cache: pip
- run: pip install -r backend/protocol_rpc/requirements.txt
- run: pip install pytest-cov
Expand Down
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,9 @@ dmypy.json
receipt.json
.DS_Store

# Docker Volume
data/

# LLM models
.ollama

Expand All @@ -146,6 +149,10 @@ node_modules/
package-lock.json
/package.json

# Nginx TLS certificates and keys
nginx/ssl/*.key
nginx/ssl/*.crt

# Hardhat files
hardhat/cache
hardhat/artifacts
Expand Down
91 changes: 59 additions & 32 deletions backend/consensus/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
Validator,
)
from backend.node.base import Node
from backend.node.genvm.types import ExecutionMode, Receipt, Vote
from backend.node.types import ExecutionMode, Receipt, Vote, ExecutionResultStatus
from backend.protocol_rpc.message_handler.base import MessageHandler
from backend.protocol_rpc.message_handler.types import (
LogEvent,
Expand Down Expand Up @@ -110,18 +110,34 @@ async def _run_consensus(self):
for queue in [q for q in self.queues.values() if not q.empty()]:
# sessions cannot be shared between coroutines, we need to create a new session for each coroutine
# https://docs.sqlalchemy.org/en/20/orm/session_basics.html#is-the-session-thread-safe-is-asyncsession-safe-to-share-in-concurrent-tasks
transaction = await queue.get()
transaction: Transaction = await queue.get()
with self.get_session() as session:

def contract_snapshot_factory(
contract_address,
session=session,
transaction=transaction,
):
if (
transaction.type == TransactionType.DEPLOY_CONTRACT
and contract_address == transaction.to_address
):
ret = ContractSnapshot(None, session)
ret.contract_address = transaction.to_address
ret.contract_code = transaction.data[
"contract_code"
]
ret.encoded_state = {}
return ret
return ContractSnapshot(contract_address, session)

async def exec_transaction_with_session_handling():
await self.exec_transaction(
transaction,
TransactionsProcessor(session),
ChainSnapshot(session),
AccountsManager(session),
lambda contract_address, session=session: ContractSnapshot(
contract_address, session
),
contract_snapshot_factory,
)
session.commit()

Expand Down Expand Up @@ -211,20 +227,25 @@ async def exec_transaction(

num_validators = len(remaining_validators) + 1

contract_snapshot = contract_snapshot_factory(transaction.to_address)
contract_snapshot_supplier = lambda: contract_snapshot_factory(
transaction.to_address
)

leaders_contract_snapshot = contract_snapshot_supplier()

# Create Leader
leader_node = node_factory(
leader,
ExecutionMode.LEADER,
contract_snapshot,
leaders_contract_snapshot,
None,
msg_handler,
contract_snapshot_factory,
)

# Leader executes transaction
leader_receipt = await leader_node.exec_transaction(transaction)

votes = {leader["address"]: leader_receipt.vote.value}
consensus_data.votes = votes
consensus_data.leader_receipt = leader_receipt
Expand All @@ -246,7 +267,7 @@ async def exec_transaction(
node_factory(
validator,
ExecutionMode.VALIDATOR,
contract_snapshot,
contract_snapshot_supplier(),
leader_receipt,
msg_handler,
contract_snapshot_factory,
Expand Down Expand Up @@ -286,7 +307,7 @@ async def exec_transaction(

if (
len([vote for vote in votes.values() if vote == Vote.AGREE.value])
>= num_validators // 2
>= (num_validators + 1) // 2
):
break # Consensus reached

Expand All @@ -302,6 +323,7 @@ async def exec_transaction(
EventType.ERROR,
EventScope.CONSENSUS,
"Failed to reach consensus",
transaction_hash=transaction.hash,
)
)
ConsensusAlgorithm.dispatch_transaction_status_update(
Expand Down Expand Up @@ -332,34 +354,38 @@ async def exec_transaction(
EventScope.CONSENSUS,
"Reached consensus",
consensus_data.to_dict(),
transaction_hash=transaction.hash,
)
)

# Register contract if it is a new contract
if transaction.type == TransactionType.DEPLOY_CONTRACT:
new_contract = {
"id": transaction.data["contract_address"],
"data": {
"state": leader_receipt.contract_state,
"code": transaction.data["contract_code"],
"ghost_contract_address": transaction.ghost_contract_address,
},
}
contract_snapshot.register_contract(new_contract)

msg_handler.send_message(
LogEvent(
"deployed_contract",
EventType.SUCCESS,
EventScope.GENVM,
"Contract deployed",
new_contract,
if leader_receipt.execution_result == ExecutionResultStatus.SUCCESS:
# Register contract if it is a new contract
if transaction.type == TransactionType.DEPLOY_CONTRACT:
new_contract = {
"id": transaction.data["contract_address"],
"data": {
"state": leader_receipt.contract_state,
"code": transaction.data["contract_code"],
"ghost_contract_address": transaction.ghost_contract_address,
},
}
leaders_contract_snapshot.register_contract(new_contract)
msg_handler.send_message(
LogEvent(
"deployed_contract",
EventType.SUCCESS,
EventScope.GENVM,
"Contract deployed",
new_contract,
transaction_hash=transaction.hash,
)
)
)

# Update contract state if it is an existing contract
else:
contract_snapshot.update_contract_state(leader_receipt.contract_state)
# Update contract state if it is an existing contract
else:
leaders_contract_snapshot.update_contract_state(
leader_receipt.contract_state
)

# Finalize transaction
consensus_data.final = True
Expand Down Expand Up @@ -469,6 +495,7 @@ def dispatch_transaction_status_update(
"hash": str(transaction_hash),
"new_status": str(new_status.value),
},
transaction_hash=transaction_hash,
)
)

Expand Down
14 changes: 9 additions & 5 deletions backend/database_handler/contract_snapshot.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,14 @@ class ContractSnapshot:
"""
Warning: if you initialize this class with a contract_address:
- The contract_address must exist in the database.
- `self.contract_data`, `self.contract_code`, `self.encoded_state` and `self.ghost_contract_address` will be loaded from the database **only once** at initialization.
- `self.contract_data`, `self.contract_code` and `self.encoded_state` will be loaded from the database **only once** at initialization.
"""

def __init__(self, contract_address: str, session: Session):
contract_address: str
contract_code: str
encoded_state: dict[str, dict[str, str]]

def __init__(self, contract_address: str | None, session: Session):
self.session = session

if contract_address is not None:
Expand Down Expand Up @@ -47,15 +51,15 @@ def register_contract(self, contract: dict):
current_contract.data = contract["data"]
self.session.commit()

def update_contract_state(self, new_state: str):
def update_contract_state(self, new_state: dict[str, str]):
"""Update the state of the contract in the database."""
new_contract_nada = {
new_contract_data = {
"code": self.contract_data["code"],
"state": new_state,
}

contract = (
self.session.query(CurrentState).filter_by(id=self.contract_address).one()
)
contract.data = new_contract_nada
contract.data = new_contract_data
self.session.commit()
Loading

0 comments on commit 02df53c

Please sign in to comment.