Skip to content

Commit

Permalink
Squashed Orbit Progress Commit
Browse files Browse the repository at this point in the history
bumped arbitrum nitro to v2.3.3

modify some module versions

Bump EigenDA version to v0.6.1

Update nitro-contracts to include fixes.

fix header byte related bug

feat(Arbitrator): Add support for EigenDA preimage targeting

feat(Arbitrator): Add support for EigenDA preimage targeting - fmt & rm dbug stmts

feat(Arbitrator): Add support for EigenDA preimage targeting - fmt & rm dbug stmts

init so can debug

commit and reinstall the repo

bump kzg library version, new encoding issues aaa

generates proof now but in a really gross way, probably incorrect

update rest to use updated

hash now matches preimage oracle hash

serialize uncompressed bytes, not compressed

rename variables appropriately

the sanity checks work now holy shit

clean up

update submodules

Update submodules to point to private develop branches

update submodules

rm use of bn256 kzg deps

rm use of bn256 kzg deps

Set submodule versions correctly, fix go dependency conflicts

Revert brotli submodule to correct version

Update contracts repo version
  • Loading branch information
afkbyte authored and teddyknox committed May 29, 2024
1 parent 6a1c1a7 commit 83279f6
Show file tree
Hide file tree
Showing 42 changed files with 920 additions and 235 deletions.
73 changes: 73 additions & 0 deletions .github/workflows/docker-eigenda.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Builds the nitro-eigenda Docker image on every pushed tag and publishes it
# to the team's private Amazon ECR registry.
name: Build nitro-eigenda Docker Image

on:
  push:
    tags: ['*']

jobs:
  docker:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - name: build and push nitro-eigenda
            image: nitro-eigenda
            dockerfile: Dockerfile
            context: .
            buildargs: ''
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Nitro builds require all vendored submodules (contracts, testnode, ...).
          submodules: 'recursive'

      - # Add support for more platforms with QEMU (optional)
        # https://github.com/docker/setup-qemu-action
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Prepare Environment Variables
        run: |
          echo "SHORT_SHA=${GITHUB_SHA::7}" | tee -a $GITHUB_ENV
          GIT_TAG=$(git tag --points-at HEAD)
          echo "GIT_TAG=$GIT_TAG" | tee -a $GITHUB_ENV
          echo "REF_NAME=$(echo ${GIT_TAG:-$GITHUB_REF_NAME} | sed 's/[^a-zA-Z0-9._]/-/g')" | tee -a $GITHUB_ENV

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.ECR_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.ECR_ACCESS_KEY }}
          aws-region: us-west-2

      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2

      # NOTE: metadata-action selects a regex capture with `group=N`; it does not
      # support sed-style `value=$1` backreferences, so the last tag rule below
      # uses `group=1` to extract the `vX.Y.Z-suffix` prefix from the tag.
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ secrets.ECR_REGISTRY }}/${{ matrix.image }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=ref,event=tag
            type=ref,event=tag,suffix={{sha}}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha,prefix=,format=short,enable=true
            type=raw,value=latest,enable={{is_default_branch}}
            type=match,pattern=^(v\d+\.\d+\.\d+-\w*)\..*$,group=1,enable={{is_default_branch}}

      - name: ${{ matrix.name }}
        uses: docker/build-push-action@v5
        with:
          context: ${{ matrix.context }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          file: ${{ matrix.dockerfile }}
          provenance: false
11 changes: 6 additions & 5 deletions .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,14 @@
[submodule "brotli"]
path = brotli
url = https://github.com/google/brotli.git
[submodule "contracts"]
path = contracts
url = https://github.com/OffchainLabs/nitro-contracts.git
branch = develop
[submodule "arbitrator/wasm-testsuite/testsuite"]
path = arbitrator/wasm-testsuite/testsuite
url = https://github.com/WebAssembly/testsuite.git
[submodule "contracts"]
path = contracts
url = [email protected]:Layr-Labs/nitro-contracts-private.git
branch = develop
[submodule "nitro-testnode"]
path = nitro-testnode
url = https://github.com/OffchainLabs/nitro-testnode.git
url = [email protected]:Layr-Labs/nitro-testnode-private.git
branch = develop
1 change: 1 addition & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ COPY ./blsSignatures ./blsSignatures
COPY ./cmd/chaininfo ./cmd/chaininfo
COPY ./cmd/replay ./cmd/replay
COPY ./das/dastree ./das/dastree
COPY ./das/eigenda ./das/eigenda
COPY ./precompiles ./precompiles
COPY ./statetransfer ./statetransfer
COPY ./util ./util
Expand Down
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,16 @@
<img src="https://arbitrum.io/assets/arbitrum/logo_color.png" alt="Logo" width="80" height="80">
</a>

<h3 align="center">Arbitrum Nitro</h3>
<h3 align="center">Arbitrum Nitro + EigenDA</h3>

<p align="center">
<a href="https://developer.arbitrum.io/"><strong>Next Generation Ethereum L2 Technology »</strong></a>
<br />
</p>
</p>

This is a fork of Arbitrum Nitro, developed by [AltLayer](https://altlayer.io/) in technical partnership with EigenLabs.

## About Arbitrum Nitro

<img src="https://arbitrum.io/assets/arbitrum/logo_color.png" alt="Logo" width="80" height="80">
Expand Down
2 changes: 1 addition & 1 deletion arbitrator/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions arbitrator/arbutil/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,5 @@ pub enum PreimageType {
Keccak256,
Sha2_256,
EthVersionedHash,
EigenDAHash,
}
7 changes: 0 additions & 7 deletions arbitrator/jit/Cargo.lock

This file was deleted.

4 changes: 3 additions & 1 deletion arbitrator/jit/src/gostack.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@ impl MemoryViewContainer {
fn closure<'a>(
store: &'a StoreRef,
) -> impl (for<'b> FnOnce(&'b Memory) -> MemoryView<'b>) + 'a {
move |memory: &Memory| memory.view(&store)
move |memory: &Memory| {
memory.view(&store.clone())
}
}

let store = env.as_store_ref();
Expand Down
8 changes: 8 additions & 0 deletions arbitrator/prover/src/host.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ pub enum Hostio {
WavmReadKeccakPreimage,
WavmReadSha256Preimage,
WavmReadEthVersionedHashPreimage,
WavmReadEigenDAHashPreimage,
WavmReadInboxMessage,
WavmReadDelayedInboxMessage,
WavmHaltAndSetFinished,
Expand All @@ -76,6 +77,7 @@ impl FromStr for Hostio {
("env", "wavm_read_keccak_256_preimage") => WavmReadKeccakPreimage,
("env", "wavm_read_sha2_256_preimage") => WavmReadSha256Preimage,
("env", "wavm_read_eth_versioned_hash_preimage") => WavmReadEthVersionedHashPreimage,
("env", "wavm_read_eigen_da_hash_preimage") => WavmReadEigenDAHashPreimage,
("env", "wavm_read_inbox_message") => WavmReadInboxMessage,
("env", "wavm_read_delayed_inbox_message") => WavmReadDelayedInboxMessage,
("env", "wavm_halt_and_set_finished") => WavmHaltAndSetFinished,
Expand Down Expand Up @@ -114,6 +116,7 @@ impl Hostio {
WavmReadKeccakPreimage => func!([I32, I32], [I32]),
WavmReadSha256Preimage => func!([I32, I32], [I32]),
WavmReadEthVersionedHashPreimage => func!([I32, I32], [I32]),
WavmReadEigenDAHashPreimage => func!([I32, I32], [I32]),
WavmReadInboxMessage => func!([I64, I32, I32], [I32]),
WavmReadDelayedInboxMessage => func!([I64, I32, I32], [I32]),
WavmHaltAndSetFinished => func!(),
Expand Down Expand Up @@ -188,6 +191,11 @@ impl Hostio {
opcode!(LocalGet, 1);
opcode!(ReadPreImage, PreimageType::EthVersionedHash);
}
WavmReadEigenDAHashPreimage => {
opcode!(LocalGet, 0);
opcode!(LocalGet, 1);
opcode!(ReadPreImage, PreimageType::EigenDAHash);
}
WavmReadInboxMessage => {
opcode!(LocalGet, 0);
opcode!(LocalGet, 1);
Expand Down
170 changes: 170 additions & 0 deletions arbitrator/prover/src/kzgbn254.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@

use crate::utils::Bytes32;
// use ark_ec::{AffineRepr, CurveGroup,pairing::Pairing};
// use kzgbn254::{
// kzg::Kzg,
// blob::Blob,
// helpers::{remove_empty_byte_from_padded_bytes, to_fr_array}
// };
use eyre::{ensure, Result};
// use ark_bn254::{Bn254, G1Affine, G1Projective, G2Affine};
use num::BigUint;
use sha2::{Digest, Sha256};
use std::{convert::TryFrom, io::Write};
// use ark_serialize::CanonicalSerialize;
use num::Zero;

// Lazily-initialized globals for EigenDA blob preimage proving over BN254.
lazy_static::lazy_static! {

// note that we are loading 3000 for testing purposes atm, but for production use these values:
// g1 and g2 points from the operator setup guide
// srs_order = 268435456
// srs_points_to_load = 131072

// pub static ref KZG: Kzg = Kzg::setup(
// "./arbitrator/prover/src/test-files/g1.point",
// "./arbitrator/prover/src/test-files/g2.point",
// "./arbitrator/prover/src/test-files/g2.point.powerOf2",
// 3000,
// 3000
// ).unwrap();

// modulus for the underlying field F_r of the elliptic curve
// see https://docs.eigenlayer.xyz/eigenda/integrations-guides/dispersal/blob-serialization-requirements
// NOTE(review): despite the name, this value is the BN254 (alt_bn128)
// scalar-field modulus r, not a BLS curve modulus — consider renaming to
// something like BN254_MODULUS to avoid confusion with EIP-4844's BLS_MODULUS.
pub static ref BLS_MODULUS: BigUint = "21888242871839275222246405745257275088548364400416034343698204186575808495617".parse().unwrap();

// (2*1024*1024)/32 = 65536
// Number of 32-byte field elements in a 2 MiB blob. A plain `const` would be
// simpler than a lazy_static ref, but the deref-based interface is kept as-is.
pub static ref FIELD_ELEMENTS_PER_BLOB: usize = 65536;
}

// pub fn prove_kzg_preimage_bn254(
// hash: Bytes32,
// preimage: &[u8],
// offset: u32,
// out: &mut impl Write,
// ) -> Result<()> {

// let mut kzg = KZG.clone();

// // expand the roots of unity, should work as long as it's longer than chunk length and chunks
// // from my understanding the data_setup_mins pads both min_chunk_len and min_num_chunks to
// // the next power of 2 so we can load a max of 2048 from the test values here
// // then we can take the roots of unity we actually need (len polynomial) and pass them in
// // @anup, this is a really gross way to do this, pls tell better way
// kzg.data_setup_mins(1, 2048)?;

// // we are expecting the preimage to be unpadded when turned into a blob function so need to unpad it first
// let unpadded_preimage_vec: Vec<u8> = remove_empty_byte_from_padded_bytes(preimage);
// let unpadded_preimage = unpadded_preimage_vec.as_slice();

// // repad it here, TODO: need to ask to change the interface for this
// let blob = Blob::from_bytes_and_pad(unpadded_preimage);
// let blob_polynomial = blob.to_polynomial().unwrap();
// let blob_commitment = kzg.commit(&blob_polynomial).unwrap();

// let mut commitment_bytes = Vec::new();
// blob_commitment.serialize_uncompressed(&mut commitment_bytes).unwrap();

// let mut expected_hash: Bytes32 = Sha256::digest(&*commitment_bytes).into();
// expected_hash[0] = 1;

// ensure!(
// hash == expected_hash,
// "Trying to prove versioned hash {} preimage but recomputed hash {}",
// hash,
// expected_hash,
// );

// ensure!(
// offset % 32 == 0,
// "Cannot prove blob preimage at unaligned offset {}",
// offset,
// );

// let offset_usize = usize::try_from(offset)?;
// let mut proving_offset = offset;

// // address proving past end edge case later
// let proving_past_end = offset_usize >= preimage.len();
// if proving_past_end {
// // Proving any offset proves the length which is all we need here,
// // because we're past the end of the preimage.
// proving_offset = 0;
// }

// let proving_offset_bytes = proving_offset.to_le_bytes();
// let mut padded_proving_offset_bytes = [0u8; 32];
// padded_proving_offset_bytes[32 - proving_offset_bytes.len()..].copy_from_slice(&proving_offset_bytes);

// // in production we will first need to perform an IFFT on the blob data to get the expected y value
// let mut proven_y = blob.get_blob_data();
// let offset_usize = offset as usize; // Convert offset to usize
// proven_y = proven_y[offset_usize..(offset_usize + 32)].to_vec();

// let proven_y_fr = to_fr_array(&proven_y);

// let polynomial = blob.to_polynomial().unwrap();

// let g2_generator = G2Affine::generator();
// let z_g2= (g2_generator * proven_y_fr[0]).into_affine();

// let g2_tau: G2Affine = kzg.get_g2_points().get(1).unwrap().clone();
// let g2_tau_minus_g2_z = (g2_tau - z_g2).into_affine();

// // required roots of unity are the first polynomial length roots in the expanded set
// let roots_of_unity = kzg.get_expanded_roots_of_unity();
// let required_roots_of_unity = &roots_of_unity[0..polynomial.len()];
// // TODO: ask for interface alignment later
// let kzg_proof = match kzg.compute_kzg_proof(&blob_polynomial, offset as u64, &required_roots_of_unity.to_vec()) {
// Ok(proof) => proof,
// Err(err) => return Err(err.into()),
// };

// let mut kzg_proof_uncompressed_bytes = Vec::new();
// kzg_proof.serialize_uncompressed(&mut kzg_proof_uncompressed_bytes).unwrap();

// let xminusz_x0: BigUint = g2_tau_minus_g2_z.x.c0.into();
// let xminusz_x1: BigUint = g2_tau_minus_g2_z.x.c1.into();
// let xminusz_y0: BigUint = g2_tau_minus_g2_z.y.c0.into();
// let xminusz_y1: BigUint = g2_tau_minus_g2_z.y.c1.into();

// // turn each element of xminusz into bytes, then pad each to 32 bytes, then append in order x1,x0,y1,y0
// let mut xminusz_encoded_bytes = Vec::with_capacity(128);
// append_left_padded_biguint_be(&mut xminusz_encoded_bytes, &xminusz_x1);
// append_left_padded_biguint_be(&mut xminusz_encoded_bytes, &xminusz_x0);
// append_left_padded_biguint_be(&mut xminusz_encoded_bytes, &xminusz_y1);
// append_left_padded_biguint_be(&mut xminusz_encoded_bytes, &xminusz_y0);

// // encode the commitment
// let commitment_x_bigint: BigUint = blob_commitment.x.into();
// let commitment_y_bigint: BigUint = blob_commitment.y.into();
// let mut commitment_encoded_bytes = Vec::with_capacity(32);
// append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_x_bigint);
// append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_y_bigint);


// // encode the proof
// let proof_x_bigint: BigUint = kzg_proof.x.into();
// let proof_y_bigint: BigUint = kzg_proof.y.into();
// let mut proof_encoded_bytes = Vec::with_capacity(64);
// append_left_padded_biguint_be(&mut proof_encoded_bytes, &proof_x_bigint);
// append_left_padded_biguint_be(&mut proof_encoded_bytes, &proof_y_bigint);

// out.write_all(&*hash)?; // hash [:32]
// out.write_all(&padded_proving_offset_bytes)?; // evaluation point [32:64]
// out.write_all(&*proven_y)?; // expected output [64:96]
// out.write_all(&xminusz_encoded_bytes)?; // g2TauMinusG2z [96:224]
// out.write_all(&*commitment_encoded_bytes)?; // kzg commitment [224:288]
// out.write_all(&proof_encoded_bytes)?; // proof [288:352]


// Ok(())
// }
// // Helper function to append BigUint bytes into the vector with padding; left padded big endian bytes to 32
// fn append_left_padded_biguint_be(vec: &mut Vec<u8>, biguint: &BigUint) {
// let bytes = biguint.to_bytes_be();
// let padding = 32 - bytes.len();
// vec.extend_from_slice(&vec![0; padding]);
// vec.extend_from_slice(&bytes);
// }

1 change: 1 addition & 0 deletions arbitrator/prover/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ pub mod binary;
mod host;
mod kzg;
pub mod machine;
mod kzgbn254;
/// cbindgen:ignore
mod memory;
mod merkle;
Expand Down
Loading

0 comments on commit 83279f6

Please sign in to comment.