Skip to content

Commit

Permalink
fix: EigenDA challenge test - fix all bugs, things work but ethereum …
Browse files Browse the repository at this point in the history
…needs better translation for bytecode --> abi types
  • Loading branch information
epociask committed Aug 17, 2024
1 parent 35ddd66 commit 80db0f0
Show file tree
Hide file tree
Showing 14 changed files with 447 additions and 345 deletions.
37 changes: 27 additions & 10 deletions arbitrator/prover/src/kzgbn254.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,33 +8,50 @@ use kzgbn254::{blob::Blob, kzg::Kzg, polynomial::PolynomialFormat};
use num::BigUint;
use sha2::{Digest, Sha256};
use sha3::Keccak256;
use std::env;
use std::io::Write;
use std::path::PathBuf;

lazy_static::lazy_static! {

// note that we are loading a reduced SRS (65536 points) for testing purposes atm, but for production use these values:
// g1 and g2 points from the operator setup guide
// srs_order = 268435456
// srs_points_to_load = 131072 (65536 is enough)

pub static ref KZG: Kzg = Kzg::setup(
"./arbitrator/prover/src/mainnet-files/g1.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.powerOf2",
pub static ref KZG_BN254_SETTINGS: Kzg = Kzg::setup(
&load_directory_with_prefix("src/mainnet-files/g1.point.65536"),
&load_directory_with_prefix("src/mainnet-files/g2.point.65536"),
&load_directory_with_prefix("src/mainnet-files/g2.point.powerOf2"),
268435456,
65536
).unwrap();
}

// Resolves an SRS point-file path relative to the repository root.
//
// Challenge system tests execute with their working directory inside
// `system_tests`, so the prover source tree sits one level up; normal
// node operation resolves from the repository root instead.
fn load_directory_with_prefix(directory_name: &str) -> String {
    let cwd = env::current_dir().expect("Failed to get current directory");
    // Path::ends_with matches whole trailing components, so this only
    // triggers when the final directory is literally `system_tests`.
    let base = if cwd.ends_with("system_tests") {
        "../arbitrator/prover/"
    } else {
        "./arbitrator/prover/"
    };
    PathBuf::from(base)
        .join(directory_name)
        .to_string_lossy()
        .into_owned()
}

/// Creates a KZG preimage proof consumable by the point evaluation precompile.
pub fn prove_kzg_preimage_bn254(
hash: Bytes32,
preimage: &[u8],
offset: u32,
out: &mut impl Write,
) -> Result<()> {
let mut kzg = KZG.clone();

let mut kzg = KZG_BN254_SETTINGS.clone();
// expand roots of unity
kzg.calculate_roots_of_unity(preimage.len() as u64)?;

Expand Down
2 changes: 1 addition & 1 deletion arbitrator/prover/src/machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2448,7 +2448,7 @@ impl Machine {
hash.red(),
);
self.print_backtrace(true);
bail!("missing requested preimage for hash when stepping machine forward {}", hash);
bail!("missing requested preimage for hash {}", hash);
};

if preimage_ty == PreimageType::EthVersionedHash
Expand Down
67 changes: 31 additions & 36 deletions arbitrator/prover/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,14 @@

#[cfg(feature = "native")]
use crate::kzg::ETHEREUM_KZG_SETTINGS;
use crate::kzgbn254::KZG_BN254_SETTINGS;
use arbutil::PreimageType;
use ark_serialize::CanonicalSerialize;
#[cfg(feature = "native")]
use c_kzg::{Blob, KzgCommitment};
use digest::Digest;
use eyre::{eyre, Result};
use kzgbn254::{blob::Blob as EigenDABlob, kzg::Kzg as KzgBN254, polynomial::PolynomialFormat};
use kzgbn254::{blob::Blob as EigenDABlob, polynomial::PolynomialFormat};
use num::BigUint;
use serde::{Deserialize, Serialize};
use sha2::Sha256;
Expand Down Expand Up @@ -201,6 +202,33 @@ pub fn append_left_padded_biguint_be(vec: &mut Vec<u8>, biguint: &BigUint) {
vec.extend_from_slice(&bytes);
}

/// Appends the 4-byte big-endian encoding of `uint32` to `vec`.
///
/// A `u32` is always exactly 32 bits wide, so its big-endian byte
/// representation is inherently left-padded to 4 bytes — no explicit
/// padding step is needed.
pub fn append_left_padded_uint32_be(vec: &mut Vec<u8>, uint32: &u32) {
    vec.extend_from_slice(&uint32.to_be_bytes());
}

/// Computes the 32-byte preimage hash for an EigenDA blob.
///
/// The hash is keccak256 over a 68-byte buffer: the KZG commitment's X
/// and Y coordinates (32 bytes each, big-endian, left-padded) followed
/// by the blob length as a 4-byte big-endian u32.
pub fn hash_eigenda_preimage(preimage: &[u8]) -> Result<[u8; 32]> {
    let blob = EigenDABlob::from_padded_bytes_unchecked(preimage);

    let polynomial = blob.to_polynomial(PolynomialFormat::InCoefficientForm)?;
    let commitment = KZG_BN254_SETTINGS.commit(&polynomial)?;

    let x_coord: BigUint = commitment.x.into();
    let y_coord: BigUint = commitment.y.into();
    let blob_len: u32 = blob.len() as u32;

    // 32 bytes per coordinate + 4 bytes of length = 68 bytes total.
    let mut encoded = Vec::with_capacity(68);
    append_left_padded_biguint_be(&mut encoded, &x_coord);
    append_left_padded_biguint_be(&mut encoded, &y_coord);
    append_left_padded_uint32_be(&mut encoded, &blob_len);

    let mut hasher = Keccak256::new();
    hasher.update(&encoded);
    let digest: [u8; 32] = hasher.finalize().into();
    Ok(digest)
}

#[cfg(feature = "native")]
pub fn hash_preimage(preimage: &[u8], ty: PreimageType) -> Result<[u8; 32]> {
match ty {
Expand All @@ -216,42 +244,9 @@ pub fn hash_preimage(preimage: &[u8], ty: PreimageType) -> Result<[u8; 32]> {
Ok(commitment_hash)
}
PreimageType::EigenDAHash => {
let kzg_bn254: KzgBN254 = KzgBN254::setup(
"./arbitrator/prover/src/mainnet-files/g1.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.powerOf2",
268435456,
65536,
)
.unwrap();

let blob = EigenDABlob::from_padded_bytes_unchecked(preimage);

let blob_polynomial = blob.to_polynomial(PolynomialFormat::InCoefficientForm)?;
let blob_commitment = kzg_bn254.commit(&blob_polynomial)?;

let commitment_x_bigint: BigUint = blob_commitment.x.into();
let commitment_y_bigint: BigUint = blob_commitment.y.into();
let length_bigint: BigUint = blob.len().into();
// 32 bytes per each commitment coordinate (64 bytes)
// 25 bits for length considering 32mb blobs padded to nearest power of 2 (2^25)
// pad to 32 bits or 4 bytes so 68 bytes total
let mut commitment_length_encoded_bytes = Vec::with_capacity(68);
append_left_padded_biguint_be(
&mut commitment_length_encoded_bytes,
&commitment_x_bigint,
);
append_left_padded_biguint_be(
&mut commitment_length_encoded_bytes,
&commitment_y_bigint,
);
append_left_padded_biguint_be(&mut commitment_length_encoded_bytes, &length_bigint);

let mut keccak256_hasher = Keccak256::new();
keccak256_hasher.update(&commitment_length_encoded_bytes);
let commitment_hash: [u8; 32] = keccak256_hasher.finalize().into();
let hash = hash_eigenda_preimage(preimage)?;

Ok(commitment_hash)
Ok(hash)
}
}
}
69 changes: 9 additions & 60 deletions arbnode/batch_poster.go
Original file line number Diff line number Diff line change
Expand Up @@ -955,10 +955,10 @@ func (b *BatchPoster) encodeAddBatch(
methodName := sequencerBatchPostMethodName
if use4844 {
methodName = sequencerBatchPostWithBlobsMethodName
}
if useEigenDA {
} else if useEigenDA {
methodName = sequencerBatchPostWithEigendaMethodName
}

method, ok := b.seqInboxABI.Methods[methodName]
if !ok {
return nil, nil, errors.New("failed to find add batch method")
Expand All @@ -981,54 +981,6 @@ func (b *BatchPoster) encodeAddBatch(
)
} else if useEigenDA {

blobVerificationProofType, err := abi.NewType("tuple", "", []abi.ArgumentMarshaling{
{Name: "batchID", Type: "uint32"},
{Name: "blobIndex", Type: "uint32"},
{Name: "batchMetadata", Type: "tuple",
Components: []abi.ArgumentMarshaling{
{Name: "batchHeader", Type: "tuple",
Components: []abi.ArgumentMarshaling{
{Name: "blobHeadersRoot", Type: "bytes32"},
{Name: "quorumNumbers", Type: "bytes"},
{Name: "signedStakeForQuorums", Type: "bytes"},
{Name: "referenceBlockNumber", Type: "uint32"},
},
},
{Name: "signatoryRecordHash", Type: "bytes32"},
{Name: "confirmationBlockNumber", Type: "uint32"},
},
},
{
Name: "inclusionProof",
Type: "bytes",
},
{
Name: "quorumIndices",
Type: "bytes",
},
})

if err != nil {
return nil, nil, err
}

blobHeaderType, err := abi.NewType("tuple", "", []abi.ArgumentMarshaling{
{Name: "commitment", Type: "tuple", Components: []abi.ArgumentMarshaling{
{Name: "X", Type: "uint256"},
{Name: "Y", Type: "uint256"},
}},
{Name: "dataLength", Type: "uint32"},
{Name: "quorumBlobParams", Type: "tuple[]", Components: []abi.ArgumentMarshaling{
{Name: "quorumNumber", Type: "uint8"},
{Name: "adversaryThresholdPercentage", Type: "uint8"},
{Name: "confirmationThresholdPercentage", Type: "uint8"},
{Name: "chunkLength", Type: "uint32"},
}},
})
if err != nil {
return nil, nil, err
}

addressType, err := abi.NewType("address", "", nil)
if err != nil {
return nil, nil, err
Expand All @@ -1042,23 +994,20 @@ func (b *BatchPoster) encodeAddBatch(
// Create ABI arguments
arguments := abi.Arguments{
{Type: uint256Type},
{Type: blobVerificationProofType},
{Type: blobHeaderType},
{Type: eigenda.DACertTypeABI},
{Type: addressType},
{Type: uint256Type},
{Type: uint256Type},
{Type: uint256Type},
}

// define values array
values := make([]interface{}, 7)
values := make([]interface{}, 6)
values[0] = seqNum
values[1] = eigenDaBlobInfo.BlobVerificationProof
values[2] = eigenDaBlobInfo.BlobHeader
values[3] = b.config().gasRefunder
values[4] = new(big.Int).SetUint64(delayedMsg)
values[5] = new(big.Int).SetUint64(uint64(prevMsgNum))
values[6] = new(big.Int).SetUint64(uint64(newMsgNum))
values[1] = eigenDaBlobInfo
values[2] = b.config().gasRefunder
values[3] = new(big.Int).SetUint64(delayedMsg)
values[4] = new(big.Int).SetUint64(uint64(prevMsgNum))
values[5] = new(big.Int).SetUint64(uint64(newMsgNum))

calldata, err = arguments.PackValues(values)

Expand Down
45 changes: 42 additions & 3 deletions arbnode/sequencer_inbox.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ package arbnode
import (
"context"
"encoding/binary"
"encoding/json"
"errors"
"fmt"
"math/big"
Expand All @@ -17,6 +18,7 @@ import (
"github.com/ethereum/go-ethereum/core/types"
"github.com/offchainlabs/nitro/arbstate/daprovider"
"github.com/offchainlabs/nitro/arbutil"
"github.com/offchainlabs/nitro/eigenda"

"github.com/offchainlabs/nitro/solgen/go/bridgegen"
)
Expand All @@ -25,6 +27,7 @@ var sequencerBridgeABI *abi.ABI
var batchDeliveredID common.Hash
var addSequencerL2BatchFromOriginCallABI abi.Method
var sequencerBatchDataABI abi.Event
var addSequencerBatchFromEigenDACallABI abi.Method

const sequencerBatchDataEvent = "SequencerBatchData"

Expand All @@ -46,6 +49,7 @@ func init() {
}
batchDeliveredID = sequencerBridgeABI.Events["SequencerBatchDelivered"].ID
sequencerBatchDataABI = sequencerBridgeABI.Events[sequencerBatchDataEvent]
addSequencerBatchFromEigenDACallABI = sequencerBridgeABI.Methods["addSequencerL2BatchFromEigenDA"]
addSequencerL2BatchFromOriginCallABI = sequencerBridgeABI.Methods["addSequencerL2BatchFromOrigin0"]
}

Expand Down Expand Up @@ -174,15 +178,50 @@ func (m *SequencerInboxBatch) getSequencerData(ctx context.Context, client arbut
}

calldata := tx.Data()
data := []byte{daprovider.EigenDAMessageHeaderFlag}
data = append(data, calldata...)

return data, nil
args := make(map[string]interface{})
err = addSequencerBatchFromEigenDACallABI.Inputs.UnpackIntoMap(args, calldata[4:])
if err != nil {
return nil, err
}

certBytes, err := interfaceToBytesJSON(args["cert"])
if err != nil {
return nil, err
}

var blobInfo eigenda.EigenDABlobInfo
err = json.Unmarshal(certBytes, &blobInfo)
if err != nil {
return nil, err
}

arguments := abi.Arguments{
{Type: eigenda.DACertTypeABI},
}

b, err := arguments.Pack(blobInfo)
if err != nil {
return nil, err
}

msgData := []byte{daprovider.EigenDAMessageHeaderFlag}
msgData = append(msgData, b...)

return msgData, nil
default:
return nil, fmt.Errorf("batch has invalid data location %v", m.dataLocation)
}
}

// interfaceToBytesJSON serializes an arbitrary value to its JSON byte
// representation. It is used to round-trip ABI-unpacked map values into
// typed structs via encoding/json.
func interfaceToBytesJSON(data interface{}) ([]byte, error) {
	// json.Marshal already returns (nil, err) on failure, so its result
	// can be returned directly; the previous local also shadowed the
	// `bytes` package name.
	return json.Marshal(data)
}

func (m *SequencerInboxBatch) Serialize(ctx context.Context, client arbutil.L1Interface) ([]byte, error) {
if m.serialized != nil {
return m.serialized, nil
Expand Down
14 changes: 0 additions & 14 deletions eigenda/eigenda.go

Large diffs are not rendered by default.

Loading

0 comments on commit 80db0f0

Please sign in to comment.