Commit

Merge branch 'master' into diagnose
joshuacolvin0 authored Oct 16, 2024
2 parents 90ee8d9 + 5c1904d commit 56ec3aa
Showing 8 changed files with 222 additions and 80 deletions.
20 changes: 20 additions & 0 deletions .github/workflows/ci.yml
@@ -169,6 +169,26 @@ jobs:
         run: |
           echo "Running redis tests" >> full.log
           TEST_REDIS=redis://localhost:6379/0 gotestsum --format short-verbose -- -p 1 -run TestRedis ./arbnode/... ./system_tests/... -coverprofile=coverage-redis.txt -covermode=atomic -coverpkg=./...
+      - name: create block input json file
+        if: matrix.test-mode == 'defaults'
+        run: |
+          gotestsum --format short-verbose -- -run TestProgramStorage$ ./system_tests/... --count 1 --recordBlockInputs.WithBaseDir="${{ github.workspace }}/target" --recordBlockInputs.WithTimestampDirEnabled=false --recordBlockInputs.WithBlockIdInFileNameEnabled=false
+      - name: run arbitrator prover on block input json
+        if: matrix.test-mode == 'defaults'
+        run: |
+          make build-prover-bin
+          target/bin/prover target/machines/latest/machine.wavm.br -b --json-inputs="${{ github.workspace }}/target/TestProgramStorage/block_inputs.json"
+      - name: run jit prover on block input json
+        if: matrix.test-mode == 'defaults'
+        run: |
+          make build-jit
+          if [ -n "$(target/bin/jit --binary target/machines/latest/replay.wasm --cranelift --json-inputs='${{ github.workspace }}/target/TestProgramStorage/block_inputs.json')" ]; then
+            echo "Error: Command produced output."
+            exit 1
+          fi
       - name: run challenge tests
         if: matrix.test-mode == 'challenge'
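
The two new verification steps can be reproduced outside CI with the same commands, run from the repository root. This is a sketch that assumes a block_inputs.json has already been recorded under target/TestProgramStorage/ (for example by the test step above); the paths are illustrative:

  # Build both provers with the same make targets the workflow uses.
  make build-prover-bin build-jit

  INPUTS="target/TestProgramStorage/block_inputs.json"

  # The arbitrator prover replays the recorded block directly from the JSON inputs.
  target/bin/prover target/machines/latest/machine.wavm.br -b --json-inputs="$INPUTS"

  # The workflow treats any output from the jit prover as a failure.
  if [ -n "$(target/bin/jit --binary target/machines/latest/replay.wasm --cranelift --json-inputs="$INPUTS")" ]; then
      echo "Error: Command produced output."
      exit 1
  fi
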
120 changes: 62 additions & 58 deletions arbitrator/jit/src/machine.rs
@@ -2,8 +2,8 @@
 // For license information, see https://github.com/nitro/blob/master/LICENSE

 use crate::{
-    arbcompress, caller_env::GoRuntimeState, program, socket, stylus_backend::CothreadHandler,
-    wasip1_stub, wavmio, Opts,
+    arbcompress, caller_env::GoRuntimeState, prepare::prepare_env, program, socket,
+    stylus_backend::CothreadHandler, wasip1_stub, wavmio, Opts,
 };
 use arbutil::{Bytes32, Color, PreimageType};
 use eyre::{bail, ErrReport, Result, WrapErr};
@@ -215,72 +215,76 @@ pub struct WasmEnv {

 impl WasmEnv {
     pub fn cli(opts: &Opts) -> Result<Self> {
-        let mut env = WasmEnv::default();
-        env.process.forks = opts.forks;
-        env.process.debug = opts.debug;
+        if let Some(json_inputs) = opts.json_inputs.clone() {
+            prepare_env(json_inputs, opts.debug)
+        } else {
+            let mut env = WasmEnv::default();
+            env.process.forks = opts.forks;
+            env.process.debug = opts.debug;

-        let mut inbox_position = opts.inbox_position;
-        let mut delayed_position = opts.delayed_inbox_position;
+            let mut inbox_position = opts.inbox_position;
+            let mut delayed_position = opts.delayed_inbox_position;

-        for path in &opts.inbox {
-            let mut msg = vec![];
-            File::open(path)?.read_to_end(&mut msg)?;
-            env.sequencer_messages.insert(inbox_position, msg);
-            inbox_position += 1;
-        }
-        for path in &opts.delayed_inbox {
-            let mut msg = vec![];
-            File::open(path)?.read_to_end(&mut msg)?;
-            env.delayed_messages.insert(delayed_position, msg);
-            delayed_position += 1;
-        }
+            for path in &opts.inbox {
+                let mut msg = vec![];
+                File::open(path)?.read_to_end(&mut msg)?;
+                env.sequencer_messages.insert(inbox_position, msg);
+                inbox_position += 1;
+            }
+            for path in &opts.delayed_inbox {
+                let mut msg = vec![];
+                File::open(path)?.read_to_end(&mut msg)?;
+                env.delayed_messages.insert(delayed_position, msg);
+                delayed_position += 1;
+            }

-        if let Some(path) = &opts.preimages {
-            let mut file = BufReader::new(File::open(path)?);
-            let mut preimages = Vec::new();
-            let filename = path.to_string_lossy();
-            loop {
-                let mut size_buf = [0u8; 8];
-                match file.read_exact(&mut size_buf) {
-                    Ok(()) => {}
-                    Err(err) if err.kind() == ErrorKind::UnexpectedEof => break,
-                    Err(err) => bail!("Failed to parse {filename}: {}", err),
-                }
-                let size = u64::from_le_bytes(size_buf) as usize;
-                let mut buf = vec![0u8; size];
-                file.read_exact(&mut buf)?;
-                preimages.push(buf);
-            }
-            let keccak_preimages = env.preimages.entry(PreimageType::Keccak256).or_default();
-            for preimage in preimages {
-                let mut hasher = Keccak256::new();
-                hasher.update(&preimage);
-                let hash = hasher.finalize().into();
-                keccak_preimages.insert(hash, preimage);
-            }
-        }
+            if let Some(path) = &opts.preimages {
+                let mut file = BufReader::new(File::open(path)?);
+                let mut preimages = Vec::new();
+                let filename = path.to_string_lossy();
+                loop {
+                    let mut size_buf = [0u8; 8];
+                    match file.read_exact(&mut size_buf) {
+                        Ok(()) => {}
+                        Err(err) if err.kind() == ErrorKind::UnexpectedEof => break,
+                        Err(err) => bail!("Failed to parse {filename}: {}", err),
+                    }
+                    let size = u64::from_le_bytes(size_buf) as usize;
+                    let mut buf = vec![0u8; size];
+                    file.read_exact(&mut buf)?;
+                    preimages.push(buf);
+                }
+                let keccak_preimages = env.preimages.entry(PreimageType::Keccak256).or_default();
+                for preimage in preimages {
+                    let mut hasher = Keccak256::new();
+                    hasher.update(&preimage);
+                    let hash = hasher.finalize().into();
+                    keccak_preimages.insert(hash, preimage);
+                }
+            }

-        fn parse_hex(arg: &Option<String>, name: &str) -> Result<Bytes32> {
-            match arg {
-                Some(arg) => {
-                    let mut arg = arg.as_str();
-                    if arg.starts_with("0x") {
-                        arg = &arg[2..];
-                    }
-                    let mut bytes32 = [0u8; 32];
-                    hex::decode_to_slice(arg, &mut bytes32)
-                        .wrap_err_with(|| format!("failed to parse {} contents", name))?;
-                    Ok(bytes32.into())
-                }
-                None => Ok(Bytes32::default()),
-            }
-        }
+            fn parse_hex(arg: &Option<String>, name: &str) -> Result<Bytes32> {
+                match arg {
+                    Some(arg) => {
+                        let mut arg = arg.as_str();
+                        if arg.starts_with("0x") {
+                            arg = &arg[2..];
+                        }
+                        let mut bytes32 = [0u8; 32];
+                        hex::decode_to_slice(arg, &mut bytes32)
+                            .wrap_err_with(|| format!("failed to parse {} contents", name))?;
+                        Ok(bytes32.into())
+                    }
+                    None => Ok(Bytes32::default()),
+                }
+            }

-        let last_block_hash = parse_hex(&opts.last_block_hash, "--last-block-hash")?;
-        let last_send_root = parse_hex(&opts.last_send_root, "--last-send-root")?;
-        env.small_globals = [opts.inbox_position, opts.position_within_message];
-        env.large_globals = [last_block_hash, last_send_root];
-        Ok(env)
+            let last_block_hash = parse_hex(&opts.last_block_hash, "--last-block-hash")?;
+            let last_send_root = parse_hex(&opts.last_send_root, "--last-send-root")?;
+            env.small_globals = [opts.inbox_position, opts.position_within_message];
+            env.large_globals = [last_block_hash, last_send_root];
+            Ok(env)
+        }
     }

     pub fn send_results(&mut self, error: Option<String>, memory_used: Pages) {
5 changes: 5 additions & 0 deletions arbitrator/jit/src/main.rs
@@ -10,6 +10,7 @@ use structopt::StructOpt;
 mod arbcompress;
 mod caller_env;
 mod machine;
+mod prepare;
 mod program;
 mod socket;
 mod stylus_backend;
@@ -46,6 +47,10 @@ pub struct Opts {
     debug: bool,
     #[structopt(long)]
     require_success: bool,
+    // JSON inputs supercede any of the command-line inputs which could
+    // be specified in the JSON file.
+    #[structopt(long)]
+    json_inputs: Option<PathBuf>,
 }

 fn main() -> Result<()> {
73 changes: 73 additions & 0 deletions arbitrator/jit/src/prepare.rs
@@ -0,0 +1,73 @@
+// Copyright 2022-2024, Offchain Labs, Inc.
+// For license information, see https://github.com/nitro/blob/master/LICENSE
+
+use crate::WasmEnv;
+use arbutil::{Bytes32, PreimageType};
+use eyre::Ok;
+use prover::parse_input::FileData;
+use std::env;
+use std::fs::File;
+use std::io::BufReader;
+use std::path::PathBuf;
+
+// local_target matches rawdb.LocalTarget() on the go side.
+// While generating json_inputs file, one should make sure user_wasms map
+// has entry for the system's arch that jit validation is being run on
+pub fn local_target() -> String {
+    if env::consts::OS == "linux" {
+        match env::consts::ARCH {
+            "aarch64" => "arm64".to_string(),
+            "x86_64" => "amd64".to_string(),
+            _ => "host".to_string(),
+        }
+    } else {
+        "host".to_string()
+    }
+}
+
+pub fn prepare_env(json_inputs: PathBuf, debug: bool) -> eyre::Result<WasmEnv> {
+    let file = File::open(json_inputs)?;
+    let reader = BufReader::new(file);
+
+    let data = FileData::from_reader(reader)?;
+
+    let mut env = WasmEnv::default();
+    env.process.forks = false; // Should be set to false when using json_inputs
+    env.process.debug = debug;
+
+    let block_hash: [u8; 32] = data.start_state.block_hash.try_into().unwrap();
+    let block_hash: Bytes32 = block_hash.into();
+    let send_root: [u8; 32] = data.start_state.send_root.try_into().unwrap();
+    let send_root: Bytes32 = send_root.into();
+    let bytes32_vals: [Bytes32; 2] = [block_hash, send_root];
+    let u64_vals: [u64; 2] = [data.start_state.batch, data.start_state.pos_in_batch];
+    env.small_globals = u64_vals;
+    env.large_globals = bytes32_vals;
+
+    for batch_info in data.batch_info.iter() {
+        env.sequencer_messages
+            .insert(batch_info.number, batch_info.data_b64.clone());
+    }
+
+    if data.delayed_msg_nr != 0 && !data.delayed_msg_b64.is_empty() {
+        env.delayed_messages
+            .insert(data.delayed_msg_nr, data.delayed_msg_b64.clone());
+    }
+
+    for (ty, inner_map) in data.preimages_b64 {
+        let preimage_ty = PreimageType::try_from(ty as u8)?;
+        let map = env.preimages.entry(preimage_ty).or_default();
+        for (hash, preimage) in inner_map {
+            map.insert(hash, preimage);
+        }
+    }
+
+    if let Some(user_wasms) = data.user_wasms.get(&local_target()) {
+        for (module_hash, module_asm) in user_wasms.iter() {
+            env.module_asms
+                .insert(*module_hash, module_asm.as_vec().into());
+        }
+    }
+
+    Ok(env)
+}
4 changes: 2 additions & 2 deletions staker/stateless_block_validator.go
@@ -511,12 +511,12 @@ func (v *StatelessBlockValidator) ValidateResult(
 	return true, &entry.End, nil
 }

-func (v *StatelessBlockValidator) ValidationInputsAt(ctx context.Context, pos arbutil.MessageIndex, target ethdb.WasmTarget) (server_api.InputJSON, error) {
+func (v *StatelessBlockValidator) ValidationInputsAt(ctx context.Context, pos arbutil.MessageIndex, targets ...ethdb.WasmTarget) (server_api.InputJSON, error) {
 	entry, err := v.CreateReadyValidationEntry(ctx, pos)
 	if err != nil {
 		return server_api.InputJSON{}, err
 	}
-	input, err := entry.ToInput([]ethdb.WasmTarget{target})
+	input, err := entry.ToInput(targets)
 	if err != nil {
 		return server_api.InputJSON{}, err
 	}
30 changes: 27 additions & 3 deletions system_tests/common_test.go
@@ -9,6 +9,7 @@ import (
 	"encoding/binary"
 	"encoding/hex"
 	"encoding/json"
+	"flag"
 	"io"
 	"math/big"
 	"net"
@@ -1716,11 +1717,23 @@ func logParser[T any](t *testing.T, source string, name string) func(*types.Log)
 	}
 }

+var (
+	recordBlockInputsEnable = flag.Bool("recordBlockInputs.enable", true, "Whether to record block inputs as a json file")
+	recordBlockInputsWithSlug = flag.String("recordBlockInputs.WithSlug", "", "Slug directory for validationInputsWriter")
+	recordBlockInputsWithBaseDir = flag.String("recordBlockInputs.WithBaseDir", "", "Base directory for validationInputsWriter")
+	recordBlockInputsWithTimestampDirEnabled = flag.Bool("recordBlockInputs.WithTimestampDirEnabled", true, "Whether to add timestamp directory while recording block inputs")
+	recordBlockInputsWithBlockIdInFileNameEnabled = flag.Bool("recordBlockInputs.WithBlockIdInFileNameEnabled", true, "Whether to record block inputs using test specific block_id")
+)
+
 // recordBlock writes a json file with all of the data needed to validate a block.
 //
 // This can be used as an input to the arbitrator prover to validate a block.
-func recordBlock(t *testing.T, block uint64, builder *NodeBuilder) {
+func recordBlock(t *testing.T, block uint64, builder *NodeBuilder, targets ...ethdb.WasmTarget) {
 	t.Helper()
+	flag.Parse()
+	if !*recordBlockInputsEnable {
+		return
+	}
 	ctx := builder.ctx
 	inboxPos := arbutil.MessageIndex(block)
 	for {
@@ -1733,9 +1746,20 @@ func recordBlock(t *testing.T, block uint64, builder *NodeBuilder) {
 			break
 		}
 	}
-	validationInputsWriter, err := inputs.NewWriter(inputs.WithSlug(t.Name()))
+	var options []inputs.WriterOption
+	options = append(options, inputs.WithTimestampDirEnabled(*recordBlockInputsWithTimestampDirEnabled))
+	options = append(options, inputs.WithBlockIdInFileNameEnabled(*recordBlockInputsWithBlockIdInFileNameEnabled))
+	if *recordBlockInputsWithBaseDir != "" {
+		options = append(options, inputs.WithBaseDir(*recordBlockInputsWithBaseDir))
+	}
+	if *recordBlockInputsWithSlug != "" {
+		options = append(options, inputs.WithSlug(*recordBlockInputsWithSlug))
+	} else {
+		options = append(options, inputs.WithSlug(t.Name()))
+	}
+	validationInputsWriter, err := inputs.NewWriter(options...)
 	Require(t, err)
-	inputJson, err := builder.L2.ConsensusNode.StatelessBlockValidator.ValidationInputsAt(ctx, inboxPos, rawdb.TargetWavm)
+	inputJson, err := builder.L2.ConsensusNode.StatelessBlockValidator.ValidationInputsAt(ctx, inboxPos, targets...)
 	if err != nil {
 		Fatal(t, "failed to get validation inputs", block, err)
 	}
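
For local runs, the same inputs file that CI later feeds to the provers can be produced by passing the new recordBlockInputs flags through gotestsum, mirroring the workflow step above. A sketch, with an illustrative output directory:

  # Writes target/TestProgramStorage/block_inputs.json: disabling the timestamp
  # directory and the block id in the file name keeps the output path stable.
  gotestsum --format short-verbose -- -run 'TestProgramStorage$' ./system_tests/... --count 1 \
      --recordBlockInputs.WithBaseDir="$PWD/target" \
      --recordBlockInputs.WithTimestampDirEnabled=false \
      --recordBlockInputs.WithBlockIdInFileNameEnabled=false
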
6 changes: 3 additions & 3 deletions system_tests/program_test.go
@@ -423,9 +423,9 @@ func storageTest(t *testing.T, jit bool) {

 	validateBlocks(t, 2, jit, builder)

-	// Captures a block_input_<id>.json file for the block that included the
-	// storage write transaction.
-	recordBlock(t, receipt.BlockNumber.Uint64(), builder)
+	// Captures a block_inputs json file for the block that included the
+	// storage write transaction. Include wasm targets necessary for arbitrator prover and jit binaries
+	recordBlock(t, receipt.BlockNumber.Uint64(), builder, rawdb.TargetWavm, rawdb.LocalTarget())
 }

 func TestProgramTransientStorage(t *testing.T) {