Skip to content
This repository has been archived by the owner on Jun 11, 2024. It is now read-only.

Commit

Permalink
Fix statedb to differentiate empty value and delete (#126)
Browse files Browse the repository at this point in the history
* 🐛 Fix statedb to differentiate empty value and delete

* Apply suggestions from code review

Co-authored-by: hatef <[email protected]>

* 🐛 Fix state writer update then delete

* ♻️ Refactor test to use new interface

---------

Co-authored-by: hatef <[email protected]>
  • Loading branch information
shuse2 and hrmhatef authored Aug 3, 2023
1 parent 101a43c commit b112cb3
Show file tree
Hide file tree
Showing 6 changed files with 129 additions and 84 deletions.
43 changes: 30 additions & 13 deletions src/diff.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/// diff provides data structure to revert the state for StateDB.
use crate::batch;
use crate::codec;
use crate::types::{Cache, KVPair, KVPairCodec, NestedVec};
use crate::types::{Cache, HashKind, HashWithKind, KVPair, KVPairCodec, NestedVec};

/// Diff maintains difference between each state changes, and it is used when reverting the state.
/// When updating state to next state, it maintains:
Expand Down Expand Up @@ -78,18 +78,24 @@ impl Diff {
writer.result().to_vec()
}

/// revert_update returns cache value with original data.
/// revert_hashed_update returns cache value with original data.
/// Deleting data is represented as empty bytes.
pub fn revert_update(&self) -> Cache {
pub fn revert_hashed_update(&self) -> Cache {
let mut result = Cache::new();
for kv in self.updated.iter() {
result.insert(kv.key_as_vec(), kv.value_as_vec());
result.insert(
kv.key_as_vec().hash_with_kind(HashKind::Key),
kv.value_as_vec().hash_with_kind(HashKind::Value),
);
}
for kv in self.deleted.iter() {
result.insert(kv.key_as_vec(), kv.value_as_vec());
result.insert(
kv.key_as_vec().hash_with_kind(HashKind::Key),
kv.value_as_vec().hash_with_kind(HashKind::Value),
);
}
for key in self.created.iter() {
result.insert(key.to_vec(), vec![]);
result.insert(key.to_vec().hash_with_kind(HashKind::Key), vec![]);
}
result
}
Expand Down Expand Up @@ -149,22 +155,33 @@ mod tests {
}

#[test]
fn test_diff_revert_hashed_update() {
    // One key of each kind: created, updated, deleted.
    let created = vec![b"test_key".to_vec()];
    let updated = vec![KVPair::new(b"test_key_updated", b"test_value_updated")];
    let deleted = vec![KVPair::new(b"test_key_deleted", b"test_value_deleted")];
    let diff = Diff::new(created, updated, deleted);

    let cache = diff.revert_hashed_update();

    // Created key reverts to deletion, represented as empty bytes (unhashed).
    assert_eq!(
        cache.get(&b"test_key".to_vec().hash_with_kind(HashKind::Key)),
        Some(&vec![])
    );
    // Updated key reverts to its original value, hashed.
    assert_eq!(
        cache.get(&b"test_key_updated".to_vec().hash_with_kind(HashKind::Key)),
        Some(
            &b"test_value_updated"
                .to_vec()
                .hash_with_kind(HashKind::Value)
        )
    );
    // Deleted key reverts to the value it held before deletion, hashed.
    assert_eq!(
        cache.get(&b"test_key_deleted".to_vec().hash_with_kind(HashKind::Key)),
        Some(
            &b"test_value_deleted"
                .to_vec()
                .hash_with_kind(HashKind::Value)
        )
    );
}

Expand Down
55 changes: 0 additions & 55 deletions src/sparse_merkle_tree/smt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -504,19 +504,6 @@ impl UpdateData {
Self { data }
}

/// new_with_hash builds UpdateData from a cache of raw key-value pairs,
/// hashing every key and every non-empty value. Empty values are kept as
/// empty bytes, which denotes deletion in the tree update.
pub fn new_with_hash(data: Cache) -> Self {
    let mut hashed = Cache::new();
    for (raw_key, raw_value) in data {
        // Empty bytes are passed through untouched; everything else is hashed.
        let hashed_value = if raw_value.is_empty() {
            vec![]
        } else {
            raw_value.hash_with_kind(HashKind::Value)
        };
        hashed.insert(raw_key.hash_with_kind(HashKind::Key), hashed_value);
    }
    Self { data: hashed }
}

/// insert adds the pair's key and value (as owned vectors) to the update data.
pub fn insert(&mut self, kv: SharedKVPair) {
self.data.insert(kv.key_as_vec(), kv.value_as_vec());
}
Expand Down Expand Up @@ -2316,48 +2303,6 @@ mod tests {
assert_eq!(data.data.get(&vec![7, 8, 9]).unwrap(), &vec![10, 11, 12]);
}

#[test]
fn test_update_data_new_with_hash() {
// Each tuple: (hex-encoded raw key, raw value,
//              expected hashed key bytes, expected hashed value bytes).
// The second entry has an empty value, which must pass through unhashed.
let test_data = vec![
(
"e52d9c508c502347344d8c07ad91cbd6068afc75ff6292f062a09ca381c89e71",
vec![4, 5, 6],
vec![
8, 79, 237, 8, 185, 120, 100, 174, 10, 85, 175, 109, 20, 186, 226, 221, 213,
152, 208, 206, 28, 136, 213, 68, 247, 175, 123, 58, 165, 185, 85, 84, 232,
135, 165, 146, 179, 229,
],
vec![
120, 124, 121, 142, 57, 165, 188, 25, 16, 53, 91, 174, 109, 12, 216, 122, 54,
178, 225, 15, 208, 32, 42, 131, 227, 187, 107, 0, 93, 168, 52, 114,
],
),
(
"084fed08b978af4d7d196a7446a86b58009e636b611db16211b65a9aadff29c5",
vec![],
vec![
229, 45, 156, 80, 140, 80, 243, 18, 13, 123, 218, 140, 149, 81, 32, 198, 79,
128, 242, 66, 193, 113, 167, 120, 215, 24, 182, 43, 79, 99, 118, 214, 214,
172, 252, 124, 176, 184,
],
vec![],
),
];

// Build the raw cache from the hex keys and raw values.
let mut cache = Cache::new();
for data in test_data.iter() {
cache.insert(hex::decode(data.0).unwrap(), data.1.clone());
}

let data = UpdateData::new_with_hash(cache);
let (keys, values) = data.entries();

// Every expected hashed key/value must appear in the resulting entries.
test_data.iter().for_each(|data| {
assert!(keys.contains(&data.2.as_slice()));
assert!(values.contains(&data.3.as_slice()));
});
}

#[test]
fn test_query_proof_with_proof() {
let pair = Arc::new(KVPair(
Expand Down
4 changes: 2 additions & 2 deletions src/state/state_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ impl StateDB {

let diff = diff::Diff::decode(&diff_bytes)
.map_err(|err| DataStoreError::Unknown(err.to_string()))?;
let data = smt::UpdateData::new_with_hash(diff.revert_update());
let data = smt::UpdateData::new_from(diff.revert_hashed_update());
let mut smt_db = smt_db::SmtDB::new(conn);
let mut tree = smt::SparseMerkleTree::new(state_root, key_length, consts::SUBTREE_HEIGHT);
let prev_root = tree
Expand Down Expand Up @@ -261,7 +261,7 @@ impl StateDB {
) -> Result<(), mpsc::SendError<DbMessage>> {
let key_length = self.options.key_length();
let w = writer.lock().unwrap();
let data = smt::UpdateData::new_with_hash(w.get_updated());
let data = smt::UpdateData::new_from(w.get_hashed_updated());
let mut smt_db = smt_db::SmtDB::new(&self.common);
let mut tree =
smt::SparseMerkleTree::new(&commit_data.prev_root, key_length, consts::SUBTREE_HEIGHT);
Expand Down
67 changes: 54 additions & 13 deletions src/state/state_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ use crate::database::options::IterationOption;
use crate::database::traits::{DatabaseKind, JsNewWithArcMutex, NewDBWithKeyLength};
use crate::database::types::{JsArcMutex, Kind as DBKind};
use crate::diff;
use crate::types::{Cache, KVPair, KeyLength, SharedKVPair, VecOption};
use crate::types::{Cache, HashKind, HashWithKind, KVPair, KeyLength, SharedKVPair, VecOption};
use crate::utils;

pub type SendableStateWriter = JsArcMutex<StateWriter>;
Expand Down Expand Up @@ -170,6 +170,7 @@ impl StateWriter {
self.cache.remove(key);
return;
}
cached.dirty = false;
cached.deleted = true;
}

Expand All @@ -192,17 +193,20 @@ impl StateWriter {
Ok(())
}

/// get_updated returns all the updated key-value pairs.
/// get_hashed_updated returns all the updated key-value pairs.
/// if the key is removed, value will be empty slice.
pub fn get_updated(&self) -> Cache {
pub fn get_hashed_updated(&self) -> Cache {
let mut result = Cache::new();
for (key, value) in self.cache.iter() {
if value.init.is_none() || value.dirty {
result.insert(key.clone(), value.value.clone());
result.insert(
key.hash_with_kind(HashKind::Key),
value.value.hash_with_kind(HashKind::Value),
);
continue;
}
if value.deleted {
result.insert(key.clone(), vec![]);
result.insert(key.hash_with_kind(HashKind::Key), vec![]);
}
}
result
Expand Down Expand Up @@ -446,17 +450,44 @@ mod tests {
#[test]
fn test_state_writer_update() {
    let mut writer = StateWriter::default();

    // New key that is then updated: must surface as hashed key -> hashed value.
    let key = &[1, 2, 3, 4, 5, 6, 7, 8];
    writer.cache_new(&SharedKVPair::new(key, &[5, 6, 7, 8]));
    writer.update(&KVPair::new(key, &[9, 10, 11, 12])).unwrap();

    // New key with an empty value: empty bytes are still hashed
    // (empty value != deletion).
    let empty_key = &[2, 2, 3, 4, 5, 6, 7, 8];
    writer.cache_new(&SharedKVPair::new(empty_key, &[]));

    // Existing key that is deleted: must surface as a raw empty slice,
    // which is the deletion marker.
    let deleting_key = &[9, 2, 3, 4, 5, 6, 7, 8];
    writer.cache_existing(&SharedKVPair::new(deleting_key, &[7, 7, 7]));
    writer.delete(deleting_key);

    let result = writer.get_hashed_updated();
    assert_eq!(result.len(), 3);
    assert_eq!(
        result
            .get(
                &[1, 2, 3, 4, 5, 6, 7, 8]
                    .to_vec()
                    .hash_with_kind(HashKind::Key)
            )
            .unwrap(),
        &[9, 10, 11, 12].to_vec().hash_with_kind(HashKind::Value),
        "non-empty value must return hashed value",
    );
    assert_eq!(
        result
            .get(&empty_key.to_vec().hash_with_kind(HashKind::Key))
            .unwrap(),
        &[].to_vec().hash_with_kind(HashKind::Value),
        "empty value must return hashed empty slice",
    );
    assert_eq!(
        result
            .get(&deleting_key.to_vec().hash_with_kind(HashKind::Key))
            .unwrap(),
        &[].to_vec(),
        "deleted key must return empty slice",
    );
}

Expand All @@ -474,8 +505,18 @@ mod tests {
let mut writer = StateWriter::default();
writer.cache_existing(&SharedKVPair::new(&[1, 2, 3, 4], &[5, 6, 7, 8]));

writer
.update(&KVPair::new(&[1, 2, 3, 4], &[7, 7, 7, 7]))
.unwrap();
assert!(writer.cache.get(&[1, 2, 3, 4].to_vec()).unwrap().dirty);

writer.delete(&[1, 2, 3, 4]);
let result = writer.get(&[1, 2, 3, 4]);
assert!(!writer.cache.get(&[1, 2, 3, 4].to_vec()).unwrap().dirty);
assert_eq!(
writer.cache.get(&[1, 2, 3, 4].to_vec()).unwrap().dirty,
!writer.cache.get(&[1, 2, 3, 4].to_vec()).unwrap().deleted
);
assert_eq!(result.0, &[]);
assert!(result.1);
assert!(result.2);
Expand Down
1 change: 0 additions & 1 deletion test/sparse_merkle_tree.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
*
* Removal or modification of this copyright notice is prohibited.
*/
const os = require('os');
const { SparseMerkleTree } = require('../main');
const { getRandomBytes } = require('./utils');
const { isInclusionProofForQueryKey } = require('../utils');
Expand Down
43 changes: 43 additions & 0 deletions test/statedb.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,12 @@ const crypto = require('crypto');
const { StateDB, NotFoundError } = require('../main');
const { getRandomBytes } = require('./utils');

const sha256 = val => {
const hasher = crypto.createHash('sha256');
hasher.update(val);
return hasher.digest();
};

describe('statedb', () => {
const initState = [
{
Expand Down Expand Up @@ -662,5 +668,42 @@ describe('statedb', () => {
await expect(db.calculateRoot(proof)).resolves.not.toEqual(root);
});
});

describe('delete and set empty bytes', () => {
  it('should remove deleted key from SMT when key is deleted', async () => {
    const currentState = await db.getCurrentState();

    const writer = db.newReadWriter();
    await writer.del(initState[7].key);

    const nextRoot = await db.commit(writer, currentState.version + 1, currentState.root);

    expect(nextRoot).not.toEqual(currentState.root);

    // SMT query key = 6-byte prefix + sha256 of the remainder of the key.
    const queryKeys = [
      Buffer.concat([initState[7].key.subarray(0, 6), sha256(initState[7].key.subarray(6))]),
    ];
    const proof = await db.prove(nextRoot, queryKeys);

    // deleted key should not be included. ie: non-inclusion proof
    const isValidNonInclusionProof = await db.verifyNonInclusionProof(nextRoot, queryKeys, proof);
    expect(isValidNonInclusionProof).toBe(true);
  });

  it('should not delete the key for SMT when value is empty bytes', async () => {
    const currentState = await db.getCurrentState();

    const writer = db.newReadWriter();
    await writer.set(initState[6].key, Buffer.alloc(0));

    const nextRoot = await db.commit(writer, currentState.version + 1, currentState.root);

    expect(nextRoot).not.toEqual(currentState.root);

    const key = [Buffer.concat([initState[6].key.subarray(0, 6), sha256(initState[6].key.subarray(6))])];
    const proof = await db.prove(nextRoot, key);

    // key with empty value should result in inclusion proof
    const isValidInclusionProof = await db.verifyInclusionProof(nextRoot, key, proof);
    expect(isValidInclusionProof).toBe(true);
  });
});
});
});

0 comments on commit b112cb3

Please sign in to comment.