Merge pull request #57 from pshenmic/feat/scheme-hardening
Improve database schema
pshenmic authored Oct 3, 2023
2 parents 079467b + b73cb9e commit a3c4777
Showing 15 changed files with 63 additions and 47 deletions.
26 changes: 13 additions & 13 deletions packages/api/src/controllers/BlockController.js
@@ -7,12 +7,12 @@ class BlockController {

getBlockByHash = async (request, response) => {
const results = await this.knex
- .select('blocks.id', 'blocks.hash as hash', 'state_transitions.hash as st_hash',
- 'blocks.block_height as height', 'blocks.timestamp as timestamp',
+ .select('blocks.hash as hash', 'state_transitions.hash as st_hash',
+ 'blocks.height as height', 'blocks.timestamp as timestamp',
'blocks.block_version as block_version', 'blocks.app_version as app_version',
- 'blocks.l1_locked_height as l1_locked_height', 'blocks.chain as chain')
+ 'blocks.l1_locked_height as l1_locked_height')
.from('blocks')
- .leftJoin('state_transitions', 'state_transitions.block_id', 'blocks.id')
+ .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash')
.where('blocks.hash', request.params.hash);

const [block] = results
@@ -36,27 +36,27 @@ class BlockController {
const {from, to, order = 'desc'} = request.query

const subquery = this.knex('blocks')
- .select('blocks.id', 'blocks.hash as hash',
- 'blocks.block_height as height', 'blocks.timestamp as timestamp',
+ .select('blocks.hash as hash',
+ 'blocks.height as height', 'blocks.timestamp as timestamp',
'blocks.block_version as block_version', 'blocks.app_version as app_version',
- 'blocks.l1_locked_height as l1_locked_height', 'blocks.chain as chain').as('blocks')
+ 'blocks.l1_locked_height as l1_locked_height').as('blocks')
.where(function () {
if (from && to) {
- this.where('block_height', '>=', from)
- this.where('block_height', '<=', to)
+ this.where('height', '>=', from)
+ this.where('height', '<=', to)
}
})
.limit(30)
- .orderBy('id', order);
+ .orderBy('blocks.height', order);

const rows = await this.knex(subquery)
.select('blocks.hash as hash',
'height', 'timestamp',
'block_version', 'app_version',
- 'l1_locked_height', 'chain', 'state_transitions.hash as st_hash')
- .leftJoin('state_transitions', 'state_transitions.block_id', 'blocks.id')
+ 'l1_locked_height', 'state_transitions.hash as st_hash')
+ .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash')
.groupBy('blocks.hash', 'height', 'blocks.timestamp', 'block_version', 'app_version',
- 'l1_locked_height', 'chain', 'state_transitions.hash')
+ 'l1_locked_height', 'state_transitions.hash')
.orderBy('height', 'desc')

// map-reduce Blocks with Transactions
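
For orientation, a rough SQL equivalent of the updated getBlockByHash query above (a sketch, not the literal output knex generates; quoting and parameter style are illustrative):

-- Approximate query: the state_transitions join now goes through block_hash.
SELECT blocks.hash             AS hash,
       state_transitions.hash  AS st_hash,
       blocks.height           AS height,
       blocks.timestamp        AS timestamp,
       blocks.block_version    AS block_version,
       blocks.app_version      AS app_version,
       blocks.l1_locked_height AS l1_locked_height
FROM blocks
LEFT JOIN state_transitions
       ON state_transitions.block_hash = blocks.hash
WHERE blocks.hash = $1; -- request.params.hash
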
2 changes: 1 addition & 1 deletion packages/api/src/controllers/MainController.js
@@ -4,7 +4,7 @@ class MainController {
}

getStatus = async (request, response) => {
- const [result] = await this.knex('blocks').max('block_height')
+ const [result] = await this.knex('blocks').max('height')

const {max} = result

8 changes: 4 additions & 4 deletions packages/api/src/controllers/TransactionController.js
@@ -12,17 +12,17 @@ class TransactionController {

const rows = await this.knex
.select('state_transitions.hash as hash', 'state_transitions.data as data', 'state_transitions.type as type',
- 'state_transitions.index as index', 'blocks.block_height as block_height', 'blocks.timestamp as timestamp')
+ 'state_transitions.index as index', 'blocks.height as block_height', 'blocks.timestamp as timestamp')
.from('state_transitions')
- .leftJoin('blocks', 'blocks.id', 'state_transitions.block_id')
+ .leftJoin('blocks', 'blocks.hash', 'state_transitions.block_hash')
.where((builder) => {
if (from && to) {
builder.where('block_height', '<', to);
builder.where('block_height', '>', from);
}
})
.limit(30)
- .orderBy('blocks.id', 'desc')
+ .orderBy('blocks.height', 'desc')

const transactions = rows.map((row) => Transaction.fromJSON(row))

@@ -34,7 +34,7 @@

const [row] = await this.knex('state_transitions')
.select('state_transitions.hash as hash', 'state_transitions.data as data', 'state_transitions.type as type',
- 'state_transitions.index as index', 'blocks.block_height as block_height', 'blocks.timestamp as timestamp')
+ 'state_transitions.index as index', 'blocks.height as block_height', 'blocks.timestamp as timestamp')
.where('state_transitions.hash', txHash)
.leftJoin('blocks', 'blocks.id', 'state_transitions.block_id')

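
The transactions listing above takes a similar shape once the join key is block_hash; again only a sketch of the intended SQL, not the exact builder output, with placeholder bounds:

-- Approximate transactions listing after the join moved to block_hash.
SELECT state_transitions.hash  AS hash,
       state_transitions.data  AS data,
       state_transitions.type  AS type,
       state_transitions.index AS index,
       blocks.height           AS block_height,
       blocks.timestamp        AS timestamp
FROM state_transitions
LEFT JOIN blocks ON blocks.hash = state_transitions.block_hash
WHERE blocks.height > $1 -- from
  AND blocks.height < $2 -- to
ORDER BY blocks.height DESC
LIMIT 30;
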
8 changes: 3 additions & 5 deletions packages/api/src/models/BlockHeader.js
@@ -5,19 +5,17 @@ module.exports = class BlockHeader {
block_version
app_version
l1_locked_height
- chain

- constructor(hash, height, timestamp, blockVersion, appVersion, l1LockedHeight, chain) {
+ constructor(hash, height, timestamp, blockVersion, appVersion, l1LockedHeight) {
this.hash = hash;
this.height = height;
this.timestamp = timestamp;
this.blockVersion = blockVersion;
this.appVersion = appVersion;
this.l1LockedHeight = l1LockedHeight;
- this.chain = chain;
}

- static fromJSON({hash, height, timestamp, block_version, app_version, l1_locked_height, chain}) {
- return new BlockHeader(hash, height, new Date(timestamp), block_version, app_version, l1_locked_height, chain)
+ static fromJSON({hash, height, timestamp, block_version, app_version, l1_locked_height}) {
+ return new BlockHeader(hash, height, new Date(timestamp), block_version, app_version, l1_locked_height)
}
}
2 changes: 2 additions & 0 deletions packages/indexer/migrations/V10__drop_chain_field.sql
@@ -0,0 +1,2 @@
ALTER TABLE blocks
DROP COLUMN chain;
13 changes: 13 additions & 0 deletions packages/indexer/migrations/V4__drop_block_id.sql
@@ -0,0 +1,13 @@
ALTER TABLE state_transitions
DROP CONSTRAINT state_transitions_block_id_fkey;

ALTER TABLE state_transitions
DROP COLUMN block_id;

ALTER TABLE blocks
DROP COLUMN id;

ALTER TABLE state_transitions
ADD COLUMN block_hash char(64) NOT NULL
CONSTRAINT state_transitions_block_hash_fkey REFERENCES blocks(hash)
ON UPDATE RESTRICT ON DELETE RESTRICT;
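
A minimal sanity check against the reshaped schema, assuming this migration and the V5 rename below have both run; state transitions now resolve to their blocks through the hash-based foreign key instead of the dropped numeric id:

-- Each state transition should join to exactly one block by hash.
SELECT st.hash AS st_hash, b.height, b.timestamp
FROM state_transitions st
JOIN blocks b ON b.hash = st.block_hash
ORDER BY b.height DESC
LIMIT 10;
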
2 changes: 2 additions & 0 deletions packages/indexer/migrations/V5__rename_block_height.sql
@@ -0,0 +1,2 @@
ALTER TABLE blocks
RENAME block_height TO height;
1 change: 1 addition & 0 deletions packages/indexer/migrations/V6__add_block_height_index.sql
@@ -0,0 +1 @@
CREATE INDEX block_height ON blocks (height);
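
For context, this is the kind of range scan the new index is meant to serve, mirroring the from/to filter in BlockController above; the bounds are placeholders:

-- Height-bounded block listing; eligible for the block_height index on blocks(height).
SELECT hash, height, timestamp
FROM blocks
WHERE height >= $1 -- from
  AND height <= $2 -- to
ORDER BY height DESC
LIMIT 30;
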
2 changes: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
ALTER TABLE data_contracts
ALTER COLUMN identifier TYPE varchar(44);
1 change: 1 addition & 0 deletions
@@ -0,0 +1 @@
CREATE INDEX state_transition_block_hash ON state_transitions(block_hash);
1 change: 1 addition & 0 deletions
@@ -0,0 +1 @@
CREATE INDEX data_contract_identifier ON data_contracts(identifier);
8 changes: 3 additions & 5 deletions packages/indexer/src/entities/block_header.rs
@@ -5,27 +5,25 @@ use tokio_postgres::Row;
#[derive(Clone)]
pub struct BlockHeader {
pub hash: String,
- pub block_height: i32,
+ pub height: i32,
pub tx_count: i32,
pub timestamp: DateTime<Utc>,
pub block_version: i32,
pub app_version: i32,
pub l1_locked_height: i32,
- pub chain: String,
}

impl From<Row> for BlockHeader {
fn from(row: Row) -> Self {
// hash,block_height,timestamp,block_version,app_version,l1_locked_height,chain
let hash: String = row.get(0);
- let block_height: i32 = row.get(1);
+ let height: i32 = row.get(1);
let timestamp:SystemTime = row.get(2);
let block_version: i32 = row.get(3);
let app_version: i32 = row.get(4);
let l1_locked_height: i32 = row.get(5);
- let chain = row.get(6);

- return BlockHeader { hash, block_height, tx_count: 0 , timestamp: timestamp.into(), block_version, app_version, l1_locked_height, chain};
+ return BlockHeader { hash, height, tx_count: 0 , timestamp: timestamp.into(), block_version, app_version, l1_locked_height };
}
}

4 changes: 1 addition & 3 deletions packages/indexer/src/indexer/mod.rs
@@ -120,18 +120,16 @@ impl Indexer {
let block_version = resp.block.header.version.block.parse::<i32>()?;
let app_version = resp.block.header.version.app.parse::<i32>()?;
let core_chain_locked_height = resp.block.header.core_chain_locked_height;
- let chain = resp.block.header.chain_id;

let block = Block {
header: BlockHeader {
hash,
- block_height: block_height.clone(),
+ height: block_height.clone(),
tx_count: txs.len() as i32,
timestamp,
block_version,
app_version,
l1_locked_height: core_chain_locked_height,
- chain
},
txs
};
18 changes: 9 additions & 9 deletions packages/indexer/src/processor/psql/dao/mod.rs
@@ -36,16 +36,16 @@ impl PostgresDAO {
return PostgresDAO { connection_pool };
}

- pub async fn create_state_transition(&self, block_id: i32, st_type: i32, index:i32, bytes: Vec<u8>) {
+ pub async fn create_state_transition(&self, block_hash: String, st_type: i32, index:i32, bytes: Vec<u8>) {
let data = general_purpose::STANDARD.encode(&bytes);
let hash = digest(bytes.clone()).to_uppercase();

let query = "INSERT INTO state_transitions(hash, data, type, index, block_id) VALUES ($1, $2, $3, $4, $5);";
let query = "INSERT INTO state_transitions(hash, data, type, index, block_hash) VALUES ($1, $2, $3, $4, $5);";

let client = self.connection_pool.get().await.unwrap();
let stmt = client.prepare_cached(query).await.unwrap();

- client.query(&stmt, &[&hash, &data, &st_type, &index, &block_id]).await.unwrap();
+ client.query(&stmt, &[&hash, &data, &st_type, &index, &block_hash]).await.unwrap();
}

pub async fn create_data_contract(&self, state_transition: DataContractCreateTransition) {
@@ -66,7 +66,7 @@ impl PostgresDAO {
pub async fn get_block_header_by_height(&self, block_height: i32) -> Result<Option<BlockHeader>, PoolError> {
let client = self.connection_pool.get().await?;

let stmt = client.prepare_cached("SELECT hash,block_height,timestamp,block_version,app_version,l1_locked_height,chain FROM blocks where block_height = $1;").await.unwrap();
let stmt = client.prepare_cached("SELECT hash,height,timestamp,block_version,app_version,l1_locked_height FROM blocks where height = $1;").await.unwrap();

let rows: Vec<Row> = client.query(&stmt, &[&block_height])
.await.unwrap();
@@ -82,16 +82,16 @@ impl PostgresDAO {
return Ok(block.cloned());
}

- pub async fn create_block(&self, block_header: BlockHeader) -> i32 {
+ pub async fn create_block(&self, block_header: BlockHeader) -> String {
let client = self.connection_pool.get().await.unwrap();

let stmt = client.prepare_cached("INSERT INTO blocks(hash, block_height, timestamp, block_version, app_version, l1_locked_height, chain) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id;").await.unwrap();
let stmt = client.prepare_cached("INSERT INTO blocks(hash, height, timestamp, block_version, app_version, l1_locked_height) VALUES ($1, $2, $3, $4, $5, $6) RETURNING hash;").await.unwrap();

- let rows = client.query(&stmt, &[&block_header.hash, &block_header.block_height, &SystemTime::from(block_header.timestamp), &block_header.block_version, &block_header.app_version, &block_header.l1_locked_height, &block_header.chain]).await.unwrap();
+ let rows = client.query(&stmt, &[&block_header.hash, &block_header.height, &SystemTime::from(block_header.timestamp), &block_header.block_version, &block_header.app_version, &block_header.l1_locked_height]).await.unwrap();

- let block_id: i32 = rows[0].get(0);
+ let block_hash:String = rows[0].get(0);

- return block_id;
+ return block_hash;
}
}
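
With the hash-based foreign key in place, the block row has to exist before any state transition references it, which is why create_block now returns the hash that create_state_transition takes. A sketch of that ordering with placeholder values, assuming the column types implied by the code above:

-- Parent block first; its hash is what state transitions reference.
INSERT INTO blocks (hash, height, timestamp, block_version, app_version, l1_locked_height)
VALUES (repeat('a', 64), 1, now(), 1, 1, 1)
RETURNING hash;

-- Child row second; block_hash must match an existing blocks.hash.
INSERT INTO state_transitions (hash, data, type, index, block_hash)
VALUES (repeat('b', 64), '', 0, 0, repeat('a', 64));
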

14 changes: 7 additions & 7 deletions packages/indexer/src/processor/psql/mod.rs
@@ -67,7 +67,7 @@ impl PSQLProcessor {
return block;
}

- pub async fn handle_st(&self, block_id: i32, index: i32,state_transition: StateTransition) -> () {
+ pub async fn handle_st(&self, block_hash: String, index: i32,state_transition: StateTransition) -> () {
let mut st_type: i32 = 999;
let mut bytes: Vec<u8> = Vec::new();

@@ -124,19 +124,19 @@ impl PSQLProcessor {
}
}

- self.dao.create_state_transition(block_id, st_type, index, bytes).await;
+ self.dao.create_state_transition(block_hash, st_type, index, bytes).await;
}

pub async fn handle_block(&self, block: Block) -> Result<(), ProcessorError> {
- let processed = self.dao.get_block_header_by_height(block.header.block_height.clone()).await?;
+ let processed = self.dao.get_block_header_by_height(block.header.height.clone()).await?;

match processed {
None => {
// TODO IMPLEMENT PSQL TRANSACTION

- let block_height = block.header.block_height.clone();
+ let block_height = block.header.height.clone();

- let block_id = self.dao.create_block(block.header).await;
+ let block_hash = self.dao.create_block(block.header).await;

if block.txs.len() as i32 == 0 {
println!("No platform transactions at block height {}", block_height.clone());
Expand All @@ -148,15 +148,15 @@ impl PSQLProcessor {

let state_transition = st_result.unwrap();

- self.handle_st(block_id, i as i32, state_transition).await;
+ self.handle_st(block_hash.clone(), i as i32, state_transition).await;

println!("Processed DataContractCreate at height {}", block_height);
}

Ok(())
}
Some(st) => {
println!("Block at the height {} has been already processed", &block.header.block_height);
println!("Block at the height {} has been already processed", &block.header.height);
Ok(())
}
}
