Skip to content
Closed

test #265

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions .github/workflows/cache.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
name: Clear cache

# Manually-triggered workflow that deletes every GitHub Actions cache
# entry in this repository.
on:
  workflow_dispatch:

permissions:
  # Required to list and delete Actions caches via the REST API.
  actions: write

jobs:
  clear-cache:
    runs-on: ubuntu-latest
    steps:
      - name: Clear cache
        uses: actions/github-script@v7
        with:
          script: |
            console.log("About to clear")
            // Use paginate so repositories with more than one page of
            // caches (default page size 30) are fully cleared, not just
            // the first page.
            const caches = await github.paginate(
              github.rest.actions.getActionsCacheList,
              {
                owner: context.repo.owner,
                repo: context.repo.repo,
              }
            )
            for (const cache of caches) {
              console.log(cache)
              // Await each deletion: without this the script (and the
              // runner) can exit before the delete requests complete,
              // and failures are silently dropped.
              await github.rest.actions.deleteActionsCacheById({
                owner: context.repo.owner,
                repo: context.repo.repo,
                cache_id: cache.id,
              })
            }
            console.log("Clear completed")
4 changes: 4 additions & 0 deletions orm/migrations/2025-01-27-130323_gas_estimation_ibc/down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-- This file should undo anything in `up.sql`
-- Drop the columns added by `up.sql` in a single statement, newest first.
ALTER TABLE gas_estimations
    DROP COLUMN ibc_unshielding_transfer,
    DROP COLUMN ibc_shielding_transfer,
    DROP COLUMN token;
11 changes: 11 additions & 0 deletions orm/migrations/2025-01-27-130323_gas_estimation_ibc/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- Your SQL goes here
-- Add the two new IBC gas-estimation columns; DEFAULT 0 keeps existing
-- rows valid under NOT NULL.
ALTER TABLE gas_estimations ADD COLUMN ibc_unshielding_transfer INT NOT NULL DEFAULT 0;
ALTER TABLE gas_estimations ADD COLUMN ibc_shielding_transfer INT NOT NULL DEFAULT 0;

-- Add `token` nullable first so the ALTER succeeds on a populated table...
ALTER TABLE gas_estimations ADD COLUMN token VARCHAR;

-- ...backfill existing rows with the native token's address...
-- NOTE(review): assumes the "token" table contains a row with
-- token_type = 'native'; if it does not, `token` stays NULL and the
-- SET NOT NULL below fails — confirm migration ordering guarantees this.
UPDATE gas_estimations
SET token = (SELECT address FROM "token" WHERE token_type = 'native' LIMIT 1)
WHERE token IS NULL;

-- ...then tighten the column to NOT NULL now that every row is populated.
ALTER TABLE gas_estimations ALTER COLUMN token SET NOT NULL;
6 changes: 6 additions & 0 deletions orm/src/gas.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ pub struct GasEstimationDb {
pub shielded_transfer: i32,
pub shielding_transfer: i32,
pub unshielding_transfer: i32,
pub ibc_unshielding_transfer: i32,
pub ibc_shielding_transfer: i32,
pub ibc_msg_transfer: i32,
pub bond: i32,
pub redelegation: i32,
Expand All @@ -52,6 +54,7 @@ pub struct GasEstimationDb {
pub reveal_pk: i32,
pub signatures: i32,
pub tx_size: i32,
pub token: String,
}

pub type GasEstimationInsertDb = GasEstimationDb;
Expand All @@ -65,6 +68,8 @@ impl From<GasEstimation> for GasEstimationInsertDb {
shielding_transfer: value.shielding_transfer as i32,
unshielding_transfer: value.unshielding_transfer as i32,
ibc_msg_transfer: value.ibc_msg_transfer as i32,
ibc_unshielding_transfer: value.ibc_unshielding_transfer as i32,
ibc_shielding_transfer: value.ibc_shielding_transfer as i32,
bond: value.bond as i32,
redelegation: value.redelegation as i32,
unbond: value.unbond as i32,
Expand All @@ -74,6 +79,7 @@ impl From<GasEstimation> for GasEstimationInsertDb {
reveal_pk: value.reveal_pk as i32,
signatures: value.signatures as i32,
tx_size: value.size as i32,
token: value.token.to_string(),
}
}
}
19 changes: 11 additions & 8 deletions orm/src/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,32 +38,32 @@ pub mod sql_types {
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "payment_kind"))]
pub struct PaymentKind;
#[diesel(postgres_type(name = "history_kind"))]
pub struct HistoryKind;

#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "payment_recurrence"))]
pub struct PaymentRecurrence;
#[diesel(postgres_type(name = "ibc_status"))]
pub struct IbcStatus;

#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "history_kind"))]
pub struct HistoryKind;
#[diesel(postgres_type(name = "payment_kind"))]
pub struct PaymentKind;

#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "ibc_status"))]
pub struct IbcStatus;
#[diesel(postgres_type(name = "payment_recurrence"))]
pub struct PaymentRecurrence;

#[derive(
diesel::query_builder::QueryId,
Expand Down Expand Up @@ -202,6 +202,9 @@ diesel::table! {
reveal_pk -> Int4,
tx_size -> Int4,
signatures -> Int4,
ibc_unshielding_transfer -> Int4,
ibc_shielding_transfer -> Int4,
token -> Varchar,
}
}

Expand Down
137 changes: 99 additions & 38 deletions shared/src/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ use crate::header::BlockHeader;
use crate::id::Id;
use crate::proposal::{GovernanceProposal, GovernanceProposalKind};
use crate::public_key::PublicKey;
use crate::ser;
use crate::token::{IbcToken, Token};
use crate::transaction::{
InnerTransaction, Transaction, TransactionExitStatus, TransactionKind,
Expand Down Expand Up @@ -941,47 +942,61 @@ impl Block {
) -> Option<Vec<BalanceChange>> {
let change = match &tx.kind {
TransactionKind::IbcMsgTransfer(data) => {
let data = data.clone().and_then(|d| {
Self::ibc_msg_recv_packet(d.0).and_then(|msg| {
serde_json::from_slice::<PacketData>(&msg.packet.data)
.map(|p| (msg, p))
.ok()
})
});

let (msg, packet_data) = data?;
let denom = packet_data.token.denom.to_string();
Self::parse_ibc_tx_balances(data, native_token)
.unwrap_or_default()
}
TransactionKind::ShieldedTransfer(data) => {
let data = data.as_ref()?;

let ibc_trace = format!(
"{}/{}/{}",
msg.packet.port_id_on_b,
msg.packet.chan_id_on_b,
packet_data.token.denom
);
[&data.sources]
.iter()
.flat_map(|transfer_changes| {
transfer_changes.0.keys().map(|account| {
BalanceChange::new(
Id::from(account.owner.clone()),
Token::Native(Id::from(account.token.clone())),
)
})
})
.collect()
}
TransactionKind::UnshieldingTransfer(data) => {
let data = data.as_ref()?;

let trace = Id::IbcTrace(ibc_trace.clone());
let address = namada_ibc::trace::convert_to_address(ibc_trace)
.expect("Failed to convert IBC trace to address");
[&data.targets]
.iter()
.flat_map(|transfer_changes| {
transfer_changes.0.keys().map(|account| {
BalanceChange::new(
Id::from(account.owner.clone()),
Token::Native(Id::from(account.token.clone())),
)
})
})
.collect()
}
TransactionKind::MixedTransfer(data) => {
let data = data.as_ref()?;

let mut balances = vec![BalanceChange::new(
Id::Account(String::from(packet_data.receiver.as_ref())),
Token::Ibc(IbcToken {
address: Id::from(address.clone()),
trace,
}),
)];

// If the denom contains the namada native token, try to fetch
// the balance
if denom.contains(&native_token.to_string()) {
balances.push(BalanceChange::new(
Id::Account(String::from(
packet_data.receiver.as_ref(),
)),
Token::Native(native_token.clone()),
))
}
balances
[&data.sources, &data.targets]
.iter()
.flat_map(|transfer_changes| {
transfer_changes.0.keys().map(|account| {
BalanceChange::new(
Id::from(account.owner.clone()),
Token::Native(Id::from(account.token.clone())),
)
})
})
.collect()
}
TransactionKind::IbcShieldingTransfer((data, _)) => {
Self::parse_ibc_tx_balances(data, native_token)
.unwrap_or_default()
}
TransactionKind::IbcUnshieldingTransfer((data, _)) => {
Self::parse_ibc_tx_balances(data, native_token)
.unwrap_or_default()
}
TransactionKind::TransparentTransfer(data) => {
let data = data.as_ref()?;
Expand Down Expand Up @@ -1317,4 +1332,50 @@ impl Block {
})
.collect()
}

/// Extracts the balance changes implied by an incoming IBC transfer.
///
/// Decodes the `MsgRecvPacket` from `data`, deserializes its ICS-20
/// `PacketData`, and returns the receiver's balance change for the IBC
/// token (plus the native token when the denom embeds it). Returns
/// `None` when the message is absent, is not a recv-packet, its payload
/// does not parse, or the IBC trace cannot be converted to an address.
fn parse_ibc_tx_balances(
    data: &Option<ser::IbcMessage<Transfer>>,
    native_token: &Id,
) -> Option<Vec<BalanceChange>> {
    let (msg, packet_data) = data.clone().and_then(|d| {
        Self::ibc_msg_recv_packet(d.0).and_then(|msg| {
            serde_json::from_slice::<PacketData>(&msg.packet.data)
                .map(|p| (msg, p))
                .ok()
        })
    })?;

    let denom = packet_data.token.denom.to_string();

    // IBC trace on the receiving chain: destination port / channel / denom.
    let ibc_trace = format!(
        "{}/{}/{}",
        msg.packet.port_id_on_b, msg.packet.chan_id_on_b, packet_data.token.denom
    );

    let trace = Id::IbcTrace(ibc_trace.clone());
    // Was `.expect(...)`: a malformed trace would panic the whole block
    // processing. Both call sites already handle `None` via
    // `unwrap_or_default()`, so bail out gracefully instead.
    let address = namada_ibc::trace::convert_to_address(ibc_trace).ok()?;

    let mut balances = vec![BalanceChange::new(
        Id::Account(String::from(packet_data.receiver.as_ref())),
        Token::Ibc(IbcToken {
            address: Id::from(address),
            trace,
        }),
    )];

    // If the denom contains the namada native token, the receiver's
    // native balance may also change, so track it too.
    if denom.contains(&native_token.to_string()) {
        balances.push(BalanceChange::new(
            Id::Account(String::from(packet_data.receiver.as_ref())),
            Token::Native(native_token.clone()),
        ));
    }
    Some(balances)
}
}
59 changes: 57 additions & 2 deletions shared/src/block_result.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::str::FromStr;

use namada_sdk::events::extend::{IndexedMaspData, MaspTxRef};
use namada_tx::data::TxResult;
use tendermint_rpc::endpoint::block_results::Response as TendermintBlockResultResponse;

Expand Down Expand Up @@ -108,14 +110,35 @@ impl BatchResults {
}
}

#[derive(Debug, Clone, Default)]
#[derive(Clone)]
/// Result data extracted from a tx-applied block event.
pub struct TxApplied {
    /// Exit status code of the applied transaction.
    pub code: TxEventStatusCode,
    /// Gas consumed by the transaction.
    pub gas: u64,
    /// Wrapper transaction hash (matched against in `BlockResult::masp_refs`).
    pub hash: Id,
    /// Block height at which the transaction was applied.
    pub height: u64,
    /// Per-inner-transaction results of the batch.
    pub batch: BatchResults,
    /// Free-form info string from the event attributes.
    pub info: String,
    /// MASP transaction references keyed by inner tx index
    /// (built from the event's `masp_data_refs` attribute).
    pub masp_refs: HashMap<u64, Vec<MaspTxRef>>,
}

impl fmt::Debug for TxApplied {
    /// Manual `Debug`: prints every field verbatim except `masp_refs`,
    /// which is summarized by its entry count (`masp_refs_len`).
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = formatter.debug_struct("TxApplied");
        builder.field("code", &self.code);
        builder.field("gas", &self.gas);
        builder.field("hash", &self.hash);
        builder.field("height", &self.height);
        builder.field("batch", &self.batch);
        builder.field("info", &self.info);
        builder.field("masp_refs_len", &self.masp_refs.len());
        builder.finish()
    }
}

#[derive(Debug, Clone)]
/// A reference to a MASP transaction payload.
/// NOTE(review): the `String` payloads' exact semantics (presumably a
/// section/data hash for `Native` and an IBC-sourced reference for
/// `Ibc`) are not visible here — confirm against the producers of this
/// type before relying on them.
pub enum MaspRef {
    Native(String),
    Ibc(String),
}

#[derive(Debug, Clone, Default)]
Expand Down Expand Up @@ -189,6 +212,19 @@ impl TxAttributesType {
.map(|height| u64::from_str(height).unwrap())
.unwrap()
.to_owned(),
masp_refs: attributes
.get("masp_data_refs")
.map(|data| {
if let Ok(data) =
serde_json::from_str::<IndexedMaspData>(data)
{
let refs = data.masp_refs.0.to_vec();
HashMap::from_iter([(data.tx_index.0 as u64, refs)])
} else {
HashMap::default()
}
})
.unwrap_or_default(),
batch: attributes
.get("batch")
.map(|batch_result| {
Expand Down Expand Up @@ -323,4 +359,23 @@ impl BlockResult {
});
exit_status.unwrap_or(TransactionExitStatus::Rejected)
}

/// Returns the MASP tx references recorded for the inner transaction at
/// `index` inside the wrapper tx identified by `wrapper_hash`.
///
/// Returns an empty `Vec` when no matching tx-applied event exists or
/// the event carries no refs for that index.
pub fn masp_refs(&self, wrapper_hash: &Id, index: u64) -> Vec<MaspTxRef> {
    self.end_events
        .iter()
        .filter_map(|event| match &event.attributes {
            // Borrow the event data here instead of cloning it: the
            // previous version cloned every candidate `TxApplied`
            // (including its whole masp_refs map) before `find` ran.
            Some(TxAttributesType::TxApplied(data)) => Some(data),
            _ => None,
        })
        .find(|attributes| attributes.hash.eq(wrapper_hash))
        // Clone only the single Vec we actually return.
        .and_then(|event| event.masp_refs.get(&index).cloned())
        .unwrap_or_default()
}
}
Loading