Skip to content

Commit

Permalink
Chain parameters (#40)
Browse files Browse the repository at this point in the history
* wip

* wip

* added chain parameters indexer and endpoint
  • Loading branch information
Fraccaman authored Jun 4, 2024
1 parent e4c5c20 commit d3d6cda
Show file tree
Hide file tree
Showing 37 changed files with 566 additions and 36 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[workspace]
resolver = "2"

members = ["chain", "shared", "rewards", "orm", "pos", "governance", "webserver", "seeder"]
members = ["chain", "shared", "rewards", "orm", "pos", "governance", "webserver", "seeder", "parameters"]

[workspace.package]
authors = ["Heliax <[email protected]>"]
Expand Down
6 changes: 4 additions & 2 deletions chain/src/repository/revealed_pk.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
use anyhow::Context;
use diesel::{PgConnection, RunQueryDsl};
use orm::{revealed_pk::RevealedPkInsertDb, schema::revealed_pk};
use shared::{id::Id, public_key::PublicKey};
use orm::revealed_pk::RevealedPkInsertDb;
use orm::schema::revealed_pk;
use shared::id::Id;
use shared::public_key::PublicKey;

pub fn insert_revealed_pks(
transaction_conn: &mut PgConnection,
Expand Down
2 changes: 2 additions & 0 deletions orm/migrations/2024-05-31-125032_chain_parameters/down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE chain_parameters;
6 changes: 6 additions & 0 deletions orm/migrations/2024-05-31-125032_chain_parameters/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- Snapshot of on-chain protocol parameters, one row per epoch.
-- Written by the `parameters` crawler; epoch is the natural primary key
-- so re-indexing the same epoch conflicts instead of duplicating.
CREATE TABLE chain_parameters (
epoch INT PRIMARY KEY,
unbonding_length INT NOT NULL,
pipeline_length INT NOT NULL,
epochs_per_year INT NOT NULL
);
8 changes: 3 additions & 5 deletions orm/src/bond.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
use diesel::{
associations::Associations, Identifiable, Insertable, Queryable, Selectable,
};
use diesel::associations::Associations;
use diesel::{Identifiable, Insertable, Queryable, Selectable};
use shared::bond::Bond;

use crate::validators::ValidatorDb;

use crate::schema::bonds;
use crate::validators::ValidatorDb;

#[derive(Insertable, Clone, Queryable, Selectable)]
#[diesel(table_name = bonds)]
Expand Down
1 change: 1 addition & 0 deletions orm/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ pub mod epoch_crawler_state;
pub mod governance_proposal;
pub mod governance_votes;
pub mod migrations;
pub mod parameters;
pub mod pos_rewards;
pub mod revealed_pk;
pub mod schema;
Expand Down
29 changes: 29 additions & 0 deletions orm/src/parameters.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use diesel::prelude::Insertable;
use diesel::{Queryable, Selectable};
use serde::Serialize;
use shared::parameters::Parameters;

use crate::schema::chain_parameters;

// Database row for the `chain_parameters` table (see the
// 2024-05-31-125032_chain_parameters migration), keyed by epoch.
#[derive(Serialize, Queryable, Selectable, Insertable, Clone)]
#[diesel(table_name = chain_parameters)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ParametersInsertDb {
// Epoch this snapshot was taken at; primary key of the table.
pub epoch: i32,
pub unbonding_length: i32,
pub pipeline_length: i32,
pub epochs_per_year: i32,
}

// Reads and writes share the same row shape, so a single struct serves
// as both the queryable and insertable type.
pub type ParametersDb = ParametersInsertDb;

impl From<Parameters> for ParametersInsertDb {
/// Convert the chain-level `Parameters` into a DB row.
///
/// The original used bare `as i32` casts, which silently wrap when the
/// source value exceeds `i32::MAX` and would corrupt the indexed data.
/// Checked conversions surface the problem immediately instead.
/// (Assumes the `Parameters` fields are integer types — TODO confirm
/// against shared/src/parameters.rs.)
fn from(value: Parameters) -> Self {
    Self {
        epoch: i32::try_from(value.epoch)
            .expect("epoch should fit in an INT column"),
        unbonding_length: i32::try_from(value.unbonding_length)
            .expect("unbonding_length should fit in an INT column"),
        pipeline_length: i32::try_from(value.pipeline_length)
            .expect("pipeline_length should fit in an INT column"),
        epochs_per_year: i32::try_from(value.epochs_per_year)
            .expect("epochs_per_year should fit in an INT column"),
    }
}
}
3 changes: 2 additions & 1 deletion orm/src/revealed_pk.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use diesel::{Insertable, Queryable, Selectable};
use shared::{id::Id, public_key::PublicKey};
use shared::id::Id;
use shared::public_key::PublicKey;

use crate::schema::revealed_pk;

Expand Down
34 changes: 30 additions & 4 deletions orm/src/schema.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,35 @@
// @generated automatically by Diesel CLI.

pub mod sql_types {
#[derive(diesel::query_builder::QueryId, std::fmt::Debug, diesel::sql_types::SqlType)]
#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "governance_kind"))]
pub struct GovernanceKind;

#[derive(diesel::query_builder::QueryId, std::fmt::Debug, diesel::sql_types::SqlType)]
#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "governance_result"))]
pub struct GovernanceResult;

#[derive(diesel::query_builder::QueryId, std::fmt::Debug, diesel::sql_types::SqlType)]
#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "governance_tally_type"))]
pub struct GovernanceTallyType;

#[derive(diesel::query_builder::QueryId, std::fmt::Debug, diesel::sql_types::SqlType)]
#[derive(
diesel::query_builder::QueryId,
std::fmt::Debug,
diesel::sql_types::SqlType,
)]
#[diesel(postgres_type(name = "vote_kind"))]
pub struct VoteKind;
}
Expand Down Expand Up @@ -44,6 +60,15 @@ diesel::table! {
}
}

diesel::table! {
chain_parameters (epoch) {
epoch -> Int4,
unbonding_length -> Int4,
pipeline_length -> Int4,
epochs_per_year -> Int4,
}
}

diesel::table! {
epoch_crawler_state (id) {
id -> Int4,
Expand Down Expand Up @@ -138,6 +163,7 @@ diesel::allow_tables_to_appear_in_same_query!(
balances,
block_crawler_state,
bonds,
chain_parameters,
epoch_crawler_state,
governance_proposals,
governance_votes,
Expand Down
34 changes: 34 additions & 0 deletions parameters/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
[package]
name = "parameters"
description = "Namada chain parameters crawling."
resolver = "2"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme.workspace = true
version.workspace = true

[[bin]]
# Fixed copy-paste from the `pos` crate: the binary must be named
# `parameters` so `cargo build --release --package parameters` emits
# target/release/parameters — the exact path the Dockerfile copies
# (`COPY --from=builder /app/target/release/parameters ...`).
name = "parameters"
path = "src/main.rs"

[dependencies]
tokio.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
clap.workspace = true
anyhow.workspace = true
namada_sdk.workspace = true
namada_core.workspace = true
namada_parameters.workspace = true
tendermint-rpc.workspace = true
shared.workspace = true
futures.workspace = true
deadpool-diesel.workspace = true
diesel.workspace = true
diesel_migrations.workspace = true
orm.workspace = true
clap-verbosity-flag.workspace = true

[build-dependencies]
# Embeds git metadata (commit SHA etc.) into the binary via build.rs.
vergen = { version = "8.0.0", features = ["build", "git", "gitcl"] }
24 changes: 24 additions & 0 deletions parameters/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# cargo-chef lets Docker cache the dependency build separately from the
# (frequently changing) application source.
FROM lukemathwalker/cargo-chef:latest-rust-1.78-bookworm AS chef
WORKDIR /app

# Planner stage: compute the dependency "recipe" from the workspace.
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json

RUN apt-get update && apt-get install -y protobuf-compiler build-essential clang-tools-14

# Build dependencies only; this layer is reused until Cargo.toml changes.
RUN cargo chef cook --release --recipe-path recipe.json

COPY . .
RUN cargo build --release --package parameters

# Minimal runtime image: only the compiled binary is carried over.
FROM debian:bookworm-slim AS runtime
WORKDIR /app
# NOTE(review): parameters/Cargo.toml declares `[[bin]] name = "pos"`,
# so the build may emit target/release/pos rather than .../parameters —
# verify this COPY path against the crate's bin name.
COPY --from=builder /app/target/release/parameters /app/parameters

# NOTE(review): WORKDIR /app was already set above; this repeat is a no-op.
WORKDIR /app

CMD ["./parameters"]
8 changes: 8 additions & 0 deletions parameters/build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
use std::error::Error;

use vergen::EmitBuilder;

/// Build script: asks `vergen` to emit all git-derived metadata
/// (commit SHA, branch, …) as cargo instructions at compile time.
fn main() -> Result<(), Box<dyn Error>> {
    // Propagate any vergen failure straight out of the build script,
    // boxing it into the declared error type.
    EmitBuilder::builder().all_git().emit().map_err(Into::into)
}
2 changes: 2 additions & 0 deletions parameters/run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@

cargo run -- --tendermint-url http://127.0.0.1:27657 --database-url postgres://postgres:[email protected]:5435/namada-indexer
35 changes: 35 additions & 0 deletions parameters/src/app_state.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
use std::env;

use anyhow::Context;
use deadpool_diesel::postgres::{Object, Pool as DbPool};

/// Shared application state: a cloneable handle to the Postgres
/// connection pool.
#[derive(Clone)]
pub struct AppState {
    db: DbPool,
}

impl AppState {
    /// Build a deadpool-backed Postgres pool for `db_url`.
    ///
    /// The pool size is read from the `DATABASE_POOL_SIZE` environment
    /// variable and falls back to 8 when unset or unparsable.
    pub fn new(db_url: String) -> anyhow::Result<Self> {
        // Default connection count when the env var is missing/invalid.
        const DEFAULT_POOL_SIZE: usize = 8;

        let max_pool_size = env::var("DATABASE_POOL_SIZE")
            .ok()
            .and_then(|raw| raw.parse::<usize>().ok())
            .unwrap_or(DEFAULT_POOL_SIZE);

        let manager = deadpool_diesel::Manager::new(
            db_url,
            deadpool_diesel::Runtime::Tokio1,
        );
        let db = DbPool::builder(manager)
            .max_size(max_pool_size)
            .build()
            .context("Failed to build Postgres db pool")?;

        Ok(Self { db })
    }

    /// Check a connection out of the pool.
    pub async fn get_db_connection(&self) -> anyhow::Result<Object> {
        self.db
            .get()
            .await
            .context("Failed to get db connection handle from deadpool")
    }
}
28 changes: 28 additions & 0 deletions parameters/src/config.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
use core::fmt;
use std::fmt::Display;

use clap_verbosity_flag::{InfoLevel, Verbosity};

// Deployment environment the binary runs in.
// NOTE(review): not referenced by `AppConfig` in this file — presumably
// used elsewhere or kept for parity with the other crawlers; confirm.
// (Deliberately using `//` comments: `///` on ValueEnum items would
// change the generated CLI help text.)
#[derive(clap::ValueEnum, Clone, Debug, Copy)]
pub enum CargoEnv {
Development,
Production,
}

impl Display for CargoEnv {
    /// Render the variant name; identical to the derived `Debug` output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate directly to the derived Debug impl so Display can
        // never drift out of sync with the variant names.
        fmt::Debug::fmt(self, f)
    }
}

// Command-line / environment configuration for the parameters crawler.
// Each field is settable as `--flag` or via an env var of the same name
// (clap's `env` attribute). `//` comments are used on purpose: `///`
// would become clap help text and change CLI behavior.
#[derive(clap::Parser)]
pub struct AppConfig {
// CometBFT/Tendermint RPC endpoint of the Namada node to crawl.
#[clap(long, env)]
pub tendermint_url: String,

// Postgres connection string for the indexer database.
#[clap(long, env)]
pub database_url: String,

// -v / -q verbosity flags; defaults to `info` level.
#[command(flatten)]
pub verbosity: Verbosity<InfoLevel>,
}
4 changes: 4 additions & 0 deletions parameters/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
//! Crawler crate that indexes Namada chain parameters into Postgres.
//!
//! `app_state` holds the DB pool, `config` the CLI/env settings.
//! `repository` and `services` are not shown in this diff — presumably
//! persistence helpers and Namada RPC queries; see their source files.
pub mod app_state;
pub mod config;
pub mod repository;
pub mod services;
91 changes: 91 additions & 0 deletions parameters/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
use std::sync::Arc;

use anyhow::Context;
use clap::Parser;
use clap_verbosity_flag::LevelFilter;
use deadpool_diesel::postgres::Object;
use diesel::RunQueryDsl;
use orm::migrations::run_migrations;
use orm::parameters::ParametersInsertDb;
use orm::schema::chain_parameters;
use parameters::app_state::AppState;
use parameters::config::AppConfig;
use parameters::services::namada as namada_service;
use shared::crawler;
use shared::error::{AsDbError, AsRpcError, ContextDbInteractError, MainError};
use tendermint_rpc::HttpClient;
use tracing::Level;
use tracing_subscriber::FmtSubscriber;

// Entry point of the parameters crawler: parse config, set up logging,
// connect to the node and the DB, run migrations, then crawl every
// epoch starting from the chain's current one.
#[tokio::main]
async fn main() -> Result<(), MainError> {
    let config = AppConfig::parse();

    // Map the clap-verbosity filter onto tracing's level; `Off` disables
    // logging entirely (no subscriber installed).
    let log_level = match config.verbosity.log_level_filter() {
        LevelFilter::Off => None,
        LevelFilter::Error => Some(Level::ERROR),
        LevelFilter::Warn => Some(Level::WARN),
        LevelFilter::Info => Some(Level::INFO),
        LevelFilter::Debug => Some(Level::DEBUG),
        LevelFilter::Trace => Some(Level::TRACE),
    };
    if let Some(log_level) = log_level {
        let subscriber =
            FmtSubscriber::builder().with_max_level(log_level).finish();
        // unwrap: a bug only if a global subscriber was already set.
        tracing::subscriber::set_global_default(subscriber).unwrap();
    }

    // NOTE(review): tendermint_url is user-supplied, so this unwrap can
    // panic on a malformed URL — consider surfacing it as an RPC error.
    let client =
        Arc::new(HttpClient::new(config.tendermint_url.as_str()).unwrap());

    let app_state = AppState::new(config.database_url).into_db_error()?;
    let conn = Arc::new(app_state.get_db_connection().await.into_db_error()?);

    // Run migrations
    run_migrations(&conn)
        .await
        .context_db_interact_error()
        .into_db_error()?;

    // We always start from the current epoch
    let current_epoch = namada_service::get_current_epoch(&client.clone())
        .await
        .into_rpc_error()?;

    // Hand each epoch to crawling_fn; the crawler drives the epoch
    // sequence from `current_epoch` onward.
    crawler::crawl(
        move |epoch| crawling_fn(epoch, conn.clone(), client.clone()),
        current_epoch,
    )
    .await
}

/// Process one epoch: fetch the chain parameters over RPC and insert
/// them into the `chain_parameters` table.
///
/// Fixes copy-pasted error messages from another crawler — this
/// function inserts chain parameters, not crawler state or blocks.
async fn crawling_fn(
    epoch_to_process: u32,
    conn: Arc<Object>,
    client: Arc<HttpClient>,
) -> Result<(), MainError> {
    tracing::info!("Attempting to process epoch: {}...", epoch_to_process);

    // Query the node for the protocol parameters at this epoch.
    let parameters = namada_service::get_parameters(&client, epoch_to_process)
        .await
        .into_rpc_error()?;

    // Persist inside a read-write transaction. `on_conflict_do_nothing`
    // makes re-processing an already-indexed epoch a harmless no-op
    // (epoch is the primary key).
    conn.interact(move |conn| {
        conn.build_transaction()
            .read_write()
            .run(|transaction_conn| {
                diesel::insert_into(chain_parameters::table)
                    .values::<&ParametersInsertDb>(&parameters.into())
                    .on_conflict_do_nothing()
                    .execute(transaction_conn)
                    .context("Failed to insert chain parameters in db")?;

                anyhow::Ok(())
            })
    })
    .await
    .context_db_interact_error()
    .into_db_error()?
    .context("Commit chain parameters db transaction error")
    .into_db_error()
}
Loading

0 comments on commit d3d6cda

Please sign in to comment.