From c4055739d43a586acd22950757de13cd267091a2 Mon Sep 17 00:00:00 2001
From: luciotato
Date: Sat, 26 Jun 2021 17:42:56 -0300
Subject: [PATCH 01/39] make stake pool optional, score-all command

---
 .gitignore      |  1 +
 Cargo.lock      |  4 +++-
 bot/src/main.rs | 55 ++++++++++++++++++++++++++++---------------------
 3 files changed, 36 insertions(+), 24 deletions(-)

diff --git a/.gitignore b/.gitignore
index fd552c93..b988c1dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@
 /registry-cli
 /db
 test-ledger/
+.vscode

diff --git a/Cargo.lock b/Cargo.lock
index 8d9b3096..97ba2c54 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,5 +1,7 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
+version = 3
+
 [[package]]
 name = "Inflector"
 version = "0.11.4"
@@ -3963,7 +3965,7 @@ dependencies = [

 [[package]]
 name = "solana-foundation-delegation-program-cli"
-version = "1.0.3"
+version = "1.0.4"
 dependencies = [
  "clap",
  "solana-account-decoder",

diff --git a/bot/src/main.rs b/bot/src/main.rs
index 642423ac..4b5a0db3 100644
--- a/bot/src/main.rs
+++ b/bot/src/main.rs
@@ -195,6 +195,9 @@ struct Config {

     dry_run: bool,

+    /// compute score for all validators in the cluster
+    score_all: bool,
+
     /// Quality validators produce within this percentage of the cluster average skip rate over
     /// the previous epoch
     quality_block_producer_percentage: usize,
@@ -315,7 +318,7 @@ fn app_version() -> String {
     })
 }

-fn get_config() -> BoxResult<(Config, RpcClient, Box<dyn GenericStakePool>)> {
+fn get_config() -> BoxResult<(Config, RpcClient, Option<Box<dyn GenericStakePool>>)> {
     let default_confirmed_block_cache_path = default_confirmed_block_cache_path()
         .to_str()
         .unwrap()
@@ -594,9 +597,13 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box<dyn GenericStakePool>)> {
                         .validator(is_amount)
                 )
             )
+        .subcommand(
+            SubCommand::with_name("score-all").about("Score all validators in the cluster")
+        )
         .get_matches();

     let dry_run = !matches.is_present("confirm");
+    let score_all = !matches.is_present("score-all");
     let cluster = match value_t_or_exit!(matches, "cluster", String).as_str() {
         "mainnet-beta" => Cluster::MainnetBeta,
         "testnet" => Cluster::Testnet,
@@ -631,7 +638,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box<dyn GenericStakePool>)> {
         Cluster::MainnetBeta => value_t!(matches, "json_rpc_url", String)
             .unwrap_or_else(|_| "http://api.mainnet-beta.solana.com".into()),
         Cluster::Testnet => value_t!(matches, "json_rpc_url", String)
-            .unwrap_or_else(|_| "http://testnet.solana.com".into()),
+            .unwrap_or_else(|_| "http://api.testnet.solana.com".into()),
     };
     let db_path = value_t_or_exit!(matches, "db_path", PathBuf);
     let markdown_path = if matches.is_present("markdown") {
@@ -662,6 +669,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box<dyn GenericStakePool>)> {
         db_path,
         markdown_path,
         dry_run,
+        score_all,
         quality_block_producer_percentage,
         max_poor_block_producer_percentage,
         max_commission,
@@ -689,7 +697,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box<dyn GenericStakePool>)> {
         .get_health()
         .map_err(|err| format!("RPC endpoint is unhealthy: {:?}", err))?;

-    let stake_pool: Box<dyn GenericStakePool> = match matches.subcommand() {
+    let stake_pool: Option<Box<dyn GenericStakePool>> = match matches.subcommand() {
         ("stake-pool-v0", Some(matches)) => {
             let authorized_staker = keypair_of(&matches, "authorized_staker").unwrap();
             let reserve_stake_address = pubkey_of(&matches, "reserve_stake_address").unwrap();
             let min_reserve_stake_balance =
                 sol_to_lamports(value_t_or_exit!(matches, "min_reserve_stake_balance", f64));
             let baseline_stake_amount =
                 sol_to_lamports(value_t_or_exit!(matches,
"baseline_stake_amount", f64)); - Box::new(stake_pool_v0::new( + Some(Box::new(stake_pool_v0::new( &rpc_client, authorized_staker, baseline_stake_amount, reserve_stake_address, min_reserve_stake_balance, - )?) + )?)) } ("stake-pool", Some(matches)) => { let authorized_staker = keypair_of(&matches, "authorized_staker").unwrap(); let pool_address = pubkey_of(&matches, "pool_address").unwrap(); let baseline_stake_amount = sol_to_lamports(value_t_or_exit!(matches, "baseline_stake_amount", f64)); - Box::new(stake_pool::new( + Some(Box::new(stake_pool::new( &rpc_client, authorized_staker, pool_address, baseline_stake_amount, - )?) + )?)) } - _ => unreachable!(), + _ => None, }; Ok((config, rpc_client, stake_pool)) @@ -1444,7 +1452,7 @@ fn classify( fn main() -> BoxResult<()> { solana_logger::setup_with_default("solana=info"); - let (config, rpc_client, mut stake_pool) = get_config()?; + let (config, rpc_client, optional_stake_pool) = get_config()?; info!("Loading participants..."); let participants = get_participants_with_state( @@ -1578,21 +1586,22 @@ fn main() -> BoxResult<()> { }) .collect(); - let (stake_pool_notes, validator_stake_actions, unfunded_validators) = - stake_pool.apply(&rpc_client, config.dry_run, &desired_validator_stake)?; - notifications.extend(stake_pool_notes.clone()); - epoch_classification.notes.extend(stake_pool_notes); - - for identity in unfunded_validators { - validator_classifications - .entry(identity) - .and_modify(|e| e.prioritize_funding_in_next_epoch = Some(true)); - } + if let Some(mut stake_pool) = optional_stake_pool { + let (stake_pool_notes, validator_stake_actions, unfunded_validators) = + stake_pool.apply(&rpc_client, config.dry_run, &desired_validator_stake)?; + notifications.extend(stake_pool_notes.clone()); + epoch_classification.notes.extend(stake_pool_notes); + for identity in unfunded_validators { + validator_classifications + .entry(identity) + .and_modify(|e| e.prioritize_funding_in_next_epoch = Some(true)); + } - for (identity, stake_action) in validator_stake_actions { - validator_classifications - .entry(identity) - .and_modify(|e| e.stake_action = Some(stake_action)); + for (identity, stake_action) in validator_stake_actions { + validator_classifications + .entry(identity) + .and_modify(|e| e.stake_action = Some(stake_action)); + } } validator_notes.sort(); From fc010711041c40fb3e4fc6b7a9c5dc4b7f11a605 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 27 Jun 2021 07:28:07 -0300 Subject: [PATCH 02/39] generate validator-detail.csv --- bot/src/db.rs | 6 ++++++ bot/src/main.rs | 34 +++++++++++++++++++++++++++++++++- 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index 3d4d75a8..0e3edffb 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -22,6 +22,12 @@ pub struct ValidatorClassification { pub stake_state: ValidatorStakeState, pub stake_state_reason: String, + /// computed score (more granular than ValidatorStakeState) + pub score: u32, + pub commission: u8, + pub active_stake: u64, + pub epoch_credits: u64, + // Summary of the action was taken this epoch to advance the validator's stake pub stake_action: Option, diff --git a/bot/src/main.rs b/bot/src/main.rs index 4b5a0db3..a29059d2 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1416,12 +1416,18 @@ fn classify( .unwrap_or_default(); stake_states.insert(0, (stake_state, reason.clone())); + let score: u32 = 0; + validator_classifications.insert( identity, ValidatorClassification { identity, vote_address, stake_state, + score, + commission, + 
epoch_credits, + active_stake, stake_states: Some(stake_states), stake_action: None, stake_state_reason: reason, @@ -1613,7 +1619,6 @@ fn main() -> BoxResult<()> { if first_time { EpochClassification::new(epoch_classification).save(epoch, &config.cluster_db_path())?; - generate_markdown(epoch, &config)?; // Only notify the user if this is the first run for this epoch for notification in notifications { @@ -1622,6 +1627,9 @@ fn main() -> BoxResult<()> { } } + //conditional to: matches.is_present("markdown") + generate_markdown(epoch, &config)?; + Ok(()) } @@ -1701,10 +1709,18 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { + let mut validator_detail_csv = vec![]; + validator_detail_csv.push("identity, stake_state, stake_state_reason, score, commission, active_stake, epoch_credits".into()); + let mut validator_classifications = validator_classifications.iter().collect::>(); validator_classifications.sort_by(|a, b| a.0.cmp(&b.0)); for (identity, classification) in validator_classifications { + let mut csv = vec![ + identity.to_string(), + format!("\"{:?}\"", classification.stake_state), + ]; + let validator_markdown = validators_markdown.entry(identity).or_default(); validator_markdown.push(format!( @@ -1725,6 +1741,15 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { "* Stake reason: {}", classification.stake_state_reason )); + csv.push(format!( + r#""{}",{},{},{},{}"#, + classification.stake_state_reason, + classification.score, + classification.commission, + classification.active_stake, + classification.epoch_credits + )); + if let Some(ref stake_action) = classification.stake_action { validator_markdown.push(format!("* Staking activity: {}", stake_action)); } @@ -1758,7 +1783,14 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { for note in &classification.notes { validator_markdown.push(format!("* {}", note)); } + + validator_detail_csv.push(csv.join(",")); } + // save validator-detail.csv + let filename = config.cluster_db_path().join("validator-detail.csv"); + info!("Writing {}", filename.display()); + let mut file = File::create(filename)?; + file.write_all(&validator_detail_csv.join("\n").into_bytes())?; } } From 3e0b4057b4c33d87f5f0e5233770a34677002390 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 27 Jun 2021 13:49:36 -0300 Subject: [PATCH 03/39] skip heavy rpc-call, ensure all validators scored --- bot/src/main.rs | 58 ++++++++++++++++++++++++++++++++----------------- sql/fixes.sql | 4 ++++ sql/import.sql | 24 ++++++++++++++++++++ sql/queries.sql | 45 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 111 insertions(+), 20 deletions(-) create mode 100644 sql/fixes.sql create mode 100644 sql/import.sql create mode 100644 sql/queries.sql diff --git a/bot/src/main.rs b/bot/src/main.rs index a29059d2..8f37ecef 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -602,8 +602,8 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Cluster::MainnetBeta, "testnet" => Cluster::Testnet, @@ -944,6 +944,7 @@ fn get_self_stake_by_vote_account( info!("Fetching stake accounts..."); let all_stake_accounts = rpc_client.get_program_accounts(&solana_stake_program::id())?; + info!("{} stake accounts", all_stake_accounts.len()); let stake_history_account = rpc_client .get_account_with_commitment(&sysvar::stake_history::id(), CommitmentConfig::finalized())? 
@@ -1077,9 +1078,19 @@ fn classify( .collect::>(); let (vote_account_info, total_active_stake) = get_vote_account_info(&rpc_client, last_epoch)?; + info!( + "validators:{} total_active_stake:{}", + vote_account_info.len(), + total_active_stake + ); - let self_stake_by_vote_account = - get_self_stake_by_vote_account(rpc_client, epoch, &vote_account_info)?; + // Note: get_self_stake_by_vote_account is expensive because it does a RPC call for each validator + // we skip this data gathering if config.min_self_stake_lamports==0 + let self_stake_by_vote_account = if config.min_self_stake_lamports > 0 { + get_self_stake_by_vote_account(rpc_client, epoch, &vote_account_info)? + } else { + HashMap::new() + }; let (cluster_nodes_with_old_version, min_release_version): (HashMap, _) = match config.min_release_version { @@ -1089,7 +1100,7 @@ fn classify( .into_iter() .filter_map(|rpc_contact_info| { if let Ok(identity) = Pubkey::from_str(&rpc_contact_info.pubkey) { - if validator_list.contains(&identity) { + if config.score_all || validator_list.contains(&identity) { if let Some(ref version) = rpc_contact_info.version { if let Ok(semver) = semver::Version::parse(version) { if semver < *min_release_version { @@ -1214,6 +1225,7 @@ fn classify( None } else { let mut validator_classifications = HashMap::new(); + let mut total_skipped: u32 = 0; for VoteAccountInfo { identity, @@ -1223,7 +1235,8 @@ fn classify( epoch_credits, } in vote_account_info { - if !validator_list.contains(&identity) { + if !config.score_all && !validator_list.contains(&identity) { + total_skipped += 1; continue; } @@ -1281,7 +1294,10 @@ fn classify( let insufficent_self_stake_msg = format!("insufficient self stake: {}", Sol(self_stake)); - if !config.enforce_min_self_stake && self_stake < config.min_self_stake_lamports { + if config.min_self_stake_lamports > 0 + && !config.enforce_min_self_stake + && self_stake < config.min_self_stake_lamports + { validator_notes.push(insufficent_self_stake_msg.clone()); } @@ -1355,7 +1371,7 @@ fn classify( ) } else { assert!(!poor_voters.contains(&identity)); - assert!(not_in_leader_schedule.contains(&identity)); + assert!(config.score_all || not_in_leader_schedule.contains(&identity)); ( // If the validator is not in the leader schedule but was Bonus previously, // maintain Bonus. 
@@ -1443,6 +1459,11 @@ fn classify( "{} validators processed", validator_classifications.len() )); + info!( + "{} validators, {} skipped", + &validator_classifications.len(), + total_skipped + ); Some(validator_classifications) }; @@ -1710,17 +1731,12 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity, stake_state, stake_state_reason, score, commission, active_stake, epoch_credits".into()); + validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,stake_state,stake_state_reason".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); validator_classifications.sort_by(|a, b| a.0.cmp(&b.0)); for (identity, classification) in validator_classifications { - let mut csv = vec![ - identity.to_string(), - format!("\"{:?}\"", classification.stake_state), - ]; - let validator_markdown = validators_markdown.entry(identity).or_default(); validator_markdown.push(format!( @@ -1741,14 +1757,18 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { "* Stake reason: {}", classification.stake_state_reason )); - csv.push(format!( - r#""{}",{},{},{},{}"#, - classification.stake_state_reason, + + let csv_line = format!( + r#""{}",{},{},{},{},"{:?}","{}""#, + identity.to_string(), classification.score, classification.commission, classification.active_stake, - classification.epoch_credits - )); + classification.epoch_credits, + classification.stake_state, + classification.stake_state_reason, + ); + validator_detail_csv.push(csv_line); if let Some(ref stake_action) = classification.stake_action { validator_markdown.push(format!("* Staking activity: {}", stake_action)); @@ -1783,8 +1803,6 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { for note in &classification.notes { validator_markdown.push(format!("* {}", note)); } - - validator_detail_csv.push(csv.join(",")); } // save validator-detail.csv let filename = config.cluster_db_path().join("validator-detail.csv"); diff --git a/sql/fixes.sql b/sql/fixes.sql new file mode 100644 index 00000000..301a7342 --- /dev/null +++ b/sql/fixes.sql @@ -0,0 +1,4 @@ +-- SQLite +--drop table mainnet; +--delete FROM mainnet where identity='identity'; +--select sum(active_stake)/1e9 from mainnet where active_stake is not null; diff --git a/sql/import.sql b/sql/import.sql new file mode 100644 index 00000000..323d6f05 --- /dev/null +++ b/sql/import.sql @@ -0,0 +1,24 @@ +.open ./db/data-mainnet-beta/sqlite3.db +CREATE TABLE IF NOT EXISTS mainnet( + identity TEXT, + score INTEGER, + commission SHORT, + active_stake INTEGER, + epoch_credits INTEGER, + stake_state TEXT, + stake_state_reason TEXT +); +delete from mainnet; +.mode csv +.import ./db/data-mainnet-beta/validator-detail.csv mainnet +--remove header row +delete FROM mainnet where identity='identity'; +--show total stake +select 'validators',count(*),'total staked',sum(active_stake)/1e9 from mainnet; +select 'avg epoch_credits',avg(epoch_credits) from mainnet; +select 'below half avg epoch_credits',count(*), + "stake",sum(active_stake)/1e9 + where epoch_credits < (select avg(epoch_credits)/2 from mainnet) + from mainnet; +.exit + diff --git a/sql/queries.sql b/sql/queries.sql new file mode 100644 index 00000000..813a1f82 --- /dev/null +++ b/sql/queries.sql @@ -0,0 +1,45 @@ +-- SQLite3 +/*SELECT identity, ` stake_state`, ` score`, ` 
commission`, ` active_stake`, ` epoch_credits` +, ` stake_state_reason` +FROM mainnet +order by ` epoch_credits` desc; +*/ +--.schema data + +/*CREATE TABLE mainnet( + identity TEXT, + score INTEGER, + commission SHORT, + active_stake INTEGER, + epoch_credits INTEGER, + stake_state TEXT, + stake_state_reason TEXT +) +*/ +--insert into data +--select * from mainnet +/*select identity, + score, + commission, + active_stake/1e9, + epoch_credits + --max(epoch_credits), + --avg(epoch_credits) + from mainnet +order by active_stake desc; +*/ + +--select sum(active_stake) from mainnet where active_stake is not null; +/* +select 'below half avg epoch_credits',count(*), + "stake",sum(active_stake)/1e9 + from mainnet + where epoch_credits < (select avg(epoch_credits)*0.50 from mainnet) +*/ + +-- if epoch_credits < 60% of max epoch_credits, discard +select * + from mainnet + where epoch_credits < (select max(epoch_credits)*0.60 from mainnet) + order by epoch_credits desc + \ No newline at end of file From f56159cd214c156fec3796e296cd8a6ba07070f0 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 27 Jun 2021 18:26:20 -0300 Subject: [PATCH 04/39] compute score --- bot/src/db.rs | 48 ++++++++++++++++++++++++-- bot/src/main.rs | 79 ++++++++++++++++++++++++++++++++++++------- import-into-sqlite.sh | 1 + sql/import.sql | 19 +++++++---- sql/queries.sql | 48 ++------------------------ sql/test-queries.sql | 51 ++++++++++++++++++++++++++++ 6 files changed, 181 insertions(+), 65 deletions(-) create mode 100644 import-into-sqlite.sh create mode 100644 sql/test-queries.sql diff --git a/bot/src/db.rs b/bot/src/db.rs index 0e3edffb..b53544de 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -14,6 +14,13 @@ use { }, }; +#[derive(Default, Clone, Deserialize, Serialize)] +pub struct ScoreDiscounts { + pub low_credits: bool, + pub insufficient_self_stake: bool, + pub can_halt_the_network_group: bool, +} + #[derive(Default, Clone, Deserialize, Serialize)] pub struct ValidatorClassification { pub identity: Pubkey, // Validator identity @@ -23,10 +30,11 @@ pub struct ValidatorClassification { pub stake_state_reason: String, /// computed score (more granular than ValidatorStakeState) - pub score: u32, + pub epoch_credits: u64, // epoch_credits is the base score + pub score_discounts: ScoreDiscounts, pub commission: u8, pub active_stake: u64, - pub epoch_credits: u64, + pub data_center_concentration: f64, // Summary of the action was taken this epoch to advance the validator's stake pub stake_action: Option, @@ -52,6 +60,42 @@ pub struct ValidatorClassification { } impl ValidatorClassification { + pub fn score(&self) -> u64 { + if self.score_discounts.can_halt_the_network_group + || self.score_discounts.insufficient_self_stake + || self.score_discounts.low_credits + { + 0 + } else { + // if data_center_concentration = 100%, lose all score, + // data_center_concentration = 30%, lose 30% (rounded) + let discount_because_data_center_concentration = + self.epoch_credits * (self.data_center_concentration as u64) / 100; + + // score discounts according to commission + const SCORE_MIN_COMMISSION: u8 = 2; + const SCORE_MAX_COMMISSION: u8 = 12; + const SCORE_DISCOUNT_PER_COMMISSION_POINT: u32 = 10_000; + let discount_because_commission = if self.commission < SCORE_MIN_COMMISSION + || self.commission > SCORE_MAX_COMMISSION + { + // we discourage 0% & 1% commission, because we don't want to incentivize a race-to-the-bottom + // where we promote validators subsidizing/working below cost/"dumping" + // we also discard validators with 
commission > SCORE_MAX_COMMISSION + self.epoch_credits // discount all + } else { + // discount according to commission + (SCORE_DISCOUNT_PER_COMMISSION_POINT + * (self.commission - SCORE_MIN_COMMISSION) as u32) as u64 + }; + + //result + self.epoch_credits + .saturating_sub(discount_because_commission) + .saturating_sub(discount_because_data_center_concentration) + } + } + pub fn stake_state_streak(&self) -> usize { let mut streak = 1; diff --git a/bot/src/main.rs b/bot/src/main.rs index 50a1634f..abc9528b 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -274,6 +274,7 @@ impl Config { db_path: PathBuf::default(), markdown_path: None, dry_run: true, + score_all: false, quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -1077,11 +1078,31 @@ fn classify( .flat_map(|(v, sp)| v.into_iter().map(move |v| (v, sp))) .collect::>(); - let (vote_account_info, total_active_stake) = get_vote_account_info(&rpc_client, last_epoch)?; + let (mut vote_account_info, total_active_stake) = + get_vote_account_info(&rpc_client, last_epoch)?; + + // compute cumulative_stake_limit => active_stake of the last validator inside the can-halt-the-network group + // we later set score=0 to all validators whose stake >= concentrated_validators_stake_limit + // sort by active_stake + vote_account_info.sort_by(|a, b| a.active_stake.cmp(&b.active_stake)); + let mut accumulated: u64 = 0; + let mut count_halt_group: u32 = 0; + let limit: u64 = total_active_stake / 100 * 34; + let mut concentrated_validators_stake_limit = limit; + for info in &vote_account_info { + accumulated += info.active_stake; + count_halt_group += 1; + if accumulated > limit { + concentrated_validators_stake_limit = info.active_stake; + break; + } + } info!( - "validators:{} total_active_stake:{}", - vote_account_info.len(), - total_active_stake + "validators:{} total_active_stake:{}, can_halt_the_network:top {}, pro-decentralization-stake-limit: less than {}", + &vote_account_info.len(), + total_active_stake, + count_halt_group, + lamports_to_sol(concentrated_validators_stake_limit), ); // Note: get_self_stake_by_vote_account is expensive because it does a RPC call for each validator @@ -1240,6 +1261,21 @@ fn classify( continue; } + /* -- ------------------ + -- heuristic data, epoch 196 + -- ------------------ + select max(epoch_credits), min(epoch_credits) + from mainnet + where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) + order by epoch_credits desc; + --max(epoch_credits),min(epoch_credits) + --242503,134403 + --so delta max-min epoch_credits ~= 100k + */ + // we start score with epoch_credits + // let mut score = epoch_credits; + let mut score_discounts = db::ScoreDiscounts::default(); + let participant = identity_to_participant.get(&identity).cloned(); let current_data_center = data_centers @@ -1248,6 +1284,13 @@ fn classify( .cloned() .unwrap_or_default(); + // score: check data center concentration + let data_center_info = data_centers + .info + .iter() + .find(|x| x.id == current_data_center) + .unwrap(); + let previous_classification = previous_epoch_validator_classifications .map(|p| p.get(&identity)) .flatten(); @@ -1299,6 +1342,7 @@ fn classify( && self_stake < config.min_self_stake_lamports { validator_notes.push(insufficent_self_stake_msg.clone()); + score_discounts.insufficient_self_stake = true; //discount all } let insufficent_testnet_participation = testnet_participation @@ -1318,6 +1362,13 @@ fn classify( }) .flatten(); + // no score if below 50% from avg 
credits + score_discounts.low_credits = epoch_credits < min_epoch_credits; + + // no score if in the can-halt-the-network group + score_discounts.can_halt_the_network_group = + active_stake >= concentrated_validators_stake_limit; + let (stake_state, reason) = if let Some(reason) = infrastructure_concentration_destake_reason { @@ -1433,18 +1484,17 @@ fn classify( .unwrap_or_default(); stake_states.insert(0, (stake_state, reason.clone())); - let score: u32 = 0; - validator_classifications.insert( identity, ValidatorClassification { identity, vote_address, stake_state, - score, - commission, epoch_credits, + score_discounts, + commission, active_stake, + data_center_concentration: data_center_info.stake_percent, stake_states: Some(stake_states), stake_action: None, stake_state_reason: reason, @@ -1732,7 +1782,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,stake_state,stake_state_reason".into()); + validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); @@ -1759,13 +1809,18 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { classification.stake_state_reason )); + //identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason let csv_line = format!( - r#""{}",{},{},{},{},"{:?}","{}""#, + r#""{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, identity.to_string(), - classification.score, + classification.score(), classification.commission, - classification.active_stake, + lamports_to_sol(classification.active_stake), classification.epoch_credits, + classification.data_center_concentration, + classification.score_discounts.can_halt_the_network_group, + classification.score_discounts.low_credits, + classification.score_discounts.insufficient_self_stake, classification.stake_state, classification.stake_state_reason, ); diff --git a/import-into-sqlite.sh b/import-into-sqlite.sh new file mode 100644 index 00000000..c831bc58 --- /dev/null +++ b/import-into-sqlite.sh @@ -0,0 +1 @@ +sqlite3 <./sql/import.sql \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index 323d6f05..691411b4 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -1,24 +1,31 @@ .open ./db/data-mainnet-beta/sqlite3.db -CREATE TABLE IF NOT EXISTS mainnet( +DROP TABLE IF EXISTS mainnet; +CREATE TABLE mainnet( identity TEXT, score INTEGER, commission SHORT, active_stake INTEGER, epoch_credits INTEGER, + data_center_concentration DOUBLE, + can_halt_the_network_group BOOL, + low_credits BOOL, + insufficient_self_stake BOOL, stake_state TEXT, stake_state_reason TEXT ); -delete from mainnet; .mode csv .import ./db/data-mainnet-beta/validator-detail.csv mainnet --remove header row delete FROM mainnet where identity='identity'; ---show total stake -select 'validators',count(*),'total staked',sum(active_stake)/1e9 from mainnet; +--add pct column +ALTER table mainnet add pct FLOAT; +UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet),4); +--control, show total staked +select 
'validators',count(*),'total staked',sum(active_stake) from mainnet; select 'avg epoch_credits',avg(epoch_credits) from mainnet; select 'below half avg epoch_credits',count(*), "stake",sum(active_stake)/1e9 + from mainnet where epoch_credits < (select avg(epoch_credits)/2 from mainnet) - from mainnet; + ; .exit - diff --git a/sql/queries.sql b/sql/queries.sql index 813a1f82..2f441696 100644 --- a/sql/queries.sql +++ b/sql/queries.sql @@ -1,45 +1,3 @@ --- SQLite3 -/*SELECT identity, ` stake_state`, ` score`, ` commission`, ` active_stake`, ` epoch_credits` -, ` stake_state_reason` -FROM mainnet -order by ` epoch_credits` desc; -*/ ---.schema data - -/*CREATE TABLE mainnet( - identity TEXT, - score INTEGER, - commission SHORT, - active_stake INTEGER, - epoch_credits INTEGER, - stake_state TEXT, - stake_state_reason TEXT -) -*/ ---insert into data ---select * from mainnet -/*select identity, - score, - commission, - active_stake/1e9, - epoch_credits - --max(epoch_credits), - --avg(epoch_credits) - from mainnet -order by active_stake desc; -*/ - ---select sum(active_stake) from mainnet where active_stake is not null; -/* -select 'below half avg epoch_credits',count(*), - "stake",sum(active_stake)/1e9 - from mainnet - where epoch_credits < (select avg(epoch_credits)*0.50 from mainnet) -*/ - --- if epoch_credits < 60% of max epoch_credits, discard -select * - from mainnet - where epoch_credits < (select max(epoch_credits)*0.60 from mainnet) - order by epoch_credits desc - \ No newline at end of file +select pct, A.* +from mainnet as A +order by pct desc diff --git a/sql/test-queries.sql b/sql/test-queries.sql new file mode 100644 index 00000000..c4a15d7d --- /dev/null +++ b/sql/test-queries.sql @@ -0,0 +1,51 @@ +-- SQLite3 +/*SELECT identity, ` stake_state`, ` score`, ` commission`, ` active_stake`, ` epoch_credits` +, ` stake_state_reason` +FROM mainnet +order by ` epoch_credits` desc; +*/ +--.schema data + +/*CREATE TABLE mainnet( + identity TEXT, + score INTEGER, + commission SHORT, + active_stake INTEGER, + epoch_credits INTEGER, + stake_state TEXT, + stake_state_reason TEXT +) +*/ +--insert into data +--select * from mainnet +/*select identity, + score, + commission, + active_stake/1e9, + epoch_credits + --max(epoch_credits), + --avg(epoch_credits) + from mainnet +order by active_stake desc; +*/ + +--select sum(active_stake) from mainnet where active_stake is not null; +/* +select 'below half avg epoch_credits',count(*), + "stake",sum(active_stake)/1e9 + from mainnet + where epoch_credits < (select avg(epoch_credits)*0.50 from mainnet) +*/ + +-- if epoch_credits < 60% of max epoch_credits, discard +/* +select max(epoch_credits), min(epoch_credits) + from mainnet + where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) + order by epoch_credits desc +*/ + +select (score * 100 / (select sum(score) from mainnet)) pct, A.* +from mainnet A + +--select sum(score) from mainnet \ No newline at end of file From bd6464f38ec7c741bc6118e77583a21def951d84 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 27 Jun 2021 18:45:40 -0300 Subject: [PATCH 05/39] scripts --- score-all.sh | 2 ++ sql/import.sql | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 score-all.sh diff --git a/score-all.sh b/score-all.sh new file mode 100644 index 00000000..d9eb4b88 --- /dev/null +++ b/score-all.sh @@ -0,0 +1,2 @@ +./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta score-all +bash ./import-into-sqlite.sh diff --git a/sql/import.sql b/sql/import.sql index 
691411b4..21e04538 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -24,7 +24,7 @@ UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet), select 'validators',count(*),'total staked',sum(active_stake) from mainnet; select 'avg epoch_credits',avg(epoch_credits) from mainnet; select 'below half avg epoch_credits',count(*), - "stake",sum(active_stake)/1e9 + "stake",sum(active_stake) from mainnet where epoch_credits < (select avg(epoch_credits)/2 from mainnet) ; From a700608147c09902e9bdd47ef7acad72af79a3e5 Mon Sep 17 00:00:00 2001 From: luciotato Date: Wed, 30 Jun 2021 17:10:16 -0300 Subject: [PATCH 06/39] parametrize score, do not combine with --confirm --- bot/src/db.rs | 21 +++++---------------- bot/src/main.rs | 41 +++++++++++++++++++++++++++++++++++++++-- score-all.sh | 3 +-- 3 files changed, 45 insertions(+), 20 deletions(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index b53544de..17fb403c 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -1,5 +1,6 @@ use { crate::{ + Config, data_center_info::{DataCenterId, DataCenterInfo}, generic_stake_pool::ValidatorStakeState, }, @@ -60,10 +61,12 @@ pub struct ValidatorClassification { } impl ValidatorClassification { - pub fn score(&self) -> u64 { + pub fn score(&self, config:&Config) -> u64 { if self.score_discounts.can_halt_the_network_group || self.score_discounts.insufficient_self_stake || self.score_discounts.low_credits + || self.commission > config.score_max_commission + || self.active_stake < config.score_min_stake { 0 } else { @@ -73,21 +76,7 @@ impl ValidatorClassification { self.epoch_credits * (self.data_center_concentration as u64) / 100; // score discounts according to commission - const SCORE_MIN_COMMISSION: u8 = 2; - const SCORE_MAX_COMMISSION: u8 = 12; - const SCORE_DISCOUNT_PER_COMMISSION_POINT: u32 = 10_000; - let discount_because_commission = if self.commission < SCORE_MIN_COMMISSION - || self.commission > SCORE_MAX_COMMISSION - { - // we discourage 0% & 1% commission, because we don't want to incentivize a race-to-the-bottom - // where we promote validators subsidizing/working below cost/"dumping" - // we also discard validators with commission > SCORE_MAX_COMMISSION - self.epoch_credits // discount all - } else { - // discount according to commission - (SCORE_DISCOUNT_PER_COMMISSION_POINT - * (self.commission - SCORE_MIN_COMMISSION) as u32) as u64 - }; + let discount_because_commission = (self.commission as u32 * config.score_commission_discount) as u64; //result self.epoch_credits diff --git a/bot/src/main.rs b/bot/src/main.rs index abc9528b..75d5d47d 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -187,7 +187,7 @@ impl std::fmt::Display for Cluster { } #[derive(Debug)] -struct Config { +pub struct Config { json_rpc_url: String, cluster: Cluster, db_path: PathBuf, @@ -197,6 +197,12 @@ struct Config { /// compute score foll all validators in the cluster score_all: bool, + /// max commission accepted to score (0-100) + score_max_commission: u8, + /// score discount per commission point + score_commission_discount: u32, + /// score min stake required + score_min_stake: u64, /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch @@ -275,6 +281,9 @@ impl Config { markdown_path: None, dry_run: true, score_all: false, + score_max_commission: 8, + score_commission_discount: 12_000, + score_min_stake: sol_to_lamports(75.0), quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -600,10 
+609,29 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Cluster::MainnetBeta, @@ -671,6 +699,9 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option None, }; + // guard - let's make sure score-all can not be set for distribution + if score_all && (stake_pool.is_some() || !dry_run) { + error!("DO NOT combine score-all with `--confirm` or `stake-pool`"); + process::exit(1); + } + Ok((config, rpc_client, stake_pool)) } @@ -1813,7 +1850,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { let csv_line = format!( r#""{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, identity.to_string(), - classification.score(), + classification.score(config), classification.commission, lamports_to_sol(classification.active_stake), classification.epoch_credits, diff --git a/score-all.sh b/score-all.sh index d9eb4b88..6a154082 100644 --- a/score-all.sh +++ b/score-all.sh @@ -1,2 +1 @@ -./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta score-all -bash ./import-into-sqlite.sh +./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* score-all From 1f82f09e3cbcf555300a9e4d0e8c20c8a7427c24 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 3 Jul 2021 14:04:24 -0300 Subject: [PATCH 07/39] fix score-all arguments --- bot/src/db.rs | 13 +++++++------ bot/src/main.rs | 32 +++++++++++++++++++++----------- score-all.sh | 7 ++++++- sql/import.sql | 25 +++++++++++++++++++------ 4 files changed, 53 insertions(+), 24 deletions(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index 17fb403c..dc0dba47 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -1,8 +1,8 @@ use { crate::{ - Config, data_center_info::{DataCenterId, DataCenterInfo}, generic_stake_pool::ValidatorStakeState, + Config, }, log::*, serde::{Deserialize, Serialize}, @@ -61,7 +61,7 @@ pub struct ValidatorClassification { } impl ValidatorClassification { - pub fn score(&self, config:&Config) -> u64 { + pub fn score(&self, config: &Config) -> u64 { if self.score_discounts.can_halt_the_network_group || self.score_discounts.insufficient_self_stake || self.score_discounts.low_credits @@ -70,13 +70,14 @@ impl ValidatorClassification { { 0 } else { - // if data_center_concentration = 100%, lose all score, - // data_center_concentration = 30%, lose 30% (rounded) + // if data_center_concentration = 25%, lose all score, + // data_center_concentration = 10%, lose 40% (rounded) let discount_because_data_center_concentration = - self.epoch_credits * (self.data_center_concentration as u64) / 100; + self.epoch_credits * (self.data_center_concentration as u64 * 4) / 100; // score discounts according to commission - let discount_because_commission = (self.commission as u32 * config.score_commission_discount) as u64; + let discount_because_commission = + (self.commission as u32 * config.score_commission_discount) as u64; //result self.epoch_credits diff --git a/bot/src/main.rs b/bot/src/main.rs index 75d5d47d..313892b2 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -304,7 +304,9 @@ impl Config { } fn cluster_db_path_for(&self, cluster: Cluster) -> PathBuf { - self.db_path.join(format!("data-{}", cluster)) + // store db on different dir for score-all to not mess with SPL-stake-pool distribution usage + let dir = if self.score_all { "score-all" } else { "data" }; + self.db_path.join(format!("{}-{}", dir, cluster)) } fn cluster_db_path(&self) -> PathBuf { @@ -611,14 +613,14 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option 
Cluster::MainnetBeta, @@ -692,6 +688,18 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option ( + true, + value_t!(matches, "score_max_commission", u8).unwrap_or(10), + value_t!(matches, "commission_point_discount", u32).unwrap_or(16_000), + value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), + ), + _ => (false, 0, 0, 0), + }; + let config = Config { json_rpc_url, cluster, @@ -1745,7 +1753,7 @@ fn main() -> BoxResult<()> { fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { let markdown_path = match config.markdown_path.as_ref() { Some(d) => d, - None => return Ok(()), + None => return Ok(()), // exit if !matches.is_present("markdown") }; fs::create_dir_all(&markdown_path)?; @@ -1908,7 +1916,9 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { for (identity, validator_markdown) in validators_markdown { let markdown = validator_markdown.join("\n"); let filename = markdown_path.join(format!("Validator-{}.md", identity)); - info!("Writing {}", filename.display()); + if !config.score_all { + info!("Writing {}", filename.display()) + } let mut file = File::create(filename)?; file.write_all(&markdown.into_bytes())?; } diff --git a/score-all.sh b/score-all.sh index 6a154082..5403977c 100644 --- a/score-all.sh +++ b/score-all.sh @@ -1 +1,6 @@ -./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* score-all +./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* \ + --min-epoch-credit-percentage-of-average 0 \ + score-all \ + --score-max-commission 10 \ + --commission-point-discount 15000 + diff --git a/sql/import.sql b/sql/import.sql index 21e04538..df09cd04 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -1,4 +1,4 @@ -.open ./db/data-mainnet-beta/sqlite3.db +.open ./db/score-all-mainnet-beta/sqlite3.db DROP TABLE IF EXISTS mainnet; CREATE TABLE mainnet( identity TEXT, @@ -14,7 +14,7 @@ CREATE TABLE mainnet( stake_state_reason TEXT ); .mode csv -.import ./db/data-mainnet-beta/validator-detail.csv mainnet +.import ./db/score-all-mainnet-beta/validator-detail.csv mainnet --remove header row delete FROM mainnet where identity='identity'; --add pct column @@ -22,10 +22,23 @@ ALTER table mainnet add pct FLOAT; UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet),4); --control, show total staked select 'validators',count(*),'total staked',sum(active_stake) from mainnet; -select 'avg epoch_credits',avg(epoch_credits) from mainnet; -select 'below half avg epoch_credits',count(*), - "stake",sum(active_stake) +select 'validators with 0 score count:',count(*), + "sum stake",sum(active_stake) from mainnet - where epoch_credits < (select avg(epoch_credits)/2 from mainnet) + where pct=0 ; +select 'validators with non-zero score count:',count(*), + "sum stake",sum(active_stake) + from mainnet + where pct>0 + ; +select 'avg epoch_credits',avg(epoch_credits), + 'max epoch credits',max(epoch_credits), + 'min epoch credits',min(epoch_credits), min(epoch_credits)/avg(epoch_credits)*100, "% of avg", + char(10) || 'max score',max(score), + 'min score',min(score), + char(10) || 'max pct',max(pct), + 'min pct',min(pct) + from mainnet + where pct>0; .exit From b562d359593899a5d0f5860b50b09ab8d112ad41 Mon Sep 17 00:00:00 2001 From: "Lucio M. 
Tato"
Date: Sat, 3 Jul 2021 14:13:57 -0300
Subject: [PATCH 08/39] score-all command

* make stake pool optional, score-all command

* generate validator-detail.csv

* skip heavy rpc-call, ensure all validators scored

* compute score

* scripts

* parametrize score, do not combine with --confirm

* fix score-all arguments
---
 .gitignore            |   1 +
 bot/src/db.rs         |  40 ++++++++
 bot/src/main.rs       | 233 +++++++++++++++++++++++++++++++++++-------
 import-into-sqlite.sh |   1 +
 score-all.sh          |   6 ++
 sql/fixes.sql         |   4 +
 sql/import.sql        |  44 ++++++++
 sql/queries.sql       |   3 +
 sql/test-queries.sql  |  51 +++++++++
 9 files changed, 347 insertions(+), 36 deletions(-)
 create mode 100644 import-into-sqlite.sh
 create mode 100644 score-all.sh
 create mode 100644 sql/fixes.sql
 create mode 100644 sql/import.sql
 create mode 100644 sql/queries.sql
 create mode 100644 sql/test-queries.sql

diff --git a/.gitignore b/.gitignore
index fd552c93..b988c1dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@
 /registry-cli
 /db
 test-ledger/
+.vscode

diff --git a/bot/src/db.rs b/bot/src/db.rs
index b96d6558..0fe72ab8 100644
--- a/bot/src/db.rs
+++ b/bot/src/db.rs
@@ -2,6 +2,7 @@ use {
     crate::{
         data_center_info::{DataCenterId, DataCenterInfo},
         generic_stake_pool::ValidatorStakeState,
+        Config,
     },
     log::*,
     serde::{Deserialize, Serialize},
@@ -14,6 +15,13 @@ use {
     },
 };

+#[derive(Default, Clone, Deserialize, Serialize)]
+pub struct ScoreDiscounts {
+    pub low_credits: bool,
+    pub insufficient_self_stake: bool,
+    pub can_halt_the_network_group: bool,
+}
+
 #[derive(Default, Clone, Deserialize, Serialize)]
 pub struct ValidatorClassification {
     pub identity: Pubkey, // Validator identity
@@ -22,6 +30,13 @@ pub struct ValidatorClassification {
     pub stake_state: ValidatorStakeState,
     pub stake_state_reason: String,

+    /// computed score (more granular than ValidatorStakeState)
+    pub epoch_credits: u64, // epoch_credits is the base score
+    pub score_discounts: ScoreDiscounts,
+    pub commission: u8,
+    pub active_stake: u64,
+    pub data_center_concentration: f64,
+
     // Summary of the action was taken this epoch to advance the validator's stake
     pub stake_action: Option<String>,

@@ -46,6 +61,31 @@ pub struct ValidatorClassification {
 }

 impl ValidatorClassification {
+    pub fn score(&self, config: &Config) -> u64 {
+        if self.score_discounts.can_halt_the_network_group
+            || self.score_discounts.insufficient_self_stake
+            || self.score_discounts.low_credits
+            || self.commission > config.score_max_commission
+            || self.active_stake < config.score_min_stake
+        {
+            0
+        } else {
+            // if data_center_concentration = 25%, lose all score,
+            // data_center_concentration = 10%, lose 40% (rounded)
+            let discount_because_data_center_concentration =
+                self.epoch_credits * (self.data_center_concentration as u64 * 4) / 100;
+
+            // score discounts according to commission
+            let discount_because_commission =
+                (self.commission as u32 * config.score_commission_discount) as u64;
+
+            //result
+            self.epoch_credits
+                .saturating_sub(discount_because_commission)
+                .saturating_sub(discount_because_data_center_concentration)
+        }
+    }
+
     pub fn stake_state_streak(&self) -> usize {
         let mut streak = 1;

diff --git a/bot/src/main.rs b/bot/src/main.rs
index d8e31a34..5f34cecc 100644
--- a/bot/src/main.rs
+++ b/bot/src/main.rs
@@ -187,7 +187,7 @@ impl std::fmt::Display for Cluster {
 }

 #[derive(Debug)]
-struct Config {
+pub struct Config {
     json_rpc_url: String,
     cluster: Cluster,
     db_path: PathBuf,
@@ -196,6 +196,15 @@ struct Config {

     dry_run: bool,

+    /// compute score for all validators in the cluster
+    score_all:
bool, + /// max commission accepted to score (0-100) + score_max_commission: u8, + /// score discount per commission point + score_commission_discount: u32, + /// score min stake required + score_min_stake: u64, + /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch quality_block_producer_percentage: usize, @@ -273,6 +282,10 @@ impl Config { require_classification: false, markdown_path: None, dry_run: true, + score_all: false, + score_max_commission: 8, + score_commission_discount: 12_000, + score_min_stake: sol_to_lamports(75.0), quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -293,7 +306,9 @@ impl Config { } fn cluster_db_path_for(&self, cluster: Cluster) -> PathBuf { - self.db_path.join(format!("data-{}", cluster)) + // store db on different dir for score-all to not mess with SPL-stake-pool distribution usage + let dir = if self.score_all { "score-all" } else { "data" }; + self.db_path.join(format!("{}-{}", dir, cluster)) } fn cluster_db_path(&self) -> PathBuf { @@ -317,7 +332,7 @@ fn app_version() -> String { }) } -fn get_config() -> BoxResult<(Config, RpcClient, Box)> { +fn get_config() -> BoxResult<(Config, RpcClient, Option>)> { let default_confirmed_block_cache_path = default_confirmed_block_cache_path() .to_str() .unwrap() @@ -602,6 +617,23 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { .validator(is_amount) ) ) + .subcommand( + SubCommand::with_name("score-all").about("Score all validators in the cluster") + .arg( + Arg::with_name("score_max_commission") + .long("score-max-commission") + .takes_value(true) + .required(false) + .help("scoring max accepted commission") + ) + .arg( + Arg::with_name("commission_point_discount") + .long ("commission-point-discount") + .takes_value(true) + .required(false) + .help("score to discount for each commission point") + ) + ) .get_matches(); let dry_run = !matches.is_present("confirm"); @@ -639,7 +671,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { Cluster::MainnetBeta => value_t!(matches, "json_rpc_url", String) .unwrap_or_else(|_| "http://api.mainnet-beta.solana.com".into()), Cluster::Testnet => value_t!(matches, "json_rpc_url", String) - .unwrap_or_else(|_| "http://testnet.solana.com".into()), + .unwrap_or_else(|_| "http://api.testnet.solana.com".into()), }; let db_path = value_t_or_exit!(matches, "db_path", PathBuf); let markdown_path = if matches.is_present("markdown") { @@ -665,6 +697,18 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { ) .unwrap(); + // score-all command and arguments + let (score_all, score_max_commission, score_commission_discount, score_min_stake) = + match matches.subcommand() { + ("score-all", Some(matches)) => ( + true, + value_t!(matches, "score_max_commission", u8).unwrap_or(10), + value_t!(matches, "commission_point_discount", u32).unwrap_or(16_000), + value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), + ), + _ => (false, 0, 0, 0), + }; + let config = Config { json_rpc_url, cluster, @@ -672,6 +716,10 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { require_classification, markdown_path, dry_run, + score_all, + score_max_commission, + score_commission_discount, + score_min_stake, quality_block_producer_percentage, max_poor_block_producer_percentage, max_commission, @@ -699,7 +747,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { .get_health() .map_err(|err| format!("RPC endpoint is unhealthy: {:?}", err))?; - let 
stake_pool: Box = match matches.subcommand() { + let stake_pool: Option> = match matches.subcommand() { ("stake-pool-v0", Some(matches)) => { let authorized_staker = keypair_of(&matches, "authorized_staker").unwrap(); let reserve_stake_address = pubkey_of(&matches, "reserve_stake_address").unwrap(); @@ -707,29 +755,35 @@ fn get_config() -> BoxResult<(Config, RpcClient, Box)> { sol_to_lamports(value_t_or_exit!(matches, "min_reserve_stake_balance", f64)); let baseline_stake_amount = sol_to_lamports(value_t_or_exit!(matches, "baseline_stake_amount", f64)); - Box::new(stake_pool_v0::new( + Some(Box::new(stake_pool_v0::new( &rpc_client, authorized_staker, baseline_stake_amount, reserve_stake_address, min_reserve_stake_balance, - )?) + )?)) } ("stake-pool", Some(matches)) => { let authorized_staker = keypair_of(&matches, "authorized_staker").unwrap(); let pool_address = pubkey_of(&matches, "pool_address").unwrap(); let baseline_stake_amount = sol_to_lamports(value_t_or_exit!(matches, "baseline_stake_amount", f64)); - Box::new(stake_pool::new( + Some(Box::new(stake_pool::new( &rpc_client, authorized_staker, pool_address, baseline_stake_amount, - )?) + )?)) } - _ => unreachable!(), + _ => None, }; + // guard - let's make sure score-all can not be set for distribution + if score_all && (stake_pool.is_some() || !dry_run) { + error!("DO NOT combine score-all with `--confirm` or `stake-pool`"); + process::exit(1); + } + Ok((config, rpc_client, stake_pool)) } @@ -946,6 +1000,7 @@ fn get_self_stake_by_vote_account( info!("Fetching stake accounts..."); let all_stake_accounts = rpc_client.get_program_accounts(&solana_stake_program::id())?; + info!("{} stake accounts", all_stake_accounts.len()); let stake_history_account = rpc_client .get_account_with_commitment(&sysvar::stake_history::id(), CommitmentConfig::finalized())? @@ -1078,10 +1133,40 @@ fn classify( .flat_map(|(v, sp)| v.into_iter().map(move |v| (v, sp))) .collect::>(); - let (vote_account_info, total_active_stake) = get_vote_account_info(&rpc_client, last_epoch)?; + let (mut vote_account_info, total_active_stake) = + get_vote_account_info(&rpc_client, last_epoch)?; + + // compute cumulative_stake_limit => active_stake of the last validator inside the can-halt-the-network group + // we later set score=0 to all validators whose stake >= concentrated_validators_stake_limit + // sort by active_stake + vote_account_info.sort_by(|a, b| a.active_stake.cmp(&b.active_stake)); + let mut accumulated: u64 = 0; + let mut count_halt_group: u32 = 0; + let limit: u64 = total_active_stake / 100 * 34; + let mut concentrated_validators_stake_limit = limit; + for info in &vote_account_info { + accumulated += info.active_stake; + count_halt_group += 1; + if accumulated > limit { + concentrated_validators_stake_limit = info.active_stake; + break; + } + } + info!( + "validators:{} total_active_stake:{}, can_halt_the_network:top {}, pro-decentralization-stake-limit: less than {}", + &vote_account_info.len(), + total_active_stake, + count_halt_group, + lamports_to_sol(concentrated_validators_stake_limit), + ); - let self_stake_by_vote_account = - get_self_stake_by_vote_account(rpc_client, epoch, &vote_account_info)?; + // Note: get_self_stake_by_vote_account is expensive because it does a RPC call for each validator + // we skip this data gathering if config.min_self_stake_lamports==0 + let self_stake_by_vote_account = if config.min_self_stake_lamports > 0 { + get_self_stake_by_vote_account(rpc_client, epoch, &vote_account_info)? 
+ } else { + HashMap::new() + }; let (cluster_nodes_with_old_version, min_release_version): (HashMap, _) = match config.min_release_version { @@ -1091,7 +1176,7 @@ fn classify( .into_iter() .filter_map(|rpc_contact_info| { if let Ok(identity) = Pubkey::from_str(&rpc_contact_info.pubkey) { - if validator_list.contains(&identity) { + if config.score_all || validator_list.contains(&identity) { if let Some(ref version) = rpc_contact_info.version { if let Ok(semver) = semver::Version::parse(version) { if semver < *min_release_version { @@ -1216,6 +1301,7 @@ fn classify( None } else { let mut validator_classifications = HashMap::new(); + let mut total_skipped: u32 = 0; for VoteAccountInfo { identity, @@ -1225,10 +1311,26 @@ fn classify( epoch_credits, } in vote_account_info { - if !validator_list.contains(&identity) { + if !config.score_all && !validator_list.contains(&identity) { + total_skipped += 1; continue; } + /* -- ------------------ + -- heuristic data, epoch 196 + -- ------------------ + select max(epoch_credits), min(epoch_credits) + from mainnet + where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) + order by epoch_credits desc; + --max(epoch_credits),min(epoch_credits) + --242503,134403 + --so delta max-min epoch_credits ~= 100k + */ + // we start score with epoch_credits + // let mut score = epoch_credits; + let mut score_discounts = db::ScoreDiscounts::default(); + let participant = identity_to_participant.get(&identity).cloned(); let current_data_center = data_centers @@ -1237,6 +1339,13 @@ fn classify( .cloned() .unwrap_or_default(); + // score: check data center concentration + let data_center_info = data_centers + .info + .iter() + .find(|x| x.id == current_data_center) + .unwrap(); + let previous_classification = previous_epoch_validator_classifications .map(|p| p.get(&identity)) .flatten(); @@ -1282,9 +1391,13 @@ fn classify( }); let insufficent_self_stake_msg = - format!("Insufficient self stake: {}", Sol(self_stake)); - if !config.enforce_min_self_stake && self_stake < config.min_self_stake_lamports { + format!("insufficient self stake: {}", Sol(self_stake)); + if config.min_self_stake_lamports > 0 + && !config.enforce_min_self_stake + && self_stake < config.min_self_stake_lamports + { validator_notes.push(insufficent_self_stake_msg.clone()); + score_discounts.insufficient_self_stake = true; //discount all } let insufficent_testnet_participation = testnet_participation @@ -1304,6 +1417,13 @@ fn classify( }) .flatten(); + // no score if below 50% from avg credits + score_discounts.low_credits = epoch_credits < min_epoch_credits; + + // no score if in the can-halt-the-network group + score_discounts.can_halt_the_network_group = + active_stake >= concentrated_validators_stake_limit; + let (stake_state, reason) = if let Some(reason) = infrastructure_concentration_destake_reason { @@ -1357,7 +1477,7 @@ fn classify( ) } else { assert!(!poor_voters.contains(&identity)); - assert!(not_in_leader_schedule.contains(&identity)); + assert!(config.score_all || not_in_leader_schedule.contains(&identity)); ( // If the validator is not in the leader schedule but was Bonus previously, // maintain Bonus. 
@@ -1425,6 +1545,11 @@ fn classify( identity, vote_address, stake_state, + epoch_credits, + score_discounts, + commission, + active_stake, + data_center_concentration: data_center_info.stake_percent, stake_states: Some(stake_states), stake_action: None, stake_state_reason: reason, @@ -1440,6 +1565,11 @@ fn classify( "{} validators processed", validator_classifications.len() )); + info!( + "{} validators, {} skipped", + &validator_classifications.len(), + total_skipped + ); Some(validator_classifications) }; @@ -1455,7 +1585,7 @@ fn classify( fn main() -> BoxResult<()> { solana_logger::setup_with_default("solana=info"); - let (config, rpc_client, mut stake_pool) = get_config()?; + let (config, rpc_client, optional_stake_pool) = get_config()?; info!("Loading participants..."); let participants = get_participants_with_state( @@ -1592,21 +1722,22 @@ fn main() -> BoxResult<()> { }) .collect(); - let (stake_pool_notes, validator_stake_actions, unfunded_validators) = - stake_pool.apply(&rpc_client, config.dry_run, &desired_validator_stake)?; - notifications.extend(stake_pool_notes.clone()); - epoch_classification.notes.extend(stake_pool_notes); - - for identity in unfunded_validators { - validator_classifications - .entry(identity) - .and_modify(|e| e.prioritize_funding_in_next_epoch = Some(true)); - } + if let Some(mut stake_pool) = optional_stake_pool { + let (stake_pool_notes, validator_stake_actions, unfunded_validators) = + stake_pool.apply(&rpc_client, config.dry_run, &desired_validator_stake)?; + notifications.extend(stake_pool_notes.clone()); + epoch_classification.notes.extend(stake_pool_notes); + for identity in unfunded_validators { + validator_classifications + .entry(identity) + .and_modify(|e| e.prioritize_funding_in_next_epoch = Some(true)); + } - for (identity, stake_action) in validator_stake_actions { - validator_classifications - .entry(identity) - .and_modify(|e| e.stake_action = Some(stake_action)); + for (identity, stake_action) in validator_stake_actions { + validator_classifications + .entry(identity) + .and_modify(|e| e.stake_action = Some(stake_action)); + } } validator_notes.sort(); @@ -1618,7 +1749,6 @@ fn main() -> BoxResult<()> { if first_time { EpochClassification::new(epoch_classification).save(epoch, &config.cluster_db_path())?; - generate_markdown(epoch, &config)?; // Only notify the user if this is the first run for this epoch for notification in notifications { @@ -1627,13 +1757,16 @@ fn main() -> BoxResult<()> { } } + //conditional to: matches.is_present("markdown") + generate_markdown(epoch, &config)?; + Ok(()) } fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { let markdown_path = match config.markdown_path.as_ref() { Some(d) => d, - None => return Ok(()), + None => return Ok(()), // exit if !matches.is_present("markdown") }; fs::create_dir_all(&markdown_path)?; @@ -1706,6 +1839,9 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { + let mut validator_detail_csv = vec![]; + validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); + let mut validator_classifications = validator_classifications.iter().collect::>(); validator_classifications.sort_by(|a, b| a.0.cmp(&b.0)); @@ -1730,6 +1866,24 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { "* Stake 
reason: {}", classification.stake_state_reason )); + + //identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason + let csv_line = format!( + r#""{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, + identity.to_string(), + classification.score(config), + classification.commission, + lamports_to_sol(classification.active_stake), + classification.epoch_credits, + classification.data_center_concentration, + classification.score_discounts.can_halt_the_network_group, + classification.score_discounts.low_credits, + classification.score_discounts.insufficient_self_stake, + classification.stake_state, + classification.stake_state_reason, + ); + validator_detail_csv.push(csv_line); + if let Some(ref stake_action) = classification.stake_action { validator_markdown.push(format!("* Staking activity: {}", stake_action)); } @@ -1764,13 +1918,20 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { validator_markdown.push(format!("* {}", note)); } } + // save validator-detail.csv + let filename = config.cluster_db_path().join("validator-detail.csv"); + info!("Writing {}", filename.display()); + let mut file = File::create(filename)?; + file.write_all(&validator_detail_csv.join("\n").into_bytes())?; } } for (identity, validator_markdown) in validators_markdown { let markdown = validator_markdown.join("\n"); let filename = markdown_path.join(format!("Validator-{}.md", identity)); - info!("Writing {}", filename.display()); + if !config.score_all { + info!("Writing {}", filename.display()) + } let mut file = File::create(filename)?; file.write_all(&markdown.into_bytes())?; } diff --git a/import-into-sqlite.sh b/import-into-sqlite.sh new file mode 100644 index 00000000..c831bc58 --- /dev/null +++ b/import-into-sqlite.sh @@ -0,0 +1 @@ +sqlite3 <./sql/import.sql \ No newline at end of file diff --git a/score-all.sh b/score-all.sh new file mode 100644 index 00000000..5403977c --- /dev/null +++ b/score-all.sh @@ -0,0 +1,6 @@ +./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* \ + --min-epoch-credit-percentage-of-average 0 \ + score-all \ + --score-max-commission 10 \ + --commission-point-discount 15000 + diff --git a/sql/fixes.sql b/sql/fixes.sql new file mode 100644 index 00000000..301a7342 --- /dev/null +++ b/sql/fixes.sql @@ -0,0 +1,4 @@ +-- SQLite +--drop table mainnet; +--delete FROM mainnet where identity='identity'; +--select sum(active_stake)/1e9 from mainnet where active_stake is not null; diff --git a/sql/import.sql b/sql/import.sql new file mode 100644 index 00000000..df09cd04 --- /dev/null +++ b/sql/import.sql @@ -0,0 +1,44 @@ +.open ./db/score-all-mainnet-beta/sqlite3.db +DROP TABLE IF EXISTS mainnet; +CREATE TABLE mainnet( + identity TEXT, + score INTEGER, + commission SHORT, + active_stake INTEGER, + epoch_credits INTEGER, + data_center_concentration DOUBLE, + can_halt_the_network_group BOOL, + low_credits BOOL, + insufficient_self_stake BOOL, + stake_state TEXT, + stake_state_reason TEXT +); +.mode csv +.import ./db/score-all-mainnet-beta/validator-detail.csv mainnet +--remove header row +delete FROM mainnet where identity='identity'; +--add pct column +ALTER table mainnet add pct FLOAT; +UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet),4); +--control, show total staked +select 'validators',count(*),'total staked',sum(active_stake) from mainnet; +select 'validators with 0 score count:',count(*), + "sum 
stake",sum(active_stake) + from mainnet + where pct=0 + ; +select 'validators with non-zero score count:',count(*), + "sum stake",sum(active_stake) + from mainnet + where pct>0 + ; +select 'avg epoch_credits',avg(epoch_credits), + 'max epoch credits',max(epoch_credits), + 'min epoch credits',min(epoch_credits), min(epoch_credits)/avg(epoch_credits)*100, "% of avg", + char(10) || 'max score',max(score), + 'min score',min(score), + char(10) || 'max pct',max(pct), + 'min pct',min(pct) + from mainnet + where pct>0; +.exit diff --git a/sql/queries.sql b/sql/queries.sql new file mode 100644 index 00000000..2f441696 --- /dev/null +++ b/sql/queries.sql @@ -0,0 +1,3 @@ +select pct, A.* +from mainnet as A +order by pct desc diff --git a/sql/test-queries.sql b/sql/test-queries.sql new file mode 100644 index 00000000..c4a15d7d --- /dev/null +++ b/sql/test-queries.sql @@ -0,0 +1,51 @@ +-- SQLite3 +/*SELECT identity, ` stake_state`, ` score`, ` commission`, ` active_stake`, ` epoch_credits` +, ` stake_state_reason` +FROM mainnet +order by ` epoch_credits` desc; +*/ +--.schema data + +/*CREATE TABLE mainnet( + identity TEXT, + score INTEGER, + commission SHORT, + active_stake INTEGER, + epoch_credits INTEGER, + stake_state TEXT, + stake_state_reason TEXT +) +*/ +--insert into data +--select * from mainnet +/*select identity, + score, + commission, + active_stake/1e9, + epoch_credits + --max(epoch_credits), + --avg(epoch_credits) + from mainnet +order by active_stake desc; +*/ + +--select sum(active_stake) from mainnet where active_stake is not null; +/* +select 'below half avg epoch_credits',count(*), + "stake",sum(active_stake)/1e9 + from mainnet + where epoch_credits < (select avg(epoch_credits)*0.50 from mainnet) +*/ + +-- if epoch_credits < 60% of max epoch_credits, discard +/* +select max(epoch_credits), min(epoch_credits) + from mainnet + where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) + order by epoch_credits desc +*/ + +select (score * 100 / (select sum(score) from mainnet)) pct, A.* +from mainnet A + +--select sum(score) from mainnet \ No newline at end of file From a4b97a20570123b0a8efa6138e5f05714aba693a Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 01:30:50 -0300 Subject: [PATCH 09/39] add vote-address --- bot/src/main.rs | 13 ++++++++----- score-all.sh | 3 ++- sql/import.sql | 4 +++- sql/queries.sql | 1 + 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 313892b2..83bf7cf3 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1827,7 +1827,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); + validator_detail_csv.push("identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); @@ -1854,10 +1854,11 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { classification.stake_state_reason )); - 
//identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason + //identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason let csv_line = format!( - r#""{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, + r#""{}","{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, identity.to_string(), + classification.vote_address, classification.score(config), classification.commission, lamports_to_sol(classification.active_stake), @@ -1905,8 +1906,10 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { validator_markdown.push(format!("* {}", note)); } } - // save validator-detail.csv - let filename = config.cluster_db_path().join("validator-detail.csv"); + // save {cluster}-validator-detail.csv (repeating the cluster in the name is intentional) + let filename = config + .cluster_db_path() + .join(format!("{}-validator-detail.csv", config.cluster)); info!("Writing {}", filename.display()); let mut file = File::create(filename)?; file.write_all(&validator_detail_csv.join("\n").into_bytes())?; diff --git a/score-all.sh b/score-all.sh index 5403977c..cb432739 100644 --- a/score-all.sh +++ b/score-all.sh @@ -1,6 +1,7 @@ -./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* \ +./target/debug/solana-stake-o-matic --markdown $* \ --min-epoch-credit-percentage-of-average 0 \ score-all \ --score-max-commission 10 \ --commission-point-discount 15000 +# --cluster mainnet-beta \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index df09cd04..0c5b6b38 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -2,6 +2,7 @@ DROP TABLE IF EXISTS mainnet; CREATE TABLE mainnet( identity TEXT, + vote_address TEXT, score INTEGER, commission SHORT, active_stake INTEGER, @@ -14,7 +15,8 @@ CREATE TABLE mainnet( stake_state_reason TEXT ); .mode csv -.import ./db/score-all-mainnet-beta/validator-detail.csv mainnet +--.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv mainnet +.import ./db/score-all-testnet/testnet-validator-detail.csv mainnet --remove header row delete FROM mainnet where identity='identity'; --add pct column diff --git a/sql/queries.sql b/sql/queries.sql index 2f441696..815ef75f 100644 --- a/sql/queries.sql +++ b/sql/queries.sql @@ -1,3 +1,4 @@ select pct, A.* from mainnet as A +where score>0 order by pct desc From b9299ca75374f9e0338c3193c72d1fe8ae370a97 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 01:35:48 -0300 Subject: [PATCH 10/39] do not include .vscode --- .gitignore | 1 + Cargo.lock | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index fd552c93..af313745 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ /registry-cli /db test-ledger/ +.vscode/ \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 8d9b3096..97ba2c54 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,5 +1,7 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
+version = 3 + [[package]] name = "Inflector" version = "0.11.4" @@ -3963,7 +3965,7 @@ dependencies = [ [[package]] name = "solana-foundation-delegation-program-cli" -version = "1.0.3" +version = "1.0.4" dependencies = [ "clap", "solana-account-decoder", From 4fdaed2cbfa2c6f06004d5f5ef40eef985588f70 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 01:49:59 -0300 Subject: [PATCH 11/39] remove comment block as requested --- bot/src/main.rs | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 83bf7cf3..9a21a208 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1306,19 +1306,6 @@ fn classify( continue; } - /* -- ------------------ - -- heuristic data, epoch 196 - -- ------------------ - select max(epoch_credits), min(epoch_credits) - from mainnet - where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) - order by epoch_credits desc; - --max(epoch_credits),min(epoch_credits) - --242503,134403 - --so delta max-min epoch_credits ~= 100k - */ - // we start score with epoch_credits - // let mut score = epoch_credits; let mut score_discounts = db::ScoreDiscounts::default(); let participant = identity_to_participant.get(&identity).cloned(); From f971ff4e666d4ab6ae2bfdcf2e7b9cb5e033feb7 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 26 Jun 2021 17:42:56 -0300 Subject: [PATCH 12/39] make stake pool optional, score-all command --- bot/src/main.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/src/main.rs b/bot/src/main.rs index 7be03e8a..efebf4d2 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -637,6 +637,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Cluster::MainnetBeta, "testnet" => Cluster::Testnet, From 4608fa937c06cd24ead758259244dfb9dd7e83cf Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 27 Jun 2021 13:49:36 -0300 Subject: [PATCH 13/39] skip heavy rpc-call, ensure all validators scored --- bot/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index efebf4d2..c2bc0357 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -637,7 +637,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Cluster::MainnetBeta, "testnet" => Cluster::Testnet, From 8a3df58537069c4c5ef5d666534bf5b7595811bf Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 01:30:50 -0300 Subject: [PATCH 14/39] add vote-address --- bot/src/main.rs | 13 ++++++++----- score-all.sh | 3 ++- sql/import.sql | 4 +++- sql/queries.sql | 1 + 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index c2bc0357..d3ce7cdf 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1866,7 +1866,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); + validator_detail_csv.push("identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); @@ -1893,10 +1893,11 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { 
classification.stake_state_reason )); - //identity,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason + //identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason let csv_line = format!( - r#""{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, + r#""{}","{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, identity.to_string(), + classification.vote_address, classification.score(config), classification.commission, lamports_to_sol(classification.active_stake), @@ -1943,8 +1944,10 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { validator_markdown.push(format!("* {}", note)); } } - // save validator-detail.csv - let filename = config.cluster_db_path().join("validator-detail.csv"); + // save {cluster}-validator-detail.csv (repeating the cluster in the name is intentional) + let filename = config + .cluster_db_path() + .join(format!("{}-validator-detail.csv", config.cluster)); info!("Writing {}", filename.display()); let mut file = File::create(filename)?; file.write_all(&validator_detail_csv.join("\n").into_bytes())?; diff --git a/score-all.sh b/score-all.sh index 5403977c..cb432739 100644 --- a/score-all.sh +++ b/score-all.sh @@ -1,6 +1,7 @@ -./target/debug/solana-stake-o-matic --markdown --cluster mainnet-beta $* \ +./target/debug/solana-stake-o-matic --markdown $* \ --min-epoch-credit-percentage-of-average 0 \ score-all \ --score-max-commission 10 \ --commission-point-discount 15000 +# --cluster mainnet-beta \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index df09cd04..0c5b6b38 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -2,6 +2,7 @@ DROP TABLE IF EXISTS mainnet; CREATE TABLE mainnet( identity TEXT, + vote_address TEXT, score INTEGER, commission SHORT, active_stake INTEGER, @@ -14,7 +15,8 @@ CREATE TABLE mainnet( stake_state_reason TEXT ); .mode csv -.import ./db/score-all-mainnet-beta/validator-detail.csv mainnet +--.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv mainnet +.import ./db/score-all-testnet/testnet-validator-detail.csv mainnet --remove header row delete FROM mainnet where identity='identity'; --add pct column diff --git a/sql/queries.sql b/sql/queries.sql index 2f441696..815ef75f 100644 --- a/sql/queries.sql +++ b/sql/queries.sql @@ -1,3 +1,4 @@ select pct, A.* from mainnet as A +where score>0 order by pct desc From 004b7bde54ddd89797cd0097bcd43603a6ce0d86 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 01:49:59 -0300 Subject: [PATCH 15/39] remove comment block as requested --- bot/src/main.rs | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index d3ce7cdf..67915ab1 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1339,19 +1339,6 @@ fn classify( continue; } - /* -- ------------------ - -- heuristic data, epoch 196 - -- ------------------ - select max(epoch_credits), min(epoch_credits) - from mainnet - where epoch_credits > (select max(epoch_credits)*0.50 from mainnet) - order by epoch_credits desc; - --max(epoch_credits),min(epoch_credits) - --242503,134403 - --so delta max-min epoch_credits ~= 100k - */ - // we start score with epoch_credits - // let mut score = epoch_credits; let mut score_discounts = db::ScoreDiscounts::default(); let participant = identity_to_participant.get(&identity).cloned(); 
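Note on consuming the CSV written above: besides the sqlite3 import, the per-cluster
validator-detail file can be read back directly. The sketch below is an illustration
only — the csv/serde dependencies, the struct, and the file path are assumptions, not
something these patches add; the column order follows the header string pushed in
generate_markdown as of the add vote-address patch.

use serde::Deserialize;

// Columns mirror the header written by generate_markdown (assumed order):
// identity,vote_address,score,commission,active_stake,epoch_credits,
// data_center_concentration,can_halt_the_network_group,low_credits,
// insufficient_self_stake,stake_state,stake_state_reason
#[derive(Debug, Deserialize)]
struct ValidatorDetailRow {
    identity: String,
    vote_address: String,
    score: u64,
    commission: u8,
    active_stake: f64, // SOL, as written by lamports_to_sol
    epoch_credits: u64,
    data_center_concentration: f64,
    can_halt_the_network_group: bool,
    low_credits: bool,
    insufficient_self_stake: bool,
    stake_state: String,
    stake_state_reason: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Example path; the bot writes {cluster}-validator-detail.csv under the cluster db path.
    let mut rdr =
        csv::Reader::from_path("db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv")?;
    for row in rdr.deserialize::<ValidatorDetailRow>() {
        let row = row?;
        if row.score > 0 {
            println!("{} score={} commission={}%", row.identity, row.score, row.commission);
        }
    }
    Ok(())
}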
From c55c904fe4bda624617dffce50f51fa9e81be4e0 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 17 Jul 2021 02:12:23 -0300 Subject: [PATCH 16/39] remove unused var --- bot/src/main.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 67915ab1..255b75b9 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -637,7 +637,6 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Cluster::MainnetBeta, "testnet" => Cluster::Testnet, From 351f31dc83a92a63867bdcf20976840c37b1397c Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 24 Jul 2021 01:21:30 -0300 Subject: [PATCH 17/39] act on recommendation - db backward compat --- bot/src/db.rs | 23 +++++++++----- bot/src/main.rs | 83 +++++++++++++++++++++++++++++++------------------ score-all.sh | 3 +- 3 files changed, 69 insertions(+), 40 deletions(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index 0fe72ab8..5a84d383 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -21,6 +21,15 @@ pub struct ScoreDiscounts { pub insufficient_self_stake: bool, pub can_halt_the_network_group: bool, } +#[derive(Default, Clone, Deserialize, Serialize)] +pub struct ScoreData { + /// computed score (more granular than ValidatorStakeState) + pub epoch_credits: u64, // epoch_credits is the base score + pub score_discounts: ScoreDiscounts, + pub commission: u8, + pub active_stake: u64, + pub data_center_concentration: f64, +} #[derive(Default, Clone, Deserialize, Serialize)] pub struct ValidatorClassification { @@ -30,12 +39,8 @@ pub struct ValidatorClassification { pub stake_state: ValidatorStakeState, pub stake_state_reason: String, - /// computed score (more granular than ValidatorStakeState) - pub epoch_credits: u64, // epoch_credits is the base score - pub score_discounts: ScoreDiscounts, - pub commission: u8, - pub active_stake: u64, - pub data_center_concentration: f64, + // added optional validator scoring data + pub score_data: Option, // Summary of the action was taken this epoch to advance the validator's stake pub stake_action: Option, @@ -60,7 +65,7 @@ pub struct ValidatorClassification { pub prioritize_funding_in_next_epoch: Option, } -impl ValidatorClassification { +impl ScoreData { pub fn score(&self, config: &Config) -> u64 { if self.score_discounts.can_halt_the_network_group || self.score_discounts.insufficient_self_stake @@ -73,7 +78,7 @@ impl ValidatorClassification { // if data_center_concentration = 25%, lose all score, // data_center_concentration = 10%, lose 40% (rounded) let discount_because_data_center_concentration = - self.epoch_credits * (self.data_center_concentration as u64 * 4) / 100; + (self.data_center_concentration * config.score_concentration_point_discount as f64) as u64; // score discounts according to commission let discount_because_commission = @@ -85,7 +90,9 @@ impl ValidatorClassification { .saturating_sub(discount_because_data_center_concentration) } } +} +impl ValidatorClassification { pub fn stake_state_streak(&self) -> usize { let mut streak = 1; diff --git a/bot/src/main.rs b/bot/src/main.rs index 255b75b9..1265d104 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -204,6 +204,8 @@ pub struct Config { score_commission_discount: u32, /// score min stake required score_min_stake: u64, + /// score discount per concentration percentage point + score_concentration_point_discount: u32, /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch @@ -286,6 +288,7 @@ impl Config { score_max_commission: 8, score_commission_discount: 
12_000, score_min_stake: sol_to_lamports(75.0), + score_concentration_point_discount: 4_000, quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -633,6 +636,13 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option ( - true, - value_t!(matches, "score_max_commission", u8).unwrap_or(10), - value_t!(matches, "commission_point_discount", u32).unwrap_or(16_000), - value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), - ), - _ => (false, 0, 0, 0), - }; + let ( + score_all, + score_max_commission, + score_commission_discount, + score_min_stake, + score_concentration_point_discount, + ) = match matches.subcommand() { + ("score-all", Some(matches)) => ( + true, + value_t!(matches, "score_max_commission", u8).unwrap_or(10), + value_t!(matches, "commission_point_discount", u32).unwrap_or(16_000), + value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), + value_t!(matches, "commission_point_discount", u32).unwrap_or(4000), + ), + _ => (false, 0, 0, 0, 0), + }; let config = Config { json_rpc_url, @@ -720,6 +736,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<()> { )); //identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason - let csv_line = format!( - r#""{}","{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, - identity.to_string(), - classification.vote_address, - classification.score(config), - classification.commission, - lamports_to_sol(classification.active_stake), - classification.epoch_credits, - classification.data_center_concentration, - classification.score_discounts.can_halt_the_network_group, - classification.score_discounts.low_credits, - classification.score_discounts.insufficient_self_stake, - classification.stake_state, - classification.stake_state_reason, - ); - validator_detail_csv.push(csv_line); + if let Some(score_data) = &classification.score_data { + let csv_line = format!( + r#""{}","{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, + identity.to_string(), + classification.vote_address, + score_data.score(config), + score_data.commission, + lamports_to_sol(score_data.active_stake), + score_data.epoch_credits, + score_data.data_center_concentration, + score_data.score_discounts.can_halt_the_network_group, + score_data.score_discounts.low_credits, + score_data.score_discounts.insufficient_self_stake, + classification.stake_state, + classification.stake_state_reason, + ); + validator_detail_csv.push(csv_line); + } if let Some(ref stake_action) = classification.stake_action { validator_markdown.push(format!("* Staking activity: {}", stake_action)); diff --git a/score-all.sh b/score-all.sh index cb432739..6621420e 100644 --- a/score-all.sh +++ b/score-all.sh @@ -2,6 +2,7 @@ --min-epoch-credit-percentage-of-average 0 \ score-all \ --score-max-commission 10 \ - --commission-point-discount 15000 + --commission-point-discount 2000 \ + --concentration-point-discount 2000 # --cluster mainnet-beta \ No newline at end of file From e7aa2689c5ea85f9d3034c008feca70ff0b3483b Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 24 Jul 2021 22:26:44 -0300 Subject: [PATCH 18/39] add average-position, recompute score every time --- bot/src/db.rs | 22 +++++++++++++--------- bot/src/main.rs | 19 ++++++++++++------- score-all.sh | 7 ++++--- sql/import.sql | 3 +-- 4 files changed, 30 insertions(+), 21 deletions(-) diff --git 
a/bot/src/db.rs b/bot/src/db.rs index 5a84d383..aed3ba68 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -17,14 +17,15 @@ use { #[derive(Default, Clone, Deserialize, Serialize)] pub struct ScoreDiscounts { - pub low_credits: bool, - pub insufficient_self_stake: bool, pub can_halt_the_network_group: bool, } #[derive(Default, Clone, Deserialize, Serialize)] +/// computed score (more granular than ValidatorStakeState) pub struct ScoreData { - /// computed score (more granular than ValidatorStakeState) - pub epoch_credits: u64, // epoch_credits is the base score + /// epoch_credits is the base score + pub epoch_credits: u64, + /// 50 => Average, 0=>worst, 100=twice the average + pub average_position: u32, pub score_discounts: ScoreDiscounts, pub commission: u8, pub active_stake: u64, @@ -68,17 +69,20 @@ pub struct ValidatorClassification { impl ScoreData { pub fn score(&self, config: &Config) -> u64 { if self.score_discounts.can_halt_the_network_group - || self.score_discounts.insufficient_self_stake - || self.score_discounts.low_credits - || self.commission > config.score_max_commission || self.active_stake < config.score_min_stake + || self.average_position + < 50_u32.saturating_sub(config.min_epoch_credit_percentage_of_average as u32 / 2) + // if min_epoch_credit_percentage_of_average=100 => everybody passes + // if min_epoch_credit_percentage_of_average=25 => only validators above avg-25% pass + || self.commission > config.score_max_commission { 0 } else { // if data_center_concentration = 25%, lose all score, // data_center_concentration = 10%, lose 40% (rounded) - let discount_because_data_center_concentration = - (self.data_center_concentration * config.score_concentration_point_discount as f64) as u64; + let discount_because_data_center_concentration = (self.data_center_concentration + * config.score_concentration_point_discount as f64) + as u64; // score discounts according to commission let discount_because_commission = diff --git a/bot/src/main.rs b/bot/src/main.rs index 1265d104..8007cae8 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -629,6 +629,13 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option= concentrated_validators_stake_limit; @@ -1573,6 +1576,9 @@ fn classify( stake_state, score_data: Some(ScoreData { epoch_credits, + average_position: ((epoch_credits as u128 * 50_u128) + / avg_epoch_credits as u128) + as u32, score_discounts, commission, active_stake, @@ -1901,17 +1907,16 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { //identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason if let Some(score_data) = &classification.score_data { let csv_line = format!( - r#""{}","{}",{},{},{},{},{:.4},{},{},{},"{:?}","{}""#, + r#""{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}""#, identity.to_string(), classification.vote_address, score_data.score(config), + score_data.average_position, score_data.commission, lamports_to_sol(score_data.active_stake), score_data.epoch_credits, score_data.data_center_concentration, score_data.score_discounts.can_halt_the_network_group, - score_data.score_discounts.low_credits, - score_data.score_discounts.insufficient_self_stake, classification.stake_state, classification.stake_state_reason, ); diff --git a/score-all.sh b/score-all.sh index 6621420e..ae34fb8e 100644 --- a/score-all.sh +++ b/score-all.sh @@ -1,8 +1,9 @@ ./target/debug/solana-stake-o-matic --markdown $* \ - 
--min-epoch-credit-percentage-of-average 0 \ + --min-epoch-credit-percentage-of-average 20 \ score-all \ --score-max-commission 10 \ - --commission-point-discount 2000 \ - --concentration-point-discount 2000 + --score-min-stake 100 \ + --commission-point-discount 5000 \ + --concentration-point-discount 1000 # --cluster mainnet-beta \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index 0c5b6b38..67e85c18 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -4,13 +4,12 @@ CREATE TABLE mainnet( identity TEXT, vote_address TEXT, score INTEGER, + avg_position INTEGER, commission SHORT, active_stake INTEGER, epoch_credits INTEGER, data_center_concentration DOUBLE, can_halt_the_network_group BOOL, - low_credits BOOL, - insufficient_self_stake BOOL, stake_state TEXT, stake_state_reason TEXT ); From 6eb887e666726cd2ed0fb8656d1d5af3ca9c3a26 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 25 Jul 2021 00:32:02 -0300 Subject: [PATCH 19/39] fix csv headers --- bot/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 8007cae8..c956f35a 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1877,7 +1877,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason".into()); + validator_detail_csv.push("identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); From d6c5dc572ceee5317178c33f1caf0de2f8d29786 Mon Sep 17 00:00:00 2001 From: luciotato Date: Fri, 30 Jul 2021 15:53:42 -0300 Subject: [PATCH 20/39] score-all mainnet --- bot/src/main.rs | 4 ++-- score-all-mainnet.sh | 9 +++++++++ score-all.sh => score-all-testnet.sh | 0 sql/import.sql | 4 ++-- 4 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 score-all-mainnet.sh rename score-all.sh => score-all-testnet.sh (100%) diff --git a/bot/src/main.rs b/bot/src/main.rs index c956f35a..6c58e37a 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -727,7 +727,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option (false, 0, 0, 0, 0), }; @@ -1185,7 +1185,7 @@ fn classify( // compute cumulative_stake_limit => active_stake of the last validator inside the can-halt-the-network group // we later set score=0 to all validators whose stake >= concentrated_validators_stake_limit // sort by active_stake - vote_account_info.sort_by(|a, b| a.active_stake.cmp(&b.active_stake)); + vote_account_info.sort_by(|a, b| b.active_stake.cmp(&a.active_stake)); let mut accumulated: u64 = 0; let mut count_halt_group: u32 = 0; let limit: u64 = total_active_stake / 100 * 34; diff --git a/score-all-mainnet.sh b/score-all-mainnet.sh new file mode 100644 index 00000000..a0e2fe2b --- /dev/null +++ b/score-all-mainnet.sh @@ -0,0 +1,9 @@ +./target/debug/solana-stake-o-matic --cluster mainnet-beta --markdown $* \ + --min-epoch-credit-percentage-of-average 20 \ + score-all \ + --score-max-commission 8 \ + --score-min-stake 100 \ + --commission-point-discount 15000 \ + --concentration-point-discount 1500 + +# --cluster 
mainnet-beta \ No newline at end of file diff --git a/score-all.sh b/score-all-testnet.sh similarity index 100% rename from score-all.sh rename to score-all-testnet.sh diff --git a/sql/import.sql b/sql/import.sql index 67e85c18..46c21dd3 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -14,8 +14,8 @@ CREATE TABLE mainnet( stake_state_reason TEXT ); .mode csv ---.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv mainnet -.import ./db/score-all-testnet/testnet-validator-detail.csv mainnet +.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv mainnet +--.import ./db/score-all-testnet/testnet-validator-detail.csv mainnet --remove header row delete FROM mainnet where identity='identity'; --add pct column From 480b52b3a61a99085a0b254e7745ee5292f18ae3 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 31 Jul 2021 19:41:28 -0300 Subject: [PATCH 21/39] set limit to 33% --- bot/src/main.rs | 2 +- sql/import.sql | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 6c58e37a..dd819678 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1188,7 +1188,7 @@ fn classify( vote_account_info.sort_by(|a, b| b.active_stake.cmp(&a.active_stake)); let mut accumulated: u64 = 0; let mut count_halt_group: u32 = 0; - let limit: u64 = total_active_stake / 100 * 34; + let limit: u64 = total_active_stake / 100 * 33; let mut concentrated_validators_stake_limit = limit; for info in &vote_account_info { accumulated += info.active_stake; diff --git a/sql/import.sql b/sql/import.sql index 46c21dd3..1a399491 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -20,7 +20,11 @@ CREATE TABLE mainnet( delete FROM mainnet where identity='identity'; --add pct column ALTER table mainnet add pct FLOAT; -UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet),4); +ALTER table mainnet add pct_of_stake FLOAT; +UPDATE mainnet set + pct = round(score * 100.0 / (select sum(score) from mainnet),4), + pct_of_stake = round(active_stake * 100.0 / (select sum(active_stake) from mainnet),4) + ; --control, show total staked select 'validators',count(*),'total staked',sum(active_stake) from mainnet; select 'validators with 0 score count:',count(*), From f3651d490687e9cf1fa245ef7555abce19481114 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 31 Jul 2021 20:05:14 -0300 Subject: [PATCH 22/39] include 1st outside sec-group --- bot/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index dd819678..fb84724c 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1194,9 +1194,9 @@ fn classify( accumulated += info.active_stake; count_halt_group += 1; if accumulated > limit { - concentrated_validators_stake_limit = info.active_stake; break; } + concentrated_validators_stake_limit = info.active_stake } info!( "validators:{} total_active_stake:{}, can_halt_the_network:top {}, pro-decentralization-stake-limit: less than {}", From 8be79ad80c7dd18b256a6c7f79e4d8f4fcdb5b81 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 1 Aug 2021 18:35:32 -0300 Subject: [PATCH 23/39] apply commission discount to credits more fairly --- bot/src/db.rs | 17 +++++++++++++++-- bot/src/main.rs | 34 ++++++++++++---------------------- score-all-mainnet.sh | 1 - 3 files changed, 27 insertions(+), 25 deletions(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index aed3ba68..47511e1f 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -85,13 +85,26 @@ impl ScoreData { as u64; // score discounts according 
to commission - let discount_because_commission = - (self.commission as u32 * config.score_commission_discount) as u64; + // apply commission % as a discount to credits_observed. + // The rationale es: + // If you're the top performer validator and get 300K credits, but you have 50% commission, + // from our user's point of view, it's the same as a 150K credits validator with 0% commission, + // both represent the same APY for the user. + // So to treat both the same we apply commission to self.epoch_credits + let discount_because_commission = self.commission as u64 * self.epoch_credits / 100; + + // give more score to above average validators in order to increase APY for our users + let points_added_above_average: u64 = if self.average_position > 50 { + (self.average_position - 50) as u64 * self.epoch_credits / 10_u64 + } else { + 0 + }; //result self.epoch_credits .saturating_sub(discount_because_commission) .saturating_sub(discount_because_data_center_concentration) + .saturating_add(points_added_above_average) } } } diff --git a/bot/src/main.rs b/bot/src/main.rs index fb84724c..5bab79f8 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -200,8 +200,6 @@ pub struct Config { score_all: bool, /// max commission accepted to score (0-100) score_max_commission: u8, - /// score discount per commission point - score_commission_discount: u32, /// score min stake required score_min_stake: u64, /// score discount per concentration percentage point @@ -286,9 +284,8 @@ impl Config { dry_run: true, score_all: false, score_max_commission: 8, - score_commission_discount: 12_000, - score_min_stake: sol_to_lamports(75.0), - score_concentration_point_discount: 4_000, + score_min_stake: sol_to_lamports(100.0), + score_concentration_point_discount: 1_500, quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -715,22 +712,16 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option ( - true, - value_t!(matches, "score_max_commission", u8).unwrap_or(10), - value_t!(matches, "commission_point_discount", u32).unwrap_or(16_000), - value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), - value_t!(matches, "concentration_point_discount", u32).unwrap_or(2000), - ), - _ => (false, 0, 0, 0, 0), - }; + let (score_all, score_max_commission, score_min_stake, score_concentration_point_discount) = + match matches.subcommand() { + ("score-all", Some(matches)) => ( + true, + value_t!(matches, "score_max_commission", u8).unwrap_or(10), + value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), + value_t!(matches, "concentration_point_discount", u32).unwrap_or(2000), + ), + _ => (false, 0, 0, 0), + }; let config = Config { json_rpc_url, @@ -741,7 +732,6 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option Date: Tue, 3 Aug 2021 01:52:09 -0300 Subject: [PATCH 24/39] better name for table field --- sql/import.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/import.sql b/sql/import.sql index 1a399491..ef696b2a 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -20,10 +20,10 @@ CREATE TABLE mainnet( delete FROM mainnet where identity='identity'; --add pct column ALTER table mainnet add pct FLOAT; -ALTER table mainnet add pct_of_stake FLOAT; +ALTER table mainnet add stake_conc FLOAT; UPDATE mainnet set pct = round(score * 100.0 / (select sum(score) from mainnet),4), - pct_of_stake = round(active_stake * 100.0 / (select sum(active_stake) from mainnet),4) + stake_conc = round(active_stake * 100.0 / 
(select sum(active_stake) from mainnet),4) ; --control, show total staked select 'validators',count(*),'total staked',sum(active_stake) from mainnet; From 2a58220ff88f904ae34bbf6efc58b2cab205b8d2 Mon Sep 17 00:00:00 2001 From: luciotato Date: Tue, 10 Aug 2021 09:07:22 -0300 Subject: [PATCH 25/39] add validator-name, min-avg-position --- bot/src/data_center_info.rs | 14 +++++++-- bot/src/db.rs | 25 +++++++++++----- bot/src/main.rs | 58 +++++++++++++++++++++++++------------ clean-score-all-mainnet.sh | 5 ++++ import-into-sqlite.sh | 2 +- score-all-mainnet.sh | 6 ++-- sql/import.sql | 43 ++++++++++++++++----------- 7 files changed, 101 insertions(+), 52 deletions(-) create mode 100644 clean-score-all-mainnet.sh diff --git a/bot/src/data_center_info.rs b/bot/src/data_center_info.rs index fa236292..5a3d4511 100644 --- a/bot/src/data_center_info.rs +++ b/bot/src/data_center_info.rs @@ -1,5 +1,5 @@ use { - crate::validators_app, + crate::{validators_app,ByIdentityInfo}, log::*, serde::{Deserialize, Serialize}, solana_sdk::pubkey::Pubkey, @@ -75,7 +75,7 @@ impl std::fmt::Display for DataCenterInfo { #[derive(Debug, Default)] pub struct DataCenters { pub info: Vec, - pub by_identity: HashMap, + pub by_identity: HashMap, } pub fn get(cluster: &str) -> Result> { @@ -123,7 +123,15 @@ pub fn get(cluster: &str) -> Result> { }) .unwrap_or_default(); - by_identity.insert(identity, data_center_id.clone()); + by_identity.insert( + identity, + ByIdentityInfo { + data_center_id: data_center_id.clone(), + keybase_id: String::from(v.keybase_id.as_deref().unwrap_or("")), + name: String::from(v.name.as_deref().unwrap_or("")), + www_url: String::from(v.www_url.as_deref().unwrap_or("")), + }, + ); let mut data_center_info = data_center_map .entry(data_center_id.clone()) diff --git a/bot/src/db.rs b/bot/src/db.rs index 47511e1f..4b9a029e 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -19,17 +19,27 @@ use { pub struct ScoreDiscounts { pub can_halt_the_network_group: bool, } + +#[derive(Debug, Default, Clone, Deserialize, Serialize)] +pub struct ByIdentityInfo { + pub data_center_id: DataCenterId, + pub keybase_id: String, + pub name: String, + pub www_url: String, +} + #[derive(Default, Clone, Deserialize, Serialize)] /// computed score (more granular than ValidatorStakeState) pub struct ScoreData { /// epoch_credits is the base score pub epoch_credits: u64, /// 50 => Average, 0=>worst, 100=twice the average - pub average_position: u32, + pub average_position: f64, pub score_discounts: ScoreDiscounts, pub commission: u8, pub active_stake: u64, pub data_center_concentration: f64, + pub validators_app_info: ByIdentityInfo, } #[derive(Default, Clone, Deserialize, Serialize)] @@ -70,10 +80,9 @@ impl ScoreData { pub fn score(&self, config: &Config) -> u64 { if self.score_discounts.can_halt_the_network_group || self.active_stake < config.score_min_stake - || self.average_position - < 50_u32.saturating_sub(config.min_epoch_credit_percentage_of_average as u32 / 2) - // if min_epoch_credit_percentage_of_average=100 => everybody passes - // if min_epoch_credit_percentage_of_average=25 => only validators above avg-25% pass + || self.average_position < config.min_avg_position + // if config.min_avg_position=100 => everybody passes + // if config.min_avg_position=50 => only validators above avg pass || self.commission > config.score_max_commission { 0 @@ -93,9 +102,9 @@ impl ScoreData { // So to treat both the same we apply commission to self.epoch_credits let discount_because_commission = self.commission as u64 * 
self.epoch_credits / 100; - // give more score to above average validators in order to increase APY for our users - let points_added_above_average: u64 = if self.average_position > 50 { - (self.average_position - 50) as u64 * self.epoch_credits / 10_u64 + // give extra score to above average validators in order to increase APY for our users + let points_added_above_average: u64 = if self.average_position > 50.0 { + (self.average_position - 50.0) as u64 * self.epoch_credits / 10_u64 } else { 0 }; diff --git a/bot/src/main.rs b/bot/src/main.rs index 5bab79f8..257fd270 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -204,6 +204,8 @@ pub struct Config { score_min_stake: u64, /// score discount per concentration percentage point score_concentration_point_discount: u32, + /// min average position considering credits_observed, 50.0 = average + min_avg_position: f64, /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch @@ -235,7 +237,7 @@ pub struct Config { /// None: skip infrastructure concentration check max_infrastructure_concentration: Option, - /// How validators with infrastruction concentration above `max_infrastructure_concentration` + /// How validators with infrastructure concentration above `max_infrastructure_concentration` /// will be affected. Accepted values are: /// 1) "warn" - Stake unaffected. A warning message is notified /// 2) "destake" - Removes all validator stake @@ -647,6 +649,13 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option ( - true, - value_t!(matches, "score_max_commission", u8).unwrap_or(10), - value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), - value_t!(matches, "concentration_point_discount", u32).unwrap_or(2000), - ), - _ => (false, 0, 0, 0), - }; + let ( + score_all, + score_max_commission, + score_min_stake, + score_concentration_point_discount, + min_avg_position, + ) = match matches.subcommand() { + ("score-all", Some(matches)) => ( + true, + value_t!(matches, "score_max_commission", u8).unwrap_or(10), + value_t!(matches, "score_min_stake", u64).unwrap_or(sol_to_lamports(100.0)), + value_t!(matches, "concentration_point_discount", u32).unwrap_or(2000), + value_t!(matches, "min_avg_position", f64).unwrap_or(50.0), + ), + _ => (false, 0, 0, 0, 0.0), + }; let config = Config { json_rpc_url, @@ -734,6 +749,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<()> { if let Some(ref validator_classifications) = epoch_classification.validator_classifications { let mut validator_detail_csv = vec![]; - validator_detail_csv.push("identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason".into()); - + validator_detail_csv.push("epoch,keybase_id,name,identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason,www_url".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); validator_classifications.sort_by(|a, b| a.0.cmp(b.0)); @@ -1894,10 +1910,13 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { classification.stake_state_reason )); - //identity,vote_address,score,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,low_credits,insufficient_self_stake,stake_state,stake_state_reason + 
//epoch,keybase_id,name,identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason,www_url if let Some(score_data) = &classification.score_data { let csv_line = format!( - r#""{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}""#, + r#"{},"{}","{}","{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}","{}""#, + epoch, + score_data.validators_app_info.keybase_id, + score_data.validators_app_info.name, identity.to_string(), classification.vote_address, score_data.score(config), @@ -1909,6 +1928,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { score_data.score_discounts.can_halt_the_network_group, classification.stake_state, classification.stake_state_reason, + score_data.validators_app_info.www_url, ); validator_detail_csv.push(csv_line); } diff --git a/clean-score-all-mainnet.sh b/clean-score-all-mainnet.sh new file mode 100644 index 00000000..0ede3c04 --- /dev/null +++ b/clean-score-all-mainnet.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -ex +rm -rf db/score-all-mainnet-beta +. ./score-all-mainnet.sh +. ./import-into-sqlite.sh diff --git a/import-into-sqlite.sh b/import-into-sqlite.sh index c831bc58..6c6117cf 100644 --- a/import-into-sqlite.sh +++ b/import-into-sqlite.sh @@ -1 +1 @@ -sqlite3 <./sql/import.sql \ No newline at end of file +sqlite3 <./sql/import.sql diff --git a/score-all-mainnet.sh b/score-all-mainnet.sh index d3abcb7b..e0d22c68 100644 --- a/score-all-mainnet.sh +++ b/score-all-mainnet.sh @@ -1,8 +1,6 @@ ./target/debug/solana-stake-o-matic --cluster mainnet-beta --markdown $* \ - --min-epoch-credit-percentage-of-average 20 \ score-all \ --score-max-commission 8 \ --score-min-stake 100 \ - --concentration-point-discount 1500 - -# --cluster mainnet-beta \ No newline at end of file + --concentration-point-discount 1500 \ + --min-avg-position 53.0 diff --git a/sql/import.sql b/sql/import.sql index ef696b2a..0fe01531 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -1,40 +1,45 @@ -.open ./db/score-all-mainnet-beta/sqlite3.db -DROP TABLE IF EXISTS mainnet; -CREATE TABLE mainnet( +.open ./db/score-sqlite3.db +DROP TABLE IF EXISTS imported; +CREATE TABLE imported( + epoch INT, + keybase_id TEXT, + name TEXT, identity TEXT, vote_address TEXT, score INTEGER, - avg_position INTEGER, + avg_position REAL, commission SHORT, active_stake INTEGER, epoch_credits INTEGER, data_center_concentration DOUBLE, can_halt_the_network_group BOOL, stake_state TEXT, - stake_state_reason TEXT + stake_state_reason TEXT, + www_url TEXT ); .mode csv -.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv mainnet ---.import ./db/score-all-testnet/testnet-validator-detail.csv mainnet +.import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv imported +--.import ./db/score-all-testnet/testnet-validator-detail.csv imported --remove header row -delete FROM mainnet where identity='identity'; +delete FROM imported where identity='identity'; --add pct column -ALTER table mainnet add pct FLOAT; -ALTER table mainnet add stake_conc FLOAT; -UPDATE mainnet set - pct = round(score * 100.0 / (select sum(score) from mainnet),4), - stake_conc = round(active_stake * 100.0 / (select sum(active_stake) from mainnet),4) +ALTER table imported add pct FLOAT; +ALTER table imported add stake_conc FLOAT; +UPDATE imported set + pct = round(score * 100.0 / (select sum(score) from imported),4), + stake_conc = round(active_stake * 100.0 / (select sum(active_stake) from imported),4) ; --control, 
show total staked -select 'validators',count(*),'total staked',sum(active_stake) from mainnet; +select DISTINCT epoch from imported; +select 'validators',count(*),'total staked',sum(active_stake) from imported; select 'validators with 0 score count:',count(*), "sum stake",sum(active_stake) - from mainnet + from imported where pct=0 ; select 'validators with non-zero score count:',count(*), "sum stake",sum(active_stake) - from mainnet + from imported where pct>0 ; select 'avg epoch_credits',avg(epoch_credits), @@ -44,6 +49,10 @@ select 'avg epoch_credits',avg(epoch_credits), 'min score',min(score), char(10) || 'max pct',max(pct), 'min pct',min(pct) - from mainnet + from imported where pct>0; +-- add to scores --drop table scores; +create TABLE if not EXISTS scores as select * from imported; +DELETE FROM scores where epoch = (select DISTINCT epoch from imported); +INSERT INTO scores select * from imported; .exit From dd30b2b0be16b2ca5faaa084fcde065573375d00 Mon Sep 17 00:00:00 2001 From: luciotato Date: Fri, 13 Aug 2021 22:20:39 -0300 Subject: [PATCH 26/39] select top X validators --- bot/src/data_center_info.rs | 2 +- bot/src/db.rs | 3 ++- bot/src/main.rs | 34 +++++++++++++++++++++++++++++++--- score-all-mainnet.sh | 5 +++-- 4 files changed, 37 insertions(+), 7 deletions(-) diff --git a/bot/src/data_center_info.rs b/bot/src/data_center_info.rs index 5a3d4511..49061d97 100644 --- a/bot/src/data_center_info.rs +++ b/bot/src/data_center_info.rs @@ -1,5 +1,5 @@ use { - crate::{validators_app,ByIdentityInfo}, + crate::{validators_app, ByIdentityInfo}, log::*, serde::{Deserialize, Serialize}, solana_sdk::pubkey::Pubkey, diff --git a/bot/src/db.rs b/bot/src/db.rs index 4b9a029e..ed9a5f22 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -104,7 +104,8 @@ impl ScoreData { // give extra score to above average validators in order to increase APY for our users let points_added_above_average: u64 = if self.average_position > 50.0 { - (self.average_position - 50.0) as u64 * self.epoch_credits / 10_u64 + let above = self.average_position - 50.0; + (above * self.epoch_credits as f64) as u64 } else { 0 }; diff --git a/bot/src/main.rs b/bot/src/main.rs index 257fd270..ee9bf4e0 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -206,6 +206,8 @@ pub struct Config { score_concentration_point_discount: u32, /// min average position considering credits_observed, 50.0 = average min_avg_position: f64, + /// select top n validators + top: u16, /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch @@ -656,6 +658,13 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option ( true, @@ -734,8 +744,9 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option (false, 0, 0, 0, 0.0), + _ => (false, 0, 0, 0, 0.0, 0), }; let config = Config { @@ -750,6 +761,7 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<()> { validator_detail_csv.push("epoch,keybase_id,name,identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason,www_url".into()); let mut validator_classifications = validator_classifications.iter().collect::>(); - validator_classifications.sort_by(|a, b| a.0.cmp(b.0)); + // sort by credits, desc + validator_classifications.sort_by(|a, b| { + b.1.score_data + .as_ref() + .unwrap() + .epoch_credits + .cmp(&a.1.score_data.as_ref().unwrap().epoch_credits) + }); + let mut index_with_score = 0; for 
(identity, classification) in validator_classifications { let validator_markdown = validators_markdown.entry(identity).or_default(); @@ -1912,6 +1932,14 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { //epoch,keybase_id,name,identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason,www_url if let Some(score_data) = &classification.score_data { + let mut score = score_data.score(config); + if score > 0 { + index_with_score += 1; + if index_with_score > config.top { + score = 0; + } + } + let csv_line = format!( r#"{},"{}","{}","{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}","{}""#, epoch, @@ -1919,7 +1947,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { score_data.validators_app_info.name, identity.to_string(), classification.vote_address, - score_data.score(config), + score, score_data.average_position, score_data.commission, lamports_to_sol(score_data.active_stake), diff --git a/score-all-mainnet.sh b/score-all-mainnet.sh index e0d22c68..33b4f6c3 100644 --- a/score-all-mainnet.sh +++ b/score-all-mainnet.sh @@ -1,6 +1,7 @@ ./target/debug/solana-stake-o-matic --cluster mainnet-beta --markdown $* \ score-all \ - --score-max-commission 8 \ + --score-max-commission 10 \ --score-min-stake 100 \ --concentration-point-discount 1500 \ - --min-avg-position 53.0 + --min-avg-position 50 \ + --top 100 From 8028451df591636bce22e0fcce6c60b306e4919e Mon Sep 17 00:00:00 2001 From: luciotato Date: Wed, 18 Aug 2021 07:16:58 -0300 Subject: [PATCH 27/39] tilt towards high average --- bot/src/db.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/src/db.rs b/bot/src/db.rs index ed9a5f22..0584c683 100644 --- a/bot/src/db.rs +++ b/bot/src/db.rs @@ -105,7 +105,8 @@ impl ScoreData { // give extra score to above average validators in order to increase APY for our users let points_added_above_average: u64 = if self.average_position > 50.0 { let above = self.average_position - 50.0; - (above * self.epoch_credits as f64) as u64 + let multiplier = if above * above > 25.0 { 25.0 } else {above * above}; + (multiplier * self.epoch_credits as f64) as u64 } else { 0 }; From c8bdc27306cc6bc664738b98bb76c57dd47a9fbe Mon Sep 17 00:00:00 2001 From: luciotato Date: Sat, 21 Aug 2021 11:26:07 -0300 Subject: [PATCH 28/39] apply data-center concentration penalty --- sql/average.sql | 27 +++++++++++++++++++++++++++ sql/import.sql | 34 ++++++++++++++++++++++++++++++++++ sql/queries.sql | 11 +++++++++++ 3 files changed, 72 insertions(+) create mode 100644 sql/average.sql diff --git a/sql/average.sql b/sql/average.sql new file mode 100644 index 00000000..c9efc1fc --- /dev/null +++ b/sql/average.sql @@ -0,0 +1,27 @@ +.open ./db/score-sqlite3.db + +drop table avg; + +create table AVG as +select epoch,keybase_id,name,score, case when score=0 then 0 else b_score end as b_score, b_score-score as delta_score, avg_position, ap, commission, c2, epoch_credits, ec2, ec2-epoch_credits as delta_credits, 0.0 as pct, vote_address from scores A +left outer JOIN (select round( avg(epoch_credits) * (100-avg(commission))/100 * (100-avg(data_center_concentration)*4)/100 * (avg(avg_position)-49) * (avg(avg_position)-49) ) as B_score, avg(avg_position) as ap, avg(commission) as c2, avg(epoch_credits) as ec2, vote_address as va2 from scores B +where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) +group by vote_address) +on va2 = 
a.vote_address +where A.epoch = (select distinct epoch from imported) +--and score=0 and b_score>0 +--and score>0 WE MUST INCLUDE ALL RECORDS - so update-scores checks all validators health +order by b_score desc +; + +-- compute PCT (informative) +update avg as U +set pct = B_score / (select sum(A.b_score) from avg A where A.epoch = U.epoch) * 100 +; + +-- show validators with pct assgined (informative) +select * from AVG +order by pct desc +where pct>0; + +.exit diff --git a/sql/import.sql b/sql/import.sql index 0fe01531..36bde8fd 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -1,4 +1,5 @@ .open ./db/score-sqlite3.db + DROP TABLE IF EXISTS imported; CREATE TABLE imported( epoch INT, @@ -17,11 +18,13 @@ CREATE TABLE imported( stake_state_reason TEXT, www_url TEXT ); + .mode csv .import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv imported --.import ./db/score-all-testnet/testnet-validator-detail.csv imported --remove header row delete FROM imported where identity='identity'; + --add pct column ALTER table imported add pct FLOAT; ALTER table imported add stake_conc FLOAT; @@ -29,6 +32,7 @@ UPDATE imported set pct = round(score * 100.0 / (select sum(score) from imported),4), stake_conc = round(active_stake * 100.0 / (select sum(active_stake) from imported),4) ; + --control, show total staked select DISTINCT epoch from imported; select 'validators',count(*),'total staked',sum(active_stake) from imported; @@ -51,8 +55,38 @@ select 'avg epoch_credits',avg(epoch_credits), 'min pct',min(pct) from imported where pct>0; + -- add to scores --drop table scores; create TABLE if not EXISTS scores as select * from imported; DELETE FROM scores where epoch = (select DISTINCT epoch from imported); INSERT INTO scores select * from imported; + +-- recompute avg last 3 epochs +DROP TABLE IF EXISTS avg; +create table AVG as +select epoch,keybase_id,name,score, case when score=0 then 0 else b_score end as b_score, b_score-score as delta_score, avg_position, ap, commission, c2, epoch_credits, ec2, ec2-epoch_credits as delta_credits, 0.0 as pct, vote_address from scores A +left outer JOIN (select round( avg(epoch_credits) * (100-avg(commission))/100 * (100-avg(data_center_concentration)*4)/100 * (avg(avg_position)-49) * (avg(avg_position)-49) ) as B_score, avg(avg_position) as ap, avg(commission) as c2, avg(epoch_credits) as ec2, vote_address as va2 from scores B +where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) +group by vote_address) +on va2 = a.vote_address +where A.epoch = (select distinct epoch from imported) +--and score=0 and b_score>0 +--and score>0 WE MUST INCLUDE ALL RECORDS - so update-scores checks all validators health +order by b_score desc +; + +-- compute PCT (informative) +update avg as U +set pct = B_score / (select sum(A.b_score) from avg A where A.epoch = U.epoch) * 100 +; + +-- show top validators with pct assgined (informative) +.mode column +.headers ON +select epoch,keybase_id,name, round(pct,2) as pct, b_score,delta_score,avg_position,epoch_credits, round(c2) as comm, vote_address from AVG +where pct>0 +order by pct desc +LIMIT 10 +; + .exit diff --git a/sql/queries.sql b/sql/queries.sql index 815ef75f..c47067e1 100644 --- a/sql/queries.sql +++ b/sql/queries.sql @@ -1,4 +1,15 @@ +-- * -- select pct, A.* from mainnet as A where score>0 order by pct desc +-- * -- +-- compare epochs +select epoch,keybase_id,name,score,b_score, avg_position, ap, commission, c2, epoch_credits, ec2, can_halt_the_network_group as h1, 
h2 from scores A +JOIN (select score as B_score, avg_position as ap, commission as c2, epoch_credits as ec2, can_halt_the_network_group as h2, vote_address as va2 from scores B +where B.epoch = 214) +on va2 = a.vote_address +where A.epoch = 213 +--and score=0 and b_score>0 +order by score desc +-- * -- \ No newline at end of file From 72824f3cbfaf02c6f7fd30db37abe2965762f3a0 Mon Sep 17 00:00:00 2001 From: luciotato Date: Fri, 3 Sep 2021 01:17:24 -0300 Subject: [PATCH 29/39] take TOP 200 validators --- bot/src/main.rs | 25 ++++--------------------- score-all-mainnet.sh | 3 +-- sql/import.sql | 28 +++++++++++++++++++++------- 3 files changed, 26 insertions(+), 30 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index ee9bf4e0..29d1fb99 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -206,8 +206,6 @@ pub struct Config { score_concentration_point_discount: u32, /// min average position considering credits_observed, 50.0 = average min_avg_position: f64, - /// select top n validators - top: u16, /// Quality validators produce within this percentage of the cluster average skip rate over /// the previous epoch @@ -290,6 +288,7 @@ impl Config { score_max_commission: 8, score_min_stake: sol_to_lamports(100.0), score_concentration_point_discount: 1_500, + min_avg_position: 40.0, quality_block_producer_percentage: 15, max_poor_block_producer_percentage: 20, max_commission: 100, @@ -658,13 +657,6 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<(Config, RpcClient, Option ( true, @@ -744,9 +735,8 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option (false, 0, 0, 0, 0.0, 0), + _ => (false, 0, 0, 0, 0.0), }; let config = Config { @@ -761,7 +751,6 @@ fn get_config() -> BoxResult<(Config, RpcClient, Option BoxResult<()> { .epoch_credits .cmp(&a.1.score_data.as_ref().unwrap().epoch_credits) }); - let mut index_with_score = 0; + for (identity, classification) in validator_classifications { let validator_markdown = validators_markdown.entry(identity).or_default(); @@ -1932,13 +1921,7 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { //epoch,keybase_id,name,identity,vote_address,score,average_position,commission,active_stake,epoch_credits,data_center_concentration,can_halt_the_network_group,stake_state,stake_state_reason,www_url if let Some(score_data) = &classification.score_data { - let mut score = score_data.score(config); - if score > 0 { - index_with_score += 1; - if index_with_score > config.top { - score = 0; - } - } + let score = score_data.score(config); let csv_line = format!( r#"{},"{}","{}","{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}","{}""#, diff --git a/score-all-mainnet.sh b/score-all-mainnet.sh index 33b4f6c3..f0f90344 100644 --- a/score-all-mainnet.sh +++ b/score-all-mainnet.sh @@ -3,5 +3,4 @@ --score-max-commission 10 \ --score-min-stake 100 \ --concentration-point-discount 1500 \ - --min-avg-position 50 \ - --top 100 + --min-avg-position 50 \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index 36bde8fd..c44b9280 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -62,13 +62,23 @@ DELETE FROM scores where epoch = (select DISTINCT epoch from imported); INSERT INTO scores select * from imported; -- recompute avg last 3 epochs +-- * (avg(b.avg_position)-49) * (avg(b.avg_position)-49) ) DROP TABLE IF EXISTS avg; create table AVG as -select epoch,keybase_id,name,score, case when score=0 then 0 else b_score end as b_score, b_score-score as delta_score, avg_position, ap, commission, c2, epoch_credits, ec2, 
ec2-epoch_credits as delta_credits, 0.0 as pct, vote_address from scores A -left outer JOIN (select round( avg(epoch_credits) * (100-avg(commission))/100 * (100-avg(data_center_concentration)*4)/100 * (avg(avg_position)-49) * (avg(avg_position)-49) ) as B_score, avg(avg_position) as ap, avg(commission) as c2, avg(epoch_credits) as ec2, vote_address as va2 from scores B -where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) -group by vote_address) -on va2 = a.vote_address +select epoch,keybase_id,name,score, + case when score=0 or mult<=0 then 0 else b_score*mult*mult end as b_score, + b_score-score as delta_score, ap-49 mult, avg_position, ap, commission, c2, dcc2, + epoch_credits, ec2, epoch_credits-ec2 as delta_credits, + 0.0 as pct, vote_address +from scores A +left outer JOIN ( + select round( avg(b.epoch_credits) * (100-avg(b.commission))/100 * (100-avg(b.data_center_concentration)*3)/100 ) as B_score, + avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, avg(b.epoch_credits) as ec2, avg(b.data_center_concentration) as dcc2, b.vote_address as va2 + from scores B + where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) + group by vote_address + ) + on va2 = a.vote_address where A.epoch = (select distinct epoch from imported) --and score=0 and b_score>0 --and score>0 WE MUST INCLUDE ALL RECORDS - so update-scores checks all validators health @@ -76,14 +86,18 @@ order by b_score desc ; -- compute PCT (informative) +-- SELECTING TOP 200 +drop table if exists temp; +create table temp as select * from avg order by b_score desc LIMIT 200; update avg as U -set pct = B_score / (select sum(A.b_score) from avg A where A.epoch = U.epoch) * 100 +set pct = B_score / (select sum(A.b_score) from temp A where A.epoch = U.epoch) * 100 +where exists (select 1 from temp A where A.vote_address = U.vote_address) ; -- show top validators with pct assgined (informative) .mode column .headers ON -select epoch,keybase_id,name, round(pct,2) as pct, b_score,delta_score,avg_position,epoch_credits, round(c2) as comm, vote_address from AVG +select epoch,keybase_id,name, round(pct,2) as pct, b_score,delta_score,avg_position,ec2, round(c2) as comm,round(dcc2) as dcc2, vote_address from AVG where pct>0 order by pct desc LIMIT 10 From 04738cb07b12f12212a094c6bd252b2633bad433 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 12 Sep 2021 09:15:32 -0300 Subject: [PATCH 30/39] add comments to sql processing --- sql/import.sql | 60 +++++++++++++++++++++++++++++++------------------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/sql/import.sql b/sql/import.sql index c44b9280..345ebc32 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -1,5 +1,6 @@ .open ./db/score-sqlite3.db +-- create table to receive stake-o-matic data DROP TABLE IF EXISTS imported; CREATE TABLE imported( epoch INT, @@ -19,13 +20,13 @@ CREATE TABLE imported( www_url TEXT ); +-- import stake-o-matic data .mode csv .import ./db/score-all-mainnet-beta/mainnet-beta-validator-detail.csv imported ---.import ./db/score-all-testnet/testnet-validator-detail.csv imported --remove header row delete FROM imported where identity='identity'; ---add pct column +--add pct and stake-concentration columns ALTER table imported add pct FLOAT; ALTER table imported add stake_conc FLOAT; UPDATE imported set @@ -56,51 +57,64 @@ select 'avg epoch_credits',avg(epoch_credits), from imported where pct>0; --- add to scores --drop table 
scores; +-- add imported epoch to table scores create TABLE if not EXISTS scores as select * from imported; DELETE FROM scores where epoch = (select DISTINCT epoch from imported); INSERT INTO scores select * from imported; --- recompute avg last 3 epochs --- * (avg(b.avg_position)-49) * (avg(b.avg_position)-49) ) +-- recompute avg table with last 3 epochs +-- if score=0 from imported => below nakamoto coefficient, or commission 100% or less than 100 SOL staked +-- also we set score=0 if below 50% avg or less than 3 epochs on record +-- create pct column and set to zero, will update after when selecting top 200 DROP TABLE IF EXISTS avg; create table AVG as -select epoch,keybase_id,name,score, - case when score=0 or mult<=0 then 0 else b_score*mult*mult end as b_score, - b_score-score as delta_score, ap-49 mult, avg_position, ap, commission, c2, dcc2, - epoch_credits, ec2, epoch_credits-ec2 as delta_credits, - 0.0 as pct, vote_address -from scores A +select 0 as rank, epoch,keybase_id, vote_address,name, + case when score=0 or mult<=0 or score_records<3 then 0 else ROUND(base_score*mult*mult) end as avg_score, + base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, + epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, + 0.0 as pct, score_records +from imported A left outer JOIN ( - select round( avg(b.epoch_credits) * (100-avg(b.commission))/100 * (100-avg(b.data_center_concentration)*3)/100 ) as B_score, - avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, avg(b.epoch_credits) as ec2, avg(b.data_center_concentration) as dcc2, b.vote_address as va2 + select count(*) as score_records, + round( avg(b.epoch_credits) * (100-avg(b.commission))/100 * (100-avg(b.data_center_concentration)*3)/100 ) as base_score, + avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, ROUND(avg(b.epoch_credits)) as ec2, + avg(b.data_center_concentration) as dcc2, b.vote_address as va2 from scores B where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) group by vote_address ) on va2 = a.vote_address where A.epoch = (select distinct epoch from imported) ---and score=0 and b_score>0 ---and score>0 WE MUST INCLUDE ALL RECORDS - so update-scores checks all validators health -order by b_score desc +--and score>0 NOTE: WE MUST INCLUDE ALL RECORDS - so update-scores checks all validators' health +order by base_score desc ; --- compute PCT (informative) --- SELECTING TOP 200 +-- compute rank drop table if exists temp; -create table temp as select * from avg order by b_score desc LIMIT 200; +create table temp as select vote_address, RANK() over (order by avg_score DESC) as rank from avg; +-- set rank in avg table +update avg +set rank = (select rank from temp where temp.vote_address=avg.vote_address); + +-- SELECT TOP 200 +drop table if exists temp; +create table temp as select * from avg order by avg_score desc LIMIT 200; +-- set pct ONLY ON TOP 200 update avg as U -set pct = B_score / (select sum(A.b_score) from temp A where A.epoch = U.epoch) * 100 +set pct = avg_score / (select sum(A.avg_score) from temp A where A.epoch = U.epoch) * 100 where exists (select 1 from temp A where A.vote_address = U.vote_address) ; -- show top validators with pct assgined (informative) .mode column .headers ON -select epoch,keybase_id,name, round(pct,2) as pct, b_score,delta_score,avg_position,ec2, round(c2) as comm,round(dcc2) as dcc2, vote_address from AVG +select 
epoch,rank,keybase_id,name, round(pct,4) as pct, avg_score, ROUND(mult,4) as mult, + round(avg_pos,4) as avg_pos, + epoch_credits,avg_ec,delta_credits, + avg_commiss,round(dcc2,5) as dcc2 from AVG where pct>0 -order by pct desc +order by rank LIMIT 10 ; - +select count(*) as validators_with_pct from avg where pct<>0; .exit From 92e3c434bb141eef0ae6331dfc5766fe535c1732 Mon Sep 17 00:00:00 2001 From: luciotato Date: Wed, 15 Sep 2021 22:31:24 -0300 Subject: [PATCH 31/39] new field adj_credits --- sql/import.sql | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/sql/import.sql b/sql/import.sql index 345ebc32..8d00261a 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -29,11 +29,18 @@ delete FROM imported where identity='identity'; --add pct and stake-concentration columns ALTER table imported add pct FLOAT; ALTER table imported add stake_conc FLOAT; +ALTER table imported add adj_credits INTEGER; UPDATE imported set pct = round(score * 100.0 / (select sum(score) from imported),4), - stake_conc = round(active_stake * 100.0 / (select sum(active_stake) from imported),4) + stake_conc = round(active_stake * 100.0 / (select sum(active_stake) from imported),4), + adj_credits = CAST((epoch_credits * (100-commission-3*data_center_concentration)/100) as INTEGER) ; +--recompute avg_position based on adj_credits +update imported +set avg_position = adj_credits * 50 / (select avg(adj_credits) from scores B where adj_credits>30000); + + --control, show total staked select DISTINCT epoch from imported; select 'validators',count(*),'total staked',sum(active_stake) from imported; @@ -62,6 +69,7 @@ create TABLE if not EXISTS scores as select * from imported; DELETE FROM scores where epoch = (select DISTINCT epoch from imported); INSERT INTO scores select * from imported; + -- recompute avg table with last 3 epochs -- if score=0 from imported => below nakamoto coefficient, or commission 100% or less than 100 SOL staked -- also we set score=0 if below 50% avg or less than 3 epochs on record @@ -69,14 +77,14 @@ INSERT INTO scores select * from imported; DROP TABLE IF EXISTS avg; create table AVG as select 0 as rank, epoch,keybase_id, vote_address,name, - case when score=0 or mult<=0 or score_records<3 then 0 else ROUND(base_score*mult*mult) end as avg_score, + case when score=0 or mult<=0 or score_records<3 then 0 else ROUND(base_score*mult) end as avg_score, base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, 0.0 as pct, score_records from imported A left outer JOIN ( select count(*) as score_records, - round( avg(b.epoch_credits) * (100-avg(b.commission))/100 * (100-avg(b.data_center_concentration)*3)/100 ) as base_score, + round( avg(b.adj_credits) ) as base_score, avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, ROUND(avg(b.epoch_credits)) as ec2, avg(b.data_center_concentration) as dcc2, b.vote_address as va2 from scores B From 28943a528383b3ebe2154c5accb6d031d2e82f7b Mon Sep 17 00:00:00 2001 From: luciotato Date: Tue, 19 Oct 2021 10:55:19 -0300 Subject: [PATCH 32/39] add active stake --- import-into-sqlite.sh | 1 + score-all-mainnet.sh | 1 + sql/import.sql | 8 ++++---- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/import-into-sqlite.sh b/import-into-sqlite.sh index 6c6117cf..3a5834a1 100644 --- a/import-into-sqlite.sh +++ b/import-into-sqlite.sh @@ -1 +1,2 @@ sqlite3 <./sql/import.sql +date diff --git 
a/score-all-mainnet.sh b/score-all-mainnet.sh index f0f90344..ec7881c5 100644 --- a/score-all-mainnet.sh +++ b/score-all-mainnet.sh @@ -1,3 +1,4 @@ +date ./target/debug/solana-stake-o-matic --cluster mainnet-beta --markdown $* \ score-all \ --score-max-commission 10 \ diff --git a/sql/import.sql b/sql/import.sql index 8d00261a..a192a267 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -79,14 +79,14 @@ create table AVG as select 0 as rank, epoch,keybase_id, vote_address,name, case when score=0 or mult<=0 or score_records<3 then 0 else ROUND(base_score*mult) end as avg_score, base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, - epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, - 0.0 as pct, score_records + epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, + 0.0 as pct, score_records, avg_active_stake from imported A left outer JOIN ( select count(*) as score_records, round( avg(b.adj_credits) ) as base_score, avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, ROUND(avg(b.epoch_credits)) as ec2, - avg(b.data_center_concentration) as dcc2, b.vote_address as va2 + avg(b.data_center_concentration) as dcc2, b.vote_address as va2, avg(b.active_stake) as avg_active_stake from scores B where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) group by vote_address @@ -122,7 +122,7 @@ select epoch,rank,keybase_id,name, round(pct,4) as pct, avg_score, ROUND(mult,4) avg_commiss,round(dcc2,5) as dcc2 from AVG where pct>0 order by rank -LIMIT 10 +LIMIT 15 ; select count(*) as validators_with_pct from avg where pct<>0; .exit From 4caeccd458d986953c099602b653b88049408b46 Mon Sep 17 00:00:00 2001 From: luciotato Date: Tue, 2 Nov 2021 12:11:11 -0300 Subject: [PATCH 33/39] move to avg last 5 epochs --- sql/import.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/import.sql b/sql/import.sql index a192a267..00001061 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -70,14 +70,14 @@ DELETE FROM scores where epoch = (select DISTINCT epoch from imported); INSERT INTO scores select * from imported; --- recompute avg table with last 3 epochs +-- recompute avg table with last 5 epochs -- if score=0 from imported => below nakamoto coefficient, or commission 100% or less than 100 SOL staked --- also we set score=0 if below 50% avg or less than 3 epochs on record +-- also we set score=0 if below 50% avg or less than 5 epochs on record -- create pct column and set to zero, will update after when selecting top 200 DROP TABLE IF EXISTS avg; create table AVG as select 0 as rank, epoch,keybase_id, vote_address,name, - case when score=0 or mult<=0 or score_records<3 then 0 else ROUND(base_score*mult) end as avg_score, + case when score=0 or mult<=0 or score_records<5 then 0 else ROUND(base_score*mult) end as avg_score, base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, 0.0 as pct, score_records, avg_active_stake @@ -88,7 +88,7 @@ left outer JOIN ( avg(b.avg_position) as ap, avg(b.avg_position)-49 as mult, avg(b.commission) as c2, ROUND(avg(b.epoch_credits)) as ec2, avg(b.data_center_concentration) as dcc2, b.vote_address as va2, avg(b.active_stake) as avg_active_stake from scores B - where B.epoch between (select distinct epoch from imported)-2 and (select distinct epoch from imported) + where 
B.epoch between (select distinct epoch from imported)-4 and (select distinct epoch from imported) group by vote_address ) on va2 = a.vote_address From bab763f521ebf71418bf223625ae94124faeea44 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 7 Nov 2021 10:34:59 -0300 Subject: [PATCH 34/39] set score=0 if stake<100 --- sql/check-commision-changes.sql | 11 +++++++++++ sql/control.sql | 13 +++++++++++++ sql/import.sql | 2 +- 3 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 sql/check-commision-changes.sql create mode 100644 sql/control.sql diff --git a/sql/check-commision-changes.sql b/sql/check-commision-changes.sql new file mode 100644 index 00000000..0adfe5c9 --- /dev/null +++ b/sql/check-commision-changes.sql @@ -0,0 +1,11 @@ +-- +-- CHECK FOR commision changes +-- +drop if exists table t1; +create table t1 as +select vote_address,commission from scores where epoch=243 +EXCEPT select vote_address,commission from scores where epoch=242 +; +select * from scores where vote_address in ( select vote_address from t1 ) +order by vote_address,epoch +; diff --git a/sql/control.sql b/sql/control.sql new file mode 100644 index 00000000..073d8acb --- /dev/null +++ b/sql/control.sql @@ -0,0 +1,13 @@ +.open db/score-sqlite3.db +.mode column +.headers ON +select epoch,rank,keybase_id,name, round(pct,4) as pct, avg_score, ROUND(mult,4) as mult, + round(avg_pos,4) as avg_pos, + epoch_credits,avg_ec,delta_credits, + avg_commiss,round(dcc2,5) as dcc2 from AVG +where pct>0 +order by rank +LIMIT 20 +; +select count(*) as validators_with_pct from avg where pct<>0; +.exit diff --git a/sql/import.sql b/sql/import.sql index 00001061..96a7846a 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -77,7 +77,7 @@ INSERT INTO scores select * from imported; DROP TABLE IF EXISTS avg; create table AVG as select 0 as rank, epoch,keybase_id, vote_address,name, - case when score=0 or mult<=0 or score_records<5 then 0 else ROUND(base_score*mult) end as avg_score, + case when score=0 or mult<=0 or score_records<5 or COALESCE(avg_active_stake,0)<100 then 0 else ROUND(base_score*mult) end as avg_score, base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, 0.0 as pct, score_records, avg_active_stake From 81bbd28ed3d9c312257af4f34cbe751011f5ba46 Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 21 Nov 2021 10:40:56 -0300 Subject: [PATCH 35/39] extend score cut to top 250 --- sql/import.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/import.sql b/sql/import.sql index 96a7846a..4ce4ec7e 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -73,7 +73,7 @@ INSERT INTO scores select * from imported; -- recompute avg table with last 5 epochs -- if score=0 from imported => below nakamoto coefficient, or commission 100% or less than 100 SOL staked -- also we set score=0 if below 50% avg or less than 5 epochs on record --- create pct column and set to zero, will update after when selecting top 200 +-- create pct column and set to zero, will update after when selecting top 250 DROP TABLE IF EXISTS avg; create table AVG as select 0 as rank, epoch,keybase_id, vote_address,name, @@ -104,10 +104,10 @@ create table temp as select vote_address, RANK() over (order by avg_score DESC) update avg set rank = (select rank from temp where temp.vote_address=avg.vote_address); --- SELECT TOP 200 +-- SELECT TOP 250 drop table if exists temp; -create table temp as select * from avg order by 
avg_score desc LIMIT 200; --- set pct ONLY ON TOP 200 +create table temp as select * from avg order by avg_score desc LIMIT 250; +-- set pct ONLY ON selected TOP validators update avg as U set pct = avg_score / (select sum(A.avg_score) from temp A where A.epoch = U.epoch) * 100 where exists (select 1 from temp A where A.vote_address = U.vote_address) From 879de8ddc57b4ba075ce7655895bb749ea6959bd Mon Sep 17 00:00:00 2001 From: luciotato Date: Sun, 26 Dec 2021 10:50:29 -0300 Subject: [PATCH 36/39] sql-utils --- sql/average-epochs.sql | 23 +++++++++++++++++++++++ sql/copy-from-scores2.sql | 21 +++++++++++++++++++++ sql/fields-list.sql | 1 + sql/import.sql | 3 ++- 4 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 sql/average-epochs.sql create mode 100644 sql/copy-from-scores2.sql create mode 100644 sql/fields-list.sql diff --git a/sql/average-epochs.sql b/sql/average-epochs.sql new file mode 100644 index 00000000..da55c6c1 --- /dev/null +++ b/sql/average-epochs.sql @@ -0,0 +1,23 @@ + +select +261 as epoch , +max(keybase_id) , +max(name), +max(identity) , +vote_address, +avg(score), +avg(avg_position), +avg(commission), +avg(active_stake), +CAST(avg(epoch_credits) as INTEGER), +avg(data_center_concentration), +min(can_halt_the_network_group), +min(stake_state), +min(stake_state_reason), +max(www_url), +avg(pct), +avg(stake_conc), +CAST(avg(adj_credits) as INTEGER) +from scores +where epoch BETWEEN 260 and 262 +group by vote_address diff --git a/sql/copy-from-scores2.sql b/sql/copy-from-scores2.sql new file mode 100644 index 00000000..665830a4 --- /dev/null +++ b/sql/copy-from-scores2.sql @@ -0,0 +1,21 @@ +insert into scores +select +epoch , +null as keybase_id , +name , +null as identity , +vote_address , +score , +average_position as avg_position , +commission , +avg_active_stake as active_stake , +this_epoch_credits epoch_credits , +data_center_concentration , +null as can_halt_the_network_group , +null as stake_state , +null as stake_state_reason , +null as www_url , +pct , +null as stake_conc , +null as adj_credits +from scores2 \ No newline at end of file diff --git a/sql/fields-list.sql b/sql/fields-list.sql new file mode 100644 index 00000000..24a6d906 --- /dev/null +++ b/sql/fields-list.sql @@ -0,0 +1 @@ +select name,"," from pragma_table_info("scores") \ No newline at end of file diff --git a/sql/import.sql b/sql/import.sql index 4ce4ec7e..c19e3dac 100644 --- a/sql/import.sql +++ b/sql/import.sql @@ -80,7 +80,8 @@ select 0 as rank, epoch,keybase_id, vote_address,name, case when score=0 or mult<=0 or score_records<5 or COALESCE(avg_active_stake,0)<100 then 0 else ROUND(base_score*mult) end as avg_score, base_score, ap-49 mult, ap as avg_pos, commission, round(c2,2) as avg_commiss, dcc2, epoch_credits, cast(ec2 as integer) as avg_ec, epoch_credits-ec2 as delta_credits, - 0.0 as pct, score_records, avg_active_stake + 0.0 as pct, score_records, avg_active_stake, + can_halt_the_network_group, identity, stake_conc from imported A left outer JOIN ( select count(*) as score_records, From 03c8bc9e6176edef41ec566943a5e9f5652caad1 Mon Sep 17 00:00:00 2001 From: luciotato Date: Fri, 31 Dec 2021 19:59:00 -0300 Subject: [PATCH 37/39] include last validator in nakamoto coeff --- bot/src/main.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index 29d1fb99..f0c3824b 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1196,21 +1196,21 @@ fn classify( let mut accumulated: u64 = 0; let mut 
count_halt_group: u32 = 0; let limit: u64 = total_active_stake / 100 * 33; - let mut concentrated_validators_stake_limit = limit; + let mut last_under_nakamoto_active_stake = limit; for info in &vote_account_info { + last_under_nakamoto_active_stake = info.active_stake; accumulated += info.active_stake; count_halt_group += 1; if accumulated > limit { break; } - concentrated_validators_stake_limit = info.active_stake } info!( - "validators:{} total_active_stake:{}, can_halt_the_network:top {}, pro-decentralization-stake-limit: less than {}", + "validators:{} total_active_stake:{}, can_halt_the_network:top {}, last under-nakamoto-coefficient active-stake: {}", &vote_account_info.len(), total_active_stake, count_halt_group, - lamports_to_sol(concentrated_validators_stake_limit), + lamports_to_sol(last_under_nakamoto_active_stake), ); // Note: get_self_stake_by_vote_account is expensive because it does a RPC call for each validator @@ -1432,16 +1432,16 @@ fn classify( } }); - let insufficent_self_stake_msg = + let insufficient_self_stake_msg = format!("insufficient self stake: {}", Sol(self_stake)); if config.min_self_stake_lamports > 0 && !config.enforce_min_self_stake && self_stake < config.min_self_stake_lamports { - validator_notes.push(insufficent_self_stake_msg.clone()); + validator_notes.push(insufficient_self_stake_msg.clone()); } - let insufficent_testnet_participation = testnet_participation + let insufficient_testnet_participation = testnet_participation .as_ref() .map(|testnet_participation| { if let Some(participant) = participant { @@ -1460,14 +1460,14 @@ fn classify( // no score if in the can-halt-the-network group score_discounts.can_halt_the_network_group = - active_stake >= concentrated_validators_stake_limit; + active_stake >= last_under_nakamoto_active_stake; let (stake_state, reason) = if let Some(reason) = infrastructure_concentration_destake_reason { (ValidatorStakeState::None, reason) } else if config.enforce_min_self_stake && self_stake < config.min_self_stake_lamports { - (ValidatorStakeState::None, insufficent_self_stake_msg) + (ValidatorStakeState::None, insufficient_self_stake_msg) } else if active_stake > config.max_active_stake_lamports { ( ValidatorStakeState::None, @@ -1478,10 +1478,13 @@ fn classify( ValidatorStakeState::None, format!("Commission is too high: {}% commission", commission), ) - } else if let Some(insufficent_testnet_participation) = - insufficent_testnet_participation + } else if let Some(insufficient_testnet_participation) = + insufficient_testnet_participation { - (ValidatorStakeState::None, insufficent_testnet_participation) + ( + ValidatorStakeState::None, + insufficient_testnet_participation, + ) } else if poor_voters.contains(&identity) { ( ValidatorStakeState::None, @@ -1896,7 +1899,6 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { .epoch_credits .cmp(&a.1.score_data.as_ref().unwrap().epoch_credits) }); - for (identity, classification) in validator_classifications { let validator_markdown = validators_markdown.entry(identity).or_default(); From 6fff2bed23fc6944f0337f5b8e9537973dbaf60c Mon Sep 17 00:00:00 2001 From: janlegner <32453746+janlegner@users.noreply.github.com> Date: Wed, 19 Jan 2022 18:33:25 +0100 Subject: [PATCH 38/39] Dockerized (#3) --- Dockerfile | 21 +++++++++++++++++++ RELEASE.md | 21 +++++++++++-------- ...mainnet.sh => clean-score-all-mainnet.bash | 0 docker-build.bash | 3 +++ docker-run.bash | 18 ++++++++++++++++ score-all-mainnet.sh | 0 score-all-testnet.sh | 0 7 files changed, 54 
insertions(+), 9 deletions(-) create mode 100644 Dockerfile rename clean-score-all-mainnet.sh => clean-score-all-mainnet.bash (100%) mode change 100644 => 100755 create mode 100755 docker-build.bash create mode 100755 docker-run.bash mode change 100644 => 100755 score-all-mainnet.sh mode change 100644 => 100755 score-all-testnet.sh diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..54640152 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,21 @@ +FROM alpine:3.14 + +RUN apk update +RUN apk upgrade +RUN apk add --no-cache bash sqlite rust cargo openssl-dev eudev-dev linux-headers + +WORKDIR /usr/local/ + +ADD program program/ +ADD cli cli/ +ADD bot bot/ +ADD sql sql/ +ADD Cargo.lock . +ADD Cargo.toml . +ADD clean-score-all-mainnet.bash . +ADD score-all-mainnet.sh . +ADD import-into-sqlite.sh . + +RUN cargo build + +CMD ./clean-score-all-mainnet.bash diff --git a/RELEASE.md b/RELEASE.md index 6dbfb6bf..596c7a85 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,11 +1,14 @@ +# stake-o-matic -The release of the binaries is fully automated. Do not create a Github release -manually. +## Build with cargo and run +```bash +cargo build +wget "https://github.com/marinade-finance/staking-status/raw/main/scores.sqlite3" -O "db/score-sqlite3.db" +./clean-score-all-mainnet.bash +``` -#### Release Process -1. Create a new tag for the next available release number, see - https://github.com/solana-labs/stake-o-matic/tags, and push it to the repo: - eg, `git tag v42 && git push origin v42` -2. The GitHub workflow automatically triggers a new build, creates a release - with the name of the tag, and uploads the release artifacts. You can monitor - the release process at https://github.com/solana-labs/stake-o-matic/actions +## Build with docker and run +```bash +./docker-build.bash +./docker-run.bash +``` diff --git a/clean-score-all-mainnet.sh b/clean-score-all-mainnet.bash old mode 100644 new mode 100755 similarity index 100% rename from clean-score-all-mainnet.sh rename to clean-score-all-mainnet.bash diff --git a/docker-build.bash b/docker-build.bash new file mode 100755 index 00000000..bdc5c834 --- /dev/null +++ b/docker-build.bash @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker build -t stake-o-matic . 
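
For context: docker-build.bash above only builds the image; docker-run.bash (next diff) fetches the historic scores database and then runs the scoring pipeline inside the container. A minimal usage sketch, assuming it is run from the repository root and that VALIDATORS_APP_TOKEN holds a valid validators.app API token (the value below is a placeholder):

    # Sketch only: build the image, then run the full scoring pass in a container.
    export VALIDATORS_APP_TOKEN="<your-validators.app-token>"   # placeholder value
    ./docker-build.bash   # builds the "stake-o-matic" image from the Dockerfile above
    ./docker-run.bash     # downloads scores.sqlite3 into ./db and runs clean-score-all-mainnet.bash in the container
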
diff --git a/docker-run.bash b/docker-run.bash new file mode 100755 index 00000000..e69b9bb6 --- /dev/null +++ b/docker-run.bash @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -x + +SCRIPT_DIR="$( cd "$(dirname "$0")" ; pwd -P )" +DB_PATH="$SCRIPT_DIR/db" +SQLITE_SCORES_PATH="$DB_PATH/score-sqlite3.db" +HISTORIC_DATA="https://github.com/marinade-finance/staking-status/raw/main/scores.sqlite3" + +mkdir -p "$DB_PATH" +wget "$HISTORIC_DATA" -O "$SQLITE_SCORES_PATH" + +docker run \ + --name stake-o-matic \ + --user "$UID" \ + --rm \ + --volume "$DB_PATH:/usr/local/db" \ + --env "VALIDATORS_APP_TOKEN=$VALIDATORS_APP_TOKEN" \ + stake-o-matic ./clean-score-all-mainnet.bash diff --git a/score-all-mainnet.sh b/score-all-mainnet.sh old mode 100644 new mode 100755 diff --git a/score-all-testnet.sh b/score-all-testnet.sh old mode 100644 new mode 100755 From d4798c68531e0c135e1157d05816dc0686777dff Mon Sep 17 00:00:00 2001 From: Jan Legner Date: Sat, 22 Jan 2022 19:52:30 +0100 Subject: [PATCH 39/39] Fixed double quotes escaping --- bot/src/main.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/bot/src/main.rs b/bot/src/main.rs index f0c3824b..1ed2e119 100644 --- a/bot/src/main.rs +++ b/bot/src/main.rs @@ -1928,8 +1928,8 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { let csv_line = format!( r#"{},"{}","{}","{}","{}",{},{},{},{},{},{:.4},{},"{:?}","{}","{}""#, epoch, - score_data.validators_app_info.keybase_id, - score_data.validators_app_info.name, + escape_quotes(&score_data.validators_app_info.keybase_id), + escape_quotes(&score_data.validators_app_info.name), identity.to_string(), classification.vote_address, score, @@ -1940,8 +1940,8 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { score_data.data_center_concentration, score_data.score_discounts.can_halt_the_network_group, classification.stake_state, - classification.stake_state_reason, - score_data.validators_app_info.www_url, + escape_quotes(&classification.stake_state_reason), + escape_quotes(&score_data.validators_app_info.www_url), ); validator_detail_csv.push(csv_line); } @@ -2008,6 +2008,10 @@ fn generate_markdown(epoch: Epoch, config: &Config) -> BoxResult<()> { Ok(()) } +fn escape_quotes(original: &String) -> String { + original.replace("\"", "\"\"") +} + #[cfg(test)] mod test { use super::*;
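
With this last change, any double quote embedded in keybase_id, name, stake_state_reason or www_url is doubled, which is the standard CSV escaping for quoted fields, so the generated validator-detail CSV no longer breaks the sqlite import. A minimal sketch of that downstream flow, assuming the default ./db paths used by the scripts earlier in this series:

    # Sketch only, assuming the default paths wired into score-all-mainnet.sh and sql/import.sql.
    ./score-all-mainnet.sh      # score every mainnet validator; expected to write mainnet-beta-validator-detail.csv under ./db
    ./import-into-sqlite.sh     # runs sql/import.sql: imports the CSV and recomputes the scores/AVG tables and pct column
    sqlite3 < sql/control.sql   # prints the top-ranked validators and their pct share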