hagridctl: update for sqlite

Vincent Breitmoser
2024-03-24 13:10:57 +01:00
parent df6bfb2d84
commit a504b0ea12
4 changed files with 6 additions and 284 deletions

hagridctl/src/dump.rs (deleted)

@@ -1,122 +0,0 @@
-use anyhow::Result;
-use openpgp::parse::Parse;
-use openpgp::serialize::Serialize;
-use openpgp::Cert;
-use std::fs::{self, File};
-use std::path::Path;
-use std::time::Instant;
-
-use indicatif::{ProgressBar, ProgressStyle};
-use walkdir::WalkDir;
-
-use HagridConfig;
-
-struct DumpStats<'a> {
-    progress: &'a ProgressBar,
-    prefix: String,
-    count_total: u64,
-    count_err: u64,
-    count_partial: u64,
-    start_time_partial: Instant,
-    kps_partial: u64,
-}
-
-impl<'a> DumpStats<'a> {
-    fn new(progress: &'a ProgressBar) -> Self {
-        Self {
-            progress,
-            prefix: "".to_owned(),
-            count_total: 0,
-            count_err: 0,
-            count_partial: 0,
-            start_time_partial: Instant::now(),
-            kps_partial: 0,
-        }
-    }
-
-    fn update(&mut self, tpk: &Cert) {
-        // If a new TPK starts, parse and import.
-        self.count_total += 1;
-        self.count_partial += 1;
-        if (self.count_total % 10) == 0 {
-            self.prefix = tpk.fingerprint().to_string()[0..4].to_owned();
-        }
-        self.progress_update();
-    }
-
-    fn progress_update(&mut self) {
-        if (self.count_total % 10) != 0 {
-            return;
-        }
-        if self.count_partial >= 1000 {
-            let runtime = (self.start_time_partial.elapsed().as_millis() + 1) as u64;
-            self.kps_partial = (self.count_partial * 1000) / runtime;
-            self.start_time_partial = Instant::now();
-            self.count_partial = 0;
-        }
-        self.progress.set_message(&format!(
-            "prefix {} dumped {:5} keys, {:5} Errors ({:3} keys/s)",
-            self.prefix, self.count_total, self.count_err, self.kps_partial
-        ));
-    }
-}
-
-pub fn do_dump(config: &HagridConfig) -> Result<()> {
-    let published_dir = config
-        .keys_external_dir
-        .as_ref()
-        .unwrap()
-        .join("links")
-        .join("by-email");
-    let dirs: Vec<_> = WalkDir::new(published_dir)
-        .min_depth(1)
-        .max_depth(1)
-        .sort_by(|a, b| a.file_name().cmp(b.file_name()))
-        .into_iter()
-        .flatten()
-        .map(|entry| entry.into_path())
-        .collect();
-
-    let progress_bar = ProgressBar::new(dirs.len() as u64);
-    progress_bar.set_style(
-        ProgressStyle::default_bar()
-            .template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
-            .progress_chars("##-"),
-    );
-
-    let mut stats = DumpStats::new(&progress_bar);
-    let mut output_file = File::create("keyring.pub.pgp")?;
-    for dir in dirs {
-        progress_bar.inc(1);
-        dump_dir_recursively(&mut stats, &mut output_file, &dir)?;
-    }
-    progress_bar.finish();
-    Ok(())
-}
-
-fn dump_dir_recursively(stats: &mut DumpStats, output_file: &mut File, dir: &Path) -> Result<()> {
-    for path in WalkDir::new(dir)
-        .follow_links(true)
-        .into_iter()
-        .flatten()
-        .filter(|e| e.file_type().is_file())
-        .map(|entry| entry.into_path())
-    {
-        let result: Result<()> = (|| {
-            let bytes = fs::read_to_string(path.as_path())?;
-            let tpk = Cert::from_bytes(bytes.as_bytes())?;
-            tpk.export(output_file)?;
-            stats.update(&tpk);
-            Ok(())
-        })();
-        if let Err(err) = result {
-            stats.progress.println(format!("error: {:?}", err));
-            stats.count_err += 1;
-        }
-    }
-    Ok(())
-}
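
A note on the throughput figure: DumpStats recomputes its keys/s estimate every 1000 certs as count_partial * 1000 / elapsed_ms, where the +1 on the elapsed milliseconds guards against division by zero. For example, 1000 certs processed in 2500 ms gives (1000 * 1000) / 2501 = 399 keys/s. RegenerateStats below uses the same scheme.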

hagridctl/src/import.rs

@@ -27,7 +27,7 @@ use HagridConfig;
 const NUM_THREADS_MAX: usize = 3;

 #[allow(clippy::needless_collect)]
-pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>) -> Result<()> {
+pub fn do_import(config: &HagridConfig, input_files: Vec<PathBuf>) -> Result<()> {
     let num_threads = min(NUM_THREADS_MAX, input_files.len());
     let input_file_chunks = setup_chunks(input_files, num_threads);
@@ -40,7 +40,7 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
             let config = config.clone();
             let multi_progress = multi_progress.clone();
             thread::spawn(move || {
-                import_from_files(&config, dry_run, input_file_chunk, multi_progress).unwrap();
+                import_from_files(&config, input_file_chunk, multi_progress).unwrap();
             })
         })
         .collect();
@@ -117,16 +117,10 @@ impl<'a> ImportStats<'a> {
 fn import_from_files(
     config: &HagridConfig,
-    dry_run: bool,
     input_files: Vec<PathBuf>,
     multi_progress: Arc<MultiProgress>,
 ) -> Result<()> {
-    let db = KeyDatabase::new_internal(
-        config.keys_internal_dir.as_ref().unwrap(),
-        config.keys_external_dir.as_ref().unwrap(),
-        config.tmp_dir.as_ref().unwrap(),
-        dry_run,
-    )?;
+    let db = KeyDatabase::new_file(config.keys_internal_dir.as_ref().unwrap())?;

     for input_file in input_files {
         import_from_file(&db, &input_file, &multi_progress)?;
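
Reassembled from the hunk above, the sqlite-era import_from_files reduces to the following sketch (the tail of the import loop and the final Ok(()) are not part of the diff and are assumed unchanged):

    fn import_from_files(
        config: &HagridConfig,
        input_files: Vec<PathBuf>,
        multi_progress: Arc<MultiProgress>,
    ) -> Result<()> {
        // The sqlite-backed store opens from a single path; the external
        // directory, tmp directory, and dry_run flag of new_internal are gone.
        let db = KeyDatabase::new_file(config.keys_internal_dir.as_ref().unwrap())?;
        for input_file in input_files {
            import_from_file(&db, &input_file, &multi_progress)?;
        }
        Ok(())
    }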

hagridctl/src/main.rs

@@ -17,9 +17,7 @@ use anyhow::Result;
 use clap::{App, Arg, SubCommand};

-mod dump;
 mod import;
-mod regenerate;

 #[derive(Deserialize)]
 pub struct HagridConfigs {
@@ -35,10 +33,10 @@ pub struct HagridConfigs {
 pub struct HagridConfig {
     _template_dir: Option<PathBuf>,
     keys_internal_dir: Option<PathBuf>,
-    keys_external_dir: Option<PathBuf>,
+    _keys_external_dir: Option<PathBuf>,
     _assets_dir: Option<PathBuf>,
     _token_dir: Option<PathBuf>,
-    tmp_dir: Option<PathBuf>,
+    _tmp_dir: Option<PathBuf>,
     _maintenance_file: Option<PathBuf>,
 }
@@ -63,19 +61,9 @@ fn main() -> Result<()> {
.default_value("prod")
.possible_values(&["dev", "stage", "prod"]),
)
.subcommand(SubCommand::with_name("regenerate").about("Regenerate symlink directory"))
.subcommand(
SubCommand::with_name("dump").about("Dump whole database into a large keyring file"),
)
.subcommand(
SubCommand::with_name("import")
.about("Import keys into Hagrid")
.arg(
Arg::with_name("dry run")
.short("n")
.long("dry-run")
.help("don't actually keep imported keys"),
)
.arg(
Arg::with_name("keyring files")
.required(true)
@@ -95,18 +83,13 @@ fn main() -> Result<()> {
     };

     if let Some(matches) = matches.subcommand_matches("import") {
-        let dry_run = matches.occurrences_of("dry run") > 0;
         let keyrings: Vec<PathBuf> = matches
             .values_of_lossy("keyring files")
             .unwrap()
             .iter()
             .map(|arg| PathBuf::from_str(arg).unwrap())
             .collect();
-        import::do_import(&config, dry_run, keyrings)?;
-    } else if let Some(_matches) = matches.subcommand_matches("regenerate") {
-        regenerate::do_regenerate(&config)?;
-    } else if let Some(_matches) = matches.subcommand_matches("dump") {
-        dump::do_dump(&config)?;
+        import::do_import(&config, keyrings)?;
     } else {
         println!("{}", matches.usage());
     }
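
Net effect on the CLI: the regenerate and dump subcommands are gone, as is import's -n/--dry-run flag, leaving import <keyring files> as the only subcommand. Reassembled from the context and added lines above, the dispatch now reads:

    if let Some(matches) = matches.subcommand_matches("import") {
        let keyrings: Vec<PathBuf> = matches
            .values_of_lossy("keyring files")
            .unwrap()
            .iter()
            .map(|arg| PathBuf::from_str(arg).unwrap())
            .collect();
        import::do_import(&config, keyrings)?;
    } else {
        println!("{}", matches.usage());
    }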

hagridctl/src/regenerate.rs (deleted)

@@ -1,133 +0,0 @@
-use anyhow::Result;
-use std::path::Path;
-use std::time::Instant;
-
-use indicatif::{ProgressBar, ProgressStyle};
-use walkdir::WalkDir;
-
-use database::types::Fingerprint;
-use database::{Database, KeyDatabase, RegenerateResult};
-use HagridConfig;
-
-struct RegenerateStats<'a> {
-    progress: &'a ProgressBar,
-    prefix: String,
-    count_total: u64,
-    count_err: u64,
-    count_updated: u64,
-    count_unchanged: u64,
-    count_partial: u64,
-    start_time_partial: Instant,
-    kps_partial: u64,
-}
-
-impl<'a> RegenerateStats<'a> {
-    fn new(progress: &'a ProgressBar) -> Self {
-        Self {
-            progress,
-            prefix: "".to_owned(),
-            count_total: 0,
-            count_err: 0,
-            count_updated: 0,
-            count_unchanged: 0,
-            count_partial: 0,
-            start_time_partial: Instant::now(),
-            kps_partial: 0,
-        }
-    }
-
-    fn update(&mut self, result: Result<RegenerateResult>, fpr: Fingerprint) {
-        // If a new TPK starts, parse and import.
-        self.count_total += 1;
-        self.count_partial += 1;
-        if (self.count_total % 10) == 0 {
-            self.prefix = fpr.to_string()[0..4].to_owned();
-        }
-
-        match result {
-            Err(e) => {
-                self.progress.println(format!("{}: {}", fpr, e));
-                self.count_err += 1;
-            }
-            Ok(RegenerateResult::Updated) => self.count_updated += 1,
-            Ok(RegenerateResult::Unchanged) => self.count_unchanged += 1,
-        }
-
-        self.progress_update();
-    }
-
-    fn progress_update(&mut self) {
-        if (self.count_total % 10) != 0 {
-            return;
-        }
-        if self.count_partial >= 1000 {
-            let runtime = (self.start_time_partial.elapsed().as_millis() + 1) as u64;
-            self.kps_partial = (self.count_partial * 1000) / runtime;
-            self.start_time_partial = Instant::now();
-            self.count_partial = 0;
-        }
-        self.progress.set_message(&format!(
-            "prefix {} regenerated {:5} keys, {:5} Updated {:5} Unchanged {:5} Errors ({:3} keys/s)",
-            self.prefix, self.count_total, self.count_updated, self.count_unchanged, self.count_err, self.kps_partial));
-    }
-}
-
-pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
-    let db = KeyDatabase::new_internal(
-        config.keys_internal_dir.as_ref().unwrap(),
-        config.keys_external_dir.as_ref().unwrap(),
-        config.tmp_dir.as_ref().unwrap(),
-        false,
-    )?;
-
-    let published_dir = config
-        .keys_external_dir
-        .as_ref()
-        .unwrap()
-        .join("links")
-        .join("by-email");
-    let dirs: Vec<_> = WalkDir::new(published_dir)
-        .min_depth(1)
-        .max_depth(1)
-        .sort_by(|a, b| a.file_name().cmp(b.file_name()))
-        .into_iter()
-        .flatten()
-        .map(|entry| entry.into_path())
-        .collect();
-
-    let progress_bar = ProgressBar::new(dirs.len() as u64);
-    progress_bar.set_style(
-        ProgressStyle::default_bar()
-            .template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
-            .progress_chars("##-"),
-    );
-
-    let mut stats = RegenerateStats::new(&progress_bar);
-    for dir in dirs {
-        progress_bar.inc(1);
-        regenerate_dir_recursively(&db, &mut stats, &dir)?;
-    }
-    progress_bar.finish();
-    Ok(())
-}
-
-fn regenerate_dir_recursively(
-    db: &KeyDatabase,
-    stats: &mut RegenerateStats,
-    dir: &Path,
-) -> Result<()> {
-    for path in WalkDir::new(dir)
-        .follow_links(true)
-        .into_iter()
-        .flatten()
-        .filter(|e| e.file_type().is_file())
-        .map(|entry| entry.into_path())
-    {
-        let fpr = KeyDatabase::path_to_primary(&path).unwrap();
-        let result = db.regenerate_links(&fpr);
-        stats.update(result, fpr);
-    }
-    Ok(())
-}