Mirror of https://gitlab.com/keys.openpgp.org/hagrid.git (synced 2025-10-06 00:23:08 +02:00)
Replace "-c, --config" and "-e, --environment" opts with "--db-file-path" and "HAGRID_DB_FILE_PATH" env var.
Code review issue: https://gitlab.com/keys.openpgp.org/hagrid/-/merge_requests/214#note_2484117659

Changes:

- Extract the database file path and log dir path construction from the Sqlite::new_file() and Sqlite::new_internal() associated functions into Sqlite::db_file_path() and Sqlite::log_dir_path(). Doing these path manipulations inside Sqlite/KeyDatabase wasn't ideal: it increased the number of reasons why constructing a Sqlite can fail, unnecessarily constrained how (and with which paths) Sqlite can be used, and the logic doesn't really belong to Sqlite itself. We should probably move db_file_path() and log_dir_path() elsewhere later.
- Replace the base_dir parameter of Sqlite::new_file() with "db_file" and "log_dir" parameters, which makes it possible to use a database file passed in via command line options.
- Add the Sqlite::log_dir_path_from_db_file_path() associated function to infer the log dir path from a given db file path. It is used in command handlers where only db_file_path is provided (a standalone sketch of this derivation follows this message).
- Pull the log dir creation up from Sqlite::new_internal() into Sqlite::new_file() to avoid threading log_dir_path through several functions. This also makes the Sqlite::new_file() signature cleaner.
- Fix usages of Sqlite/KeyDatabase::new_file() throughout the code base: instead of providing base_dir, populate db_file_path and log_dir_path using the Sqlite::db_file_path() and Sqlite::log_dir_path() functions, which compute the paths from the provided base_dir.
- Remove the "-c, --config" and "-e, --environment" options from hagridctl::cli::Cli.
- Remove the hagridctl::cli::DeploymentEnvironment enum.
- Remove the entire hagridctl::config module, since we no longer extract paths from the configuration.
- Add the "env" feature flag to the "clap" dependency in hagridctl/Cargo.toml so that the Cli::db_file_path field can be populated from the HAGRID_DB_FILE_PATH environment variable.
- Add an optional --db-file-path option to "hagridctl". Cli::db_file_path itself is mandatory, but when --db-file-path is not provided Clap tries to populate it from the HAGRID_DB_FILE_PATH env var, and if that is not possible Clap terminates with an error message (see the clap sketch after this message).
- In hagridctl::cli::dispatch_cmd(), use db_file_path from cli.db_file_path instead of keys_internal_dir extracted from the configuration.
- Replace keys_db_internal_dir with db_file_path in the following command handlers:
  - hagridctl::delete::run
  - hagridctl::import::run
- In hagrid::web::configure_db_service(), get rid of the unnecessary unwrapping and re-wrapping of the Result and return the value from KeyDatabase::new_file() directly.
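Below is a minimal, self-contained sketch of the path derivation the new helpers perform. It uses free functions and an Option return value for brevity; in the actual change these are associated functions on Sqlite (aliased as KeyDatabase), and log_dir_path_from_db_file_path() returns anyhow::Result<PathBuf>.

```rust
use std::path::{Path, PathBuf};

const DEFAULT_DB_FILE_NAME: &str = "keys.sqlite";
const DEFAULT_LOG_DIR_NAME: &str = "log";

// The database file lives directly under the base directory.
fn db_file_path(base_dir: impl AsRef<Path>) -> PathBuf {
    base_dir.as_ref().join(DEFAULT_DB_FILE_NAME)
}

// The log directory also lives directly under the base directory.
fn log_dir_path(base_dir: impl AsRef<Path>) -> PathBuf {
    base_dir.as_ref().join(DEFAULT_LOG_DIR_NAME)
}

// When only the database file path is known (e.g. from --db-file-path),
// assume the log directory sits next to the database file.
fn log_dir_path_from_db_file_path(db_file_path: impl AsRef<Path>) -> Option<PathBuf> {
    db_file_path
        .as_ref()
        .parent()
        .map(|parent| parent.join(DEFAULT_LOG_DIR_NAME))
}

fn main() {
    let base = PathBuf::from("/var/lib/hagrid");
    let db = db_file_path(&base);   // /var/lib/hagrid/keys.sqlite
    let log = log_dir_path(&base);  // /var/lib/hagrid/log
    assert_eq!(log_dir_path_from_db_file_path(&db), Some(log));
}
```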
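And a minimal sketch of how an option with an environment-variable fallback is wired up with clap's derive API (this is what the "env" cargo feature enables); the struct below is a reduced stand-in for hagridctl's Cli, not the full definition.

```rust
use clap::Parser;
use std::path::PathBuf;

#[derive(Parser)]
#[command(version, about)]
struct Cli {
    /// Path to the Sqlite database file.
    /// If --db-file-path is not given, clap falls back to the
    /// HAGRID_DB_FILE_PATH environment variable, and exits with an
    /// error if neither source provides a value.
    #[arg(long, env = "HAGRID_DB_FILE_PATH")]
    db_file_path: PathBuf,
}

fn main() {
    let cli = Cli::parse();
    println!("using database file {:?}", cli.db_file_path);
}
```

With this wiring, an invocation might look like `hagridctl --db-file-path /path/keys.sqlite ...` or `HAGRID_DB_FILE_PATH=/path/keys.sqlite hagridctl ...` (hypothetical paths for illustration).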
@@ -7,7 +7,7 @@ use std::str::FromStr;
 use crate::types::{Email, Fingerprint, KeyID};
 use crate::{Database, Query};
-use anyhow::format_err;
+use anyhow::{anyhow, format_err};
 use sequoia_openpgp::{Cert, policy::StandardPolicy};
 use std::time::{SystemTime, UNIX_EPOCH};
 
@@ -21,18 +21,41 @@ use crate::{DatabaseTransaction, wkd};
 
 pub const POLICY: StandardPolicy = StandardPolicy::new();
 
+const DEFAULT_DB_FILE_NAME: &str = "keys.sqlite";
+const DEFAULT_LOG_DIR_NAME: &str = "log";
+
 pub struct Sqlite {
     pool: r2d2::Pool<SqliteConnectionManager>,
 }
 
 impl Sqlite {
-    pub fn new_file(base_dir: impl AsRef<Path>) -> anyhow::Result<Self> {
-        let base_dir = base_dir.as_ref().to_owned();
+    pub fn new_file(db_file: impl AsRef<Path>, log_dir: impl AsRef<Path>) -> anyhow::Result<Self> {
+        create_dir_all(log_dir)?;
 
-        let db_file = base_dir.join("keys.sqlite");
-        let manager = SqliteConnectionManager::file(db_file);
-
-        Self::new_internal(base_dir, manager)
+        Self::new_internal(SqliteConnectionManager::file(db_file))
+    }
+
+    pub fn log_dir_path(base_dir: impl AsRef<Path>) -> PathBuf {
+        base_dir.as_ref().join(DEFAULT_LOG_DIR_NAME)
+    }
+
+    pub fn log_dir_path_from_db_file_path(
+        db_file_path: impl AsRef<Path>,
+    ) -> anyhow::Result<PathBuf> {
+        db_file_path
+            .as_ref()
+            .parent()
+            .ok_or_else(|| {
+                anyhow!(
+                    "Can't get log dir path from invalid db file path: {:?}",
+                    db_file_path.as_ref()
+                )
+            })
+            .map(|parent_dir_path| parent_dir_path.join(DEFAULT_LOG_DIR_NAME))
+    }
+
+    pub fn db_file_path(base_dir: impl AsRef<Path>) -> PathBuf {
+        base_dir.as_ref().join(DEFAULT_DB_FILE_NAME)
     }
 
     #[cfg(test)]
@@ -67,10 +90,7 @@ impl Sqlite {
         Ok(r2d2::Pool::builder().build(manager)?)
     }
 
-    fn new_internal(base_dir: PathBuf, manager: SqliteConnectionManager) -> anyhow::Result<Self> {
-        let keys_dir_log = base_dir.join("log");
-        create_dir_all(&keys_dir_log)?;
-
+    fn new_internal(manager: SqliteConnectionManager) -> anyhow::Result<Self> {
         let pool = Self::build_pool(manager)?;
         let conn = pool.get()?;
         conn.pragma_update(None, "journal_mode", "wal")?;
@@ -493,7 +513,14 @@ mod tests {
 
     fn open_db() -> (TempDir, Sqlite) {
         let tmpdir = TempDir::new().unwrap();
-        let db = Sqlite::new_file(tmpdir.path()).unwrap();
+        let tempdir_path = tmpdir.path();
+
+        let db = Sqlite::new_file(
+            Sqlite::db_file_path(tempdir_path),
+            Sqlite::log_dir_path(tempdir_path),
+        )
+        .unwrap();
 
         (tmpdir, db)
     }
@@ -707,7 +734,12 @@ mod tests {
     #[test]
     fn reverse_fingerprint_to_path() {
         let tmpdir = TempDir::new().unwrap();
-        let db = Sqlite::new_file(tmpdir.path()).unwrap();
+        let tmpdir_path = tmpdir.path();
+        let db = Sqlite::new_file(
+            Sqlite::db_file_path(tmpdir_path),
+            Sqlite::log_dir_path(tmpdir_path),
+        )
+        .unwrap();
 
         let _fp: Fingerprint = "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();
@@ -24,6 +24,6 @@ pathdiff = { workspace = true }
 idna = { workspace = true }
 fs2 = { workspace = true }
 walkdir = { workspace = true }
-clap = { workspace = true, features = ["derive", "unicode"] }
+clap = { workspace = true, features = ["derive", "unicode", "env"] }
 toml = { workspace = true }
 indicatif = { workspace = true }
@@ -1,23 +1,14 @@
-use crate::{config, delete, import};
-use clap::{Parser, Subcommand, ValueEnum};
+use crate::{delete, import};
+use clap::{Parser, Subcommand};
 use hagrid_database::Query;
 use std::path::PathBuf;
 
 #[derive(Parser)]
 #[command(version, about, long_about = None, help_expected = true)]
 pub(crate) struct Cli {
-    #[arg(
-        short = 'c',
-        long = "config",
-        default_value = "Rocket.toml",
-        global = true
-    )]
-    /// Sets a custom config file
-    file: PathBuf,
-
-    #[arg(short = 'e', long = "environment", value_enum, value_name = "ENVIRONMENT", default_value_t = DeploymentEnvironment::Production, global = true)]
-    /// Specify section of configuration to use. Usually coupled to deployment environment name
-    env: DeploymentEnvironment,
+    #[arg(long, required = false, env = "HAGRID_DB_FILE_PATH")]
+    /// Set a path to the Sqlite database file
+    db_file_path: PathBuf,
 
     #[command(subcommand)]
     command: Command,
@@ -47,31 +38,16 @@ enum Command {
     },
 }
 
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
-pub(crate) enum DeploymentEnvironment {
-    #[value(alias = "prod")]
-    Production,
-    #[value(alias = "stage")]
-    Staging,
-    #[value(alias = "dev")]
-    Development,
-}
-
 pub(crate) fn dispatch_cmd(cli: &Cli) -> anyhow::Result<()> {
-    let keys_db_internal_dir = config::load(&cli.file, cli.env)?
-        .keys_internal_dir
-        .unwrap()
-        .canonicalize()?;
+    let db_file_path = cli.db_file_path.canonicalize()?;
 
     match &cli.command {
         Command::Delete {
             query,
             all_bindings,
             all,
-        } => delete::run(keys_db_internal_dir, query, *all_bindings, *all),
-        Command::Import { keyring_files } => {
-            import::run(keys_db_internal_dir, keyring_files.to_owned())
-        }
+        } => delete::run(db_file_path, query, *all_bindings, *all),
+        Command::Import { keyring_files } => import::run(db_file_path, keyring_files.to_owned()),
     }
 }
@@ -1,45 +0,0 @@
-use crate::cli::DeploymentEnvironment;
-use serde_derive::Deserialize;
-use std::fs;
-use std::path::{Path, PathBuf};
-
-#[derive(Deserialize)]
-struct HagridConfigs {
-    debug: HagridConfig,
-    staging: HagridConfig,
-    release: HagridConfig,
-}
-
-impl HagridConfigs {
-    fn get_by(self, deployment_environment: DeploymentEnvironment) -> HagridConfig {
-        match deployment_environment {
-            DeploymentEnvironment::Development => self.debug,
-            DeploymentEnvironment::Staging => self.staging,
-            DeploymentEnvironment::Production => self.release,
-        }
-    }
-}
-
-// this is not an exact match - Rocket config has more complicated semantics
-// than a plain toml file.
-// see also https://github.com/SergioBenitez/Rocket/issues/228
-#[derive(Deserialize, Clone)]
-pub(crate) struct HagridConfig {
-    _template_dir: Option<PathBuf>,
-    pub(crate) keys_internal_dir: Option<PathBuf>,
-    _keys_external_dir: Option<PathBuf>,
-    _assets_dir: Option<PathBuf>,
-    _token_dir: Option<PathBuf>,
-    _tmp_dir: Option<PathBuf>,
-    _maintenance_file: Option<PathBuf>,
-}
-
-pub(crate) fn load(
-    file: impl AsRef<Path>,
-    env: DeploymentEnvironment,
-) -> std::io::Result<HagridConfig> {
-    let config_data = fs::read_to_string(file)?;
-    let configs: HagridConfigs = toml::from_str(&config_data)?;
-
-    Ok(configs.get_by(env))
-}
@@ -4,12 +4,15 @@ use std::convert::TryInto;
 use std::path::Path;
 
 pub(crate) fn run(
-    keys_db_internal_dir: impl AsRef<Path>,
+    db_file_path: impl AsRef<Path>,
     query: &Query,
     all_bindings: bool,
     mut all: bool,
 ) -> anyhow::Result<()> {
-    let db = &KeyDatabase::new_file(keys_db_internal_dir)?;
+    let db = &KeyDatabase::new_file(
+        &db_file_path,
+        KeyDatabase::log_dir_path_from_db_file_path(&db_file_path)?,
+    )?;
 
     match query {
         Query::ByFingerprint(_) | Query::ByKeyID(_) => {
@@ -19,10 +19,7 @@ use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
 const NUM_THREADS_MAX: usize = 3;
 
 #[allow(clippy::needless_collect)]
-pub fn run(
-    keys_db_internal_dir: impl AsRef<Path>,
-    input_files: Vec<PathBuf>,
-) -> anyhow::Result<()> {
+pub fn run(db_file_path: impl AsRef<Path>, input_files: Vec<PathBuf>) -> anyhow::Result<()> {
     let num_threads = min(NUM_THREADS_MAX, input_files.len());
     let input_file_chunks = setup_chunks(input_files, num_threads);
@@ -32,10 +29,10 @@ pub fn run(
     let threads: Vec<_> = input_file_chunks
         .into_iter()
        .map(|input_file_chunk| {
-            let keys_db_internal_dir = keys_db_internal_dir.as_ref().to_owned();
+            let db_file_path = db_file_path.as_ref().to_owned();
             let multi_progress = multi_progress.clone();
             thread::spawn(move || {
-                import_from_files(keys_db_internal_dir, input_file_chunk, multi_progress).unwrap();
+                import_from_files(db_file_path, input_file_chunk, multi_progress).unwrap();
             })
         })
         .collect();
@@ -111,11 +108,14 @@ impl<'a> ImportStats<'a> {
 }
 
 fn import_from_files(
-    keys_db_internal_dir: impl AsRef<Path>,
+    db_file_path: impl AsRef<Path>,
     input_files: Vec<PathBuf>,
     multi_progress: Arc<MultiProgress>,
 ) -> anyhow::Result<()> {
-    let db = KeyDatabase::new_file(keys_db_internal_dir.as_ref())?;
+    let db = KeyDatabase::new_file(
+        &db_file_path,
+        KeyDatabase::log_dir_path_from_db_file_path(&db_file_path)?,
+    )?;
 
     for input_file in input_files {
         import_from_file(&db, &input_file, &multi_progress)?;
@@ -1,7 +1,6 @@
 use clap::Parser;
 
 mod cli;
-mod config;
 mod delete;
 mod import;
 
@@ -516,8 +516,10 @@ fn configure_prometheus(config: &Figment) -> Option<PrometheusMetrics> {
 fn configure_db_service(config: &Figment) -> anyhow::Result<KeyDatabase> {
     let keys_internal_dir: PathBuf = config.extract_inner("keys_internal_dir")?;
 
-    let sqlite_db = KeyDatabase::new_file(keys_internal_dir)?;
-    Ok(sqlite_db)
+    KeyDatabase::new_file(
+        KeyDatabase::db_file_path(&keys_internal_dir),
+        KeyDatabase::log_dir_path(&keys_internal_dir),
+    )
 }
 
 fn configure_hagrid_state(config: &Figment) -> anyhow::Result<HagridState> {