Mirror of https://gitlab.com/keys.openpgp.org/hagrid.git (synced 2025-10-06 00:23:08 +02:00)

Compare commits: v1.3.0 ... nora/sqlit (15 commits)
SHA1: eec60a9afd, bac42c0b8a, fd218bebba, a920d3ad9b, 9d779de5dc, c6ae7fb5f5, a70ab40039, 348db1f033, e46ed608a4, b54b4f3c07, 44ca689b15, dd75ceab05, ba0acdc73d, 50c8dff783, c2593de5b7
Cargo.lock (generated): 94 changed lines
@@ -71,6 +71,17 @@ dependencies = [
 "opaque-debug 0.3.0",
]

[[package]]
name = "ahash"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
 "getrandom 0.2.3",
 "once_cell",
 "version_check 0.9.4",
]

[[package]]
name = "aho-corasick"
version = "0.7.18"
@@ -662,6 +673,18 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"

[[package]]
name = "fallible-iterator"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"

[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"

[[package]]
name = "fast_chemail"
version = "0.9.6"
@@ -897,7 +920,10 @@ dependencies = [
 "log 0.4.14",
 "multipart",
 "pathdiff",
 "r2d2",
 "r2d2_sqlite",
 "rand 0.6.5",
 "rusqlite",
 "sequoia-openpgp",
 "serde",
 "serde_derive",
@@ -958,6 +984,18 @@ name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
 "ahash",
]

[[package]]
name = "hashlink"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
dependencies = [
 "hashbrown",
]

[[package]]
name = "heck"
@@ -1243,6 +1281,17 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "libsqlite3-sys"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cafc7c74096c336d9d27145f7ebd4f4b6f95ba16aa5a282387267e6925cb58"
dependencies = [
 "cc",
 "pkg-config",
 "vcpkg",
]

[[package]]
name = "lock_api"
version = "0.4.5"
@@ -1860,6 +1909,27 @@ dependencies = [
 "proc-macro2 1.0.36",
]

[[package]]
name = "r2d2"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "545c5bc2b880973c9c10e4067418407a0ccaa3091781d1671d46eb35107cb26f"
dependencies = [
 "log 0.4.14",
 "parking_lot",
 "scheduled-thread-pool",
]

[[package]]
name = "r2d2_sqlite"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54ca3c9468a76fc2ad724c486a59682fc362efeac7b18d1c012958bc19f34800"
dependencies = [
 "r2d2",
 "rusqlite",
]

[[package]]
name = "rand"
version = "0.4.6"
@@ -2220,6 +2290,21 @@ dependencies = [
 "rocket",
]

[[package]]
name = "rusqlite"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ba4d3462c8b2e4d7f4fcfcf2b296dc6b65404fbbc7b63daa37fd485c149daf7"
dependencies = [
 "bitflags",
 "fallible-iterator",
 "fallible-streaming-iterator",
 "hashlink",
 "libsqlite3-sys",
 "memchr",
 "smallvec",
]

[[package]]
name = "rustc-demangle"
version = "0.1.21"
@@ -2277,6 +2362,15 @@ dependencies = [
 "winapi-util",
]

[[package]]
name = "scheduled-thread-pool"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc6f74fd1204073fa02d5d5d68bec8021be4c38690b61264b2fdb48083d0e7d7"
dependencies = [
 "parking_lot",
]

[[package]]
name = "scopeguard"
version = "1.1.0"

@@ -23,6 +23,12 @@ fs2 = "0.4"
walkdir = "2.2"
chrono = "0.4"
zbase32 = "0.1.2"
r2d2 = "0.8"
r2d2_sqlite = "0.19"
rusqlite = { version = "0.26", features = ["bundled"] }

[build-dependencies]
rusqlite = { version = "0.26", features = ["trace"] }

[lib]
name = "hagrid_database"

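The r2d2, r2d2_sqlite, and rusqlite crates added above are the stack used for pooled SQLite access in the new backend. A minimal sketch of how they fit together, using only APIs that also appear in database/src/sqlite.rs further down (the file name and pool size here are illustrative):

    use r2d2_sqlite::SqliteConnectionManager;

    fn open_pool() -> anyhow::Result<()> {
        // One manager per database file; the pool hands out rusqlite connections.
        let manager = SqliteConnectionManager::file("keys.sqlite");
        let pool = r2d2::Pool::builder().max_size(2).build(manager)?;

        // Each checkout dereferences to a rusqlite::Connection.
        let conn = pool.get()?;
        conn.execute("CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY)", [])?;
        Ok(())
    }

The "bundled" feature on rusqlite builds SQLite from source through libsqlite3-sys (visible in the Cargo.lock hunk above), so no system SQLite is needed at build time.
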
@@ -771,20 +771,41 @@ mod tests {

    #[test]
    fn new() {
        let (_tmp_dir, db, _log_path) = open_db();
        let k1 = CertBuilder::new().add_userid("a@invalid.example.org")
            .generate().unwrap().0;
        let k2 = CertBuilder::new().add_userid("b@invalid.example.org")
            .generate().unwrap().0;
        let k3 = CertBuilder::new().add_userid("c@invalid.example.org")
            .generate().unwrap().0;
        use crate::ImportResult;

        assert!(db.merge(k1).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(db.merge(k2.clone()).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(db.merge(k3.clone()).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(!db.merge(k3.clone()).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(!db.merge(k3).unwrap().into_tpk_status().email_status.len() > 0);
        let (_tmp_dir, db, _log_path) = open_db();
        let k1 = CertBuilder::new()
            .add_userid("a@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k2 = CertBuilder::new()
            .add_userid("b@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k3 = CertBuilder::new()
            .add_userid("c@invalid.example.org")
            .add_userid("d@invalid.example.org")
            .generate()
            .unwrap()
            .0;

        assert!(matches!(
            db.merge(k1).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k2.clone()).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k2).unwrap(),
            ImportResult::Unchanged(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k3.clone()).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 2));
        assert!(matches!(
            db.merge(k3).unwrap(),
            ImportResult::Unchanged(status) if status.email_status.len() == 2));
    }

    #[test]

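A side note on the assertions removed in this hunk: in Rust, `!` binds tighter than `>`, so `!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0` bitwise-negates the usize length and then compares, which holds for every length except usize::MAX. The matches! form now asserts the exact ImportResult variant and email count instead. A tiny illustration of the precedence (values are made up):

    fn main() {
        let len: usize = 1;
        assert_eq!(!len, usize::MAX - 1); // `!` on usize is bitwise NOT
        assert!(!len > 0);                // parses as (!len) > 0, not !(len > 0)
    }
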
@@ -25,6 +25,7 @@ extern crate hex;
extern crate walkdir;
extern crate chrono;
extern crate zbase32;
extern crate r2d2_sqlite;

extern crate sequoia_openpgp as openpgp;
use openpgp::{
@@ -41,7 +42,8 @@ pub mod wkd;
pub mod sync;

mod fs;
pub use self::fs::Filesystem as KeyDatabase;
mod sqlite;
pub use self::sqlite::Sqlite as KeyDatabase;

mod stateful_tokens;
pub use stateful_tokens::StatefulTokens;
@@ -101,6 +103,7 @@ pub enum EmailAddressStatus {
    Revoked,
}

#[derive(Debug)]
pub enum ImportResult {
    New(TpkStatus),
    Updated(TpkStatus),

database/src/sqlite.rs (new file): 969 lines
@@ -0,0 +1,969 @@
use std::convert::TryFrom;
use std::fs::{create_dir_all, File, OpenOptions};
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;

use std::time::SystemTime;

use sync::FlockMutexGuard;
use types::{Email, Fingerprint, KeyID};
use Result;
use {Database, Query};

use wkd;

use openpgp::parse::Parse;
use openpgp::Cert;
use openpgp_utils::POLICY;

use r2d2_sqlite::rusqlite::params;
use r2d2_sqlite::rusqlite::OptionalExtension;
use r2d2_sqlite::rusqlite::Result as RusqliteResult;
use r2d2_sqlite::SqliteConnectionManager;

pub struct Sqlite {
    pool: r2d2::Pool<SqliteConnectionManager>,
    keys_db_file: PathBuf,
    keys_dir_log: PathBuf,
}

impl Sqlite {
    pub fn new(base_dir: impl Into<PathBuf>) -> Result<Self> {
        let base_dir: PathBuf = base_dir.into();

        let keys_db_file = base_dir.join("keys.sqlite");
        let manager = SqliteConnectionManager::file(&keys_db_file);

        let keys_dir_log = base_dir.join("log");
        create_dir_all(&keys_dir_log)?;

        let pool = Self::build_pool(manager)?;
        let conn = pool.get()?;
        //TODO: make published_armored NOT NULL
        conn.execute(
            "CREATE TABLE IF NOT EXISTS certs (
                fingerprint TEXT NOT NULL PRIMARY KEY,
                full TEXT NOT NULL,
                published_binary BLOB,
                published_armored TEXT, -- equals armor(published_binary)

                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL
            )",
            [],
        )?;
        conn.execute(
            "CREATE TABLE IF NOT EXISTS cert_identifiers (
                fingerprint TEXT NOT NULL UNIQUE,
                keyid TEXT NOT NULL UNIQUE AS (substr(fingerprint, -16)),

                primary_fingerprint TEXT NOT NULL,

                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL
            )",
            [],
        )?;
        conn.execute(
            "CREATE TABLE IF NOT EXISTS emails (
                email TEXT NOT NULL UNIQUE,
                wkd_hash TEXT NOT NULL UNIQUE, -- equals `email |> localpart |> tolower |> zbase32`

                primary_fingerprint TEXT NOT NULL,

                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL
            )",
            [],
        )?;

        Ok(Self { pool, keys_db_file, keys_dir_log})
    }

    #[cfg(test)]
    fn build_pool(
        manager: SqliteConnectionManager,
    ) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        #[derive(Copy, Clone, Debug)]
        pub struct LogConnectionCustomizer;
        impl<E> r2d2::CustomizeConnection<rusqlite::Connection, E>
            for LogConnectionCustomizer
        {
            fn on_acquire(
                &self,
                conn: &mut rusqlite::Connection,
            ) -> std::result::Result<(), E> {
                println!("Acquiring sqlite pool connection: {:?}", conn);
                conn.trace(Some(|query| {
                    println!("{}", query);
                }));
                std::result::Result::Ok(())
            }

            fn on_release(&self, conn: rusqlite::Connection) {
                println!("Releasing pool connection: {:?}", conn);
            }
        }

        Ok(r2d2::Pool::builder()
            .max_size(2)
            .connection_customizer(Box::new(LogConnectionCustomizer {}))
            .build(manager)?)
    }

    #[cfg(not(test))]
    fn build_pool(
        manager: SqliteConnectionManager,
    ) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        Ok(r2d2::Pool::builder().max_size(2).build(manager)?)
    }

    fn primary_fpr_by_any_fpr(
        &self,
        fpr: &Fingerprint,
    ) -> Result<Option<Fingerprint>> {
        let conn = self.pool.get().unwrap();
        let primary_fingerprint: Option<Fingerprint> = conn
            .query_row(
                "
                SELECT primary_fingerprint
                FROM cert_identifiers
                WHERE fingerprint = ?1
                ",
                [fpr.to_string()],
                |row| row.get(0),
            )
            .optional()?;
        Ok(primary_fingerprint)
    }

    fn primary_fpr_by_any_kid(
        &self,
        kid: &KeyID,
    ) -> Result<Option<Fingerprint>> {
        let conn = self.pool.get().unwrap();
        let primary_fingerprint: Option<Fingerprint> = conn
            .query_row(
                "
                SELECT primary_fingerprint
                FROM cert_identifiers
                WHERE keyid = ?1
                ",
                [kid.to_string()],
                |row| row.get(0),
            )
            .optional()?;
        Ok(primary_fingerprint)
    }

    fn open_logfile(&self, file_name: &str) -> Result<File> {
        let file_path = self.keys_dir_log.join(file_name);
        Ok(OpenOptions::new().create(true).append(true).open(file_path)?)
    }
}

impl Database for Sqlite {
    type MutexGuard = FlockMutexGuard;
    type TempCert = Vec<u8>;

    fn lock(&self) -> Result<Self::MutexGuard> {
        FlockMutexGuard::lock(&self.keys_db_file)
    }

    fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert> {
        Ok(content.to_vec())
    }

    fn write_log_append(
        &self,
        filename: &str,
        fpr_primary: &Fingerprint,
    ) -> Result<()> {
        let timestamp = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let fingerprint_line =
            format!("{:010} {}\n", timestamp, fpr_primary.to_string());

        self.open_logfile(filename)?.write_all(fingerprint_line.as_bytes())?;

        Ok(())
    }

    fn move_tmp_to_full(
        &self,
        file: Self::TempCert,
        fpr: &Fingerprint,
    ) -> Result<()> {
        let conn = self.pool.get()?;
        let file = String::from_utf8(file)?;
        conn.execute(
            "
            INSERT INTO certs (fingerprint, full, created_at, updated_at)
            VALUES (?1, ?2, datetime('now'), datetime('now'))
            ON CONFLICT(fingerprint) DO UPDATE
            SET full=excluded.full,
                updated_at=datetime('now');
            ",
            params![fpr.to_string(), file],
        )?;
        Ok(())
    }

    fn move_tmp_to_published(
        &self,
        file: Self::TempCert,
        fpr: &Fingerprint,
    ) -> Result<()> {
        let conn = self.pool.get()?;
        let file = String::from_utf8(file)?;
        conn.execute(
            "
            UPDATE certs
            SET published_armored = ?2,
                updated_at=datetime('now')
            WHERE fingerprint = ?1
            ",
            params![fpr.to_string(), file],
        )?;
        Ok(())
    }

    fn move_tmp_to_published_wkd(
        &self,
        file: Option<Self::TempCert>,
        fpr: &Fingerprint,
    ) -> Result<()> {
        let conn = self.pool.get()?;
        conn.execute(
            "
            UPDATE certs
            SET published_binary = ?2,
                updated_at=datetime('now')
            WHERE fingerprint = ?1
            ",
            params![fpr.to_string(), file],
        )?;
        Ok(())
    }

    // TODO!
    fn write_to_quarantine(
        &self,
        fpr: &Fingerprint,
        content: &[u8],
    ) -> Result<()> {
        Ok(())
    }

    fn check_link_fpr(
        &self,
        fpr: &Fingerprint,
        fpr_target: &Fingerprint,
    ) -> Result<Option<Fingerprint>> {
        let fpr_check = match self.primary_fpr_by_any_fpr(fpr)? {
            None => Some(fpr.clone()),
            Some(actual_primary) => {
                if &actual_primary == fpr_target {
                    None
                } else {
                    info!(
                        "Fingerprint points to different key for {}
                        (already links to {:?} but {:?} requested)",
                        fpr, actual_primary, fpr_target
                    );
                    return Err(anyhow!(format!(
                        "Fingerprint collision for key {}",
                        fpr
                    )));
                }
            }
        };
        let kid_check = match self.primary_fpr_by_any_kid(&KeyID::from(fpr))? {
            None => Some(fpr.clone()),
            Some(actual_primary) => {
                if &actual_primary == fpr_target {
                    None
                } else {
                    info!(
                        "KeyID points to different key for {}
                        (already links to {:?} but {:?} requested)",
                        fpr, actual_primary, fpr_target
                    );
                    return Err(anyhow!(format!(
                        "KeyID collision for key {}",
                        fpr
                    )));
                }
            }
        };
        Ok(fpr_check.and(kid_check))
    }

    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
        use super::Query::*;

        let conn = self.pool.get().unwrap();
        let fp: Option<Option<Fingerprint>> = match term {
            ByFingerprint(ref fp) => {
                conn.query_row(
                    "
                    SELECT primary_fingerprint
                    FROM cert_identifiers
                    WHERE fingerprint = ?1
                    ",
                    [&fp.to_string()],
                    |row| row.get(0),
                )
                .optional()
                .unwrap()
            }
            ByKeyID(ref keyid) => {
                conn.query_row(
                    "
                    SELECT primary_fingerprint
                    FROM cert_identifiers
                    WHERE keyid = ?1
                    ",
                    [&keyid.to_string()],
                    |row| row.get(0),
                )
                .optional()
                .unwrap()
            }
            ByEmail(ref email) => {
                conn.query_row(
                    "
                    SELECT primary_fingerprint
                    FROM emails
                    WHERE email = ?1
                    ",
                    [email.to_string()],
                    |row| row.get(0),
                )
                .optional()
                .unwrap()
            }
            _ => None,
        };
        fp.flatten()
    }

    fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        let conn = self.pool.get()?;
        conn.execute(
            "
            INSERT INTO emails (email, wkd_hash, primary_fingerprint, created_at, updated_at)
            VALUES (?1, ?2, ?3, datetime('now'), datetime('now'))
            ON CONFLICT(email) DO UPDATE
            SET email=excluded.email,
                wkd_hash=excluded.wkd_hash,
                primary_fingerprint=excluded.primary_fingerprint,
                updated_at=datetime('now');
            ",
            params![
                email.to_string(),
                wkd::encode_wkd(email.to_string())?.0,
                fpr.to_string(),
            ],
        )?;
        Ok(())
    }

    fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        let conn = self.pool.get()?;
        conn.execute(
            "
            DELETE FROM emails
            WHERE email = ?1
            AND primary_fingerprint = ?2
            ",
            params![email.to_string(), fpr.to_string(),],
        )?;
        Ok(())
    }

    // XXX: Rename to link_fpr_kid
    fn link_fpr(
        &self,
        from: &Fingerprint,
        primary_fpr: &Fingerprint,
    ) -> Result<()> {
        let conn = self.pool.get()?;
        conn.execute(
            "
            INSERT INTO cert_identifiers
                (primary_fingerprint, fingerprint, created_at, updated_at)
            VALUES (?1, ?2, datetime('now'), datetime('now'))
            ON CONFLICT(fingerprint) DO UPDATE
            SET fingerprint=excluded.fingerprint,
                primary_fingerprint=excluded.primary_fingerprint,
                updated_at=datetime('now');
            ",
            params![primary_fpr.to_string(), from.to_string(),],
        )?;
        Ok(())
    }

    // XXX: Rename to unlink_fpr_kid
    fn unlink_fpr(
        &self,
        from: &Fingerprint,
        primary_fpr: &Fingerprint,
    ) -> Result<()> {
        let conn = self.pool.get()?;
        conn.execute(
            "
            DELETE FROM cert_identifiers
            WHERE primary_fingerprint = ?1
            AND fingerprint = ?2
            ",
            params![primary_fpr.to_string(), from.to_string(),],
        )?;
        Ok(())
    }

    // Lookup straight from certs table, no link resolution
    fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String> {
        let conn = self.pool.get().unwrap();
        let armored_cert: Option<String> = conn
            .query_row(
                "
                SELECT full
                FROM certs
                WHERE fingerprint = ?1
                ",
                [fpr.to_string()],
                |row| row.get(0),
            )
            .optional()
            .unwrap();
        armored_cert
    }

    // XXX: rename! to by_primary_fpr_published
    // Lookup the published cert straight from certs table, no link resolution
    fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String> {
        let conn = self.pool.get().unwrap();
        let armored_cert: Option<String> = conn
            .query_row(
                "
                SELECT published_armored
                FROM certs
                WHERE fingerprint = ?1
                ",
                [fpr.to_string()],
                |row| row.get(0),
            )
            .optional()
            .unwrap();
        armored_cert
    }

    // XXX: Rename: armored_cert_by_any_fpr
    fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
        let primary_fingerprint = self.primary_fpr_by_any_fpr(fpr).unwrap();
        primary_fingerprint.and_then(|fp| self.by_primary_fpr(&fp))
    }

    // XXX: slow
    // XXX: Rename: armored_cert_by_email
    fn by_email(&self, email: &Email) -> Option<String> {
        let conn = self.pool.get().unwrap();
        let primary_fingerprint: Option<String> = conn
            .query_row(
                "
                SELECT primary_fingerprint
                FROM emails
                WHERE email = ?1
                ",
                [email.to_string()],
                |row| row.get(0),
            )
            .optional()
            .unwrap();
        if let Some(primary_fingerprint) = primary_fingerprint {
            self.by_primary_fpr(
                &Fingerprint::from_str(&primary_fingerprint).unwrap(),
            )
        } else {
            None
        }
    }

    // XXX: return results
    // TODO: Test!
    // XXX: Rename: binary_cert_by_email
    fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>> {
        let conn = self.pool.get().unwrap();
        let primary_fingerprint: Option<Fingerprint> = conn
            .query_row(
                "
                SELECT primary_fingerprint
                FROM emails
                WHERE email = ?1
                ",
                [email.to_string()],
                |row| row.get(0),
            )
            .optional()
            .unwrap();
        match primary_fingerprint {
            Some(primary_fingerprint) => {
                let binary_cert: Option<Vec<u8>> = conn
                    .query_row(
                        "
                        SELECT published_binary
                        FROM certs
                        WHERE fingerprint = ?1
                        ",
                        [primary_fingerprint.to_string()],
                        |row| row.get(0),
                    )
                    .optional()
                    .unwrap();
                binary_cert
            }
            None => None,
        }
    }

    // XXX: Rename: armored_cert_by_any_kid
    fn by_kid(&self, kid: &KeyID) -> Option<String> {
        // XXX: error handling
        let primary_fingerprint = self.primary_fpr_by_any_kid(kid).unwrap();
        primary_fingerprint.and_then(|fp| self.by_primary_fpr(&fp))
    }

    /// Checks the database for consistency.
    ///
    /// Note that this operation may take a long time, and is
    /// generally only useful for testing.
    fn check_consistency(&self) -> Result<()> {
        // Check for each published cert:
        // - all userids (emails) from the published cert point to the cert
        // - no other userids point to the cert
        // - all fingerprints of published signing subkeys point to the cert
        //   (cert_identifiers)
        // - no other subkey fingerprints point to the cert
        // - all keyids of signing subkeys and of the primary key point to the cert
        //   (cert_identifiers)
        // - no other subkey fingerprints point to the cert
        // - Published armored and published binary must match
        let conn = self.pool.get().unwrap();
        let mut cert_stmt = conn.prepare(
            "
            SELECT fingerprint, published_armored, published_binary
            FROM certs
            ",
        )?;
        for row in cert_stmt.query_map([], |row| {
            // TODO: create a struct which implements FromSql for this
            Ok((
                row.get::<_, Fingerprint>(0)?,
                row.get::<_, String>(1)?,
                row.get::<_, Option<Vec<u8>>>(2)?,
            ))
        })? {
            let (primary_fp, published_armored, published_binary) = row?;
            let tpk = Cert::from_str(&published_armored)?;

            // - all userids (emails) from the published cert point to the cert
            // - no other userids point to the cert
            let mut published_userids = tpk
                .userids()
                .map(|binding| binding.userid().clone())
                .map(|userid| Email::try_from(&userid).unwrap())
                .collect::<Vec<Email>>();
            published_userids.sort_unstable();
            published_userids.dedup();
            let mut stmt = conn.prepare(
                "
                SELECT email
                FROM emails
                WHERE primary_fingerprint = ?1
                ",
            )?;
            let mut linking_userids = stmt
                .query_map([&primary_fp.to_string()], |row| {
                    row.get::<_, Email>(0)
                })?
                .collect::<RusqliteResult<Vec<Email>>>()?;
            linking_userids.sort_unstable();
            if linking_userids != published_userids {
                return Err(anyhow!(
                    "For fingerprint {}, published {:?} but linked {:?}",
                    primary_fp,
                    published_userids,
                    linking_userids
                ));
            }

            // - all fingerprints of published signing subkeys point to the cert
            //   (cert_identifiers)
            // - no other subkey fingerprints point to the cert
            let policy = &POLICY;
            let mut published_fps = tpk
                .keys()
                .with_policy(policy, None)
                .for_certification()
                .for_signing()
                .map(|amalgamation| amalgamation.key().fingerprint())
                .flat_map(Fingerprint::try_from)
                .collect::<Vec<_>>();
            published_fps.sort_unstable();
            published_fps.dedup();
            let mut stmt = conn.prepare(
                "
                SELECT fingerprint
                FROM cert_identifiers
                WHERE primary_fingerprint = ?1
                ",
            )?;
            let mut linking_fps = stmt
                .query_map([&primary_fp.to_string()], |row| {
                    row.get::<_, Fingerprint>(0)
                })?
                .collect::<RusqliteResult<Vec<Fingerprint>>>()?;
            linking_fps.sort_unstable();
            if linking_fps != published_fps {
                return Err(anyhow!(
                    "For fingerprint {}, published subkeys Fingerprints {:?}
                    but linked {:?}",
                    primary_fp,
                    published_fps,
                    linking_fps
                ));
            }

            // - all keyids of signing subkeys and of the primary key point to the cert
            //   (cert_identifiers)
            // - no other subkey fingerprints point to the cert
            let policy = &POLICY;
            let mut published_kids = tpk
                .keys()
                .with_policy(policy, None)
                .for_certification()
                .for_signing()
                .map(|amalgamation| amalgamation.key().fingerprint())
                .flat_map(KeyID::try_from)
                .collect::<Vec<_>>();
            published_kids.sort_unstable();
            published_kids.dedup();
            let mut stmt = conn.prepare(
                "
                SELECT keyid
                FROM cert_identifiers
                WHERE primary_fingerprint = ?1
                ",
            )?;
            let mut linking_kids = stmt
                .query_map([&primary_fp.to_string()], |row| {
                    row.get::<_, KeyID>(0)
                })?
                .collect::<RusqliteResult<Vec<KeyID>>>()?;
            linking_kids.sort_unstable();
            if linking_kids != published_kids {
                return Err(anyhow!(
                    "For fingerprint {}, published subkey KeyIDs {:?}
                    but linked {:?}",
                    primary_fp,
                    published_kids,
                    linking_kids
                ));
            }

            // - Published armored and published binary must match
            if let Some(pna) = published_binary {
                if Cert::from_bytes(&pna)? != tpk {
                    return Err(anyhow!(
                        "For fingerprint {}, published_armored and
                        published_binary do not match",
                        primary_fp,
                    ));
                }
            }
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use openpgp::cert::CertBuilder;
    use tempfile::TempDir;
    use test;

    const DATA_1: &str = "data, content doesn't matter";
    const DATA_2: &str = "other data, content doesn't matter";
    const FINGERPRINT_1: &str = "D4AB192964F76A7F8F8A9B357BD18320DEADFA11";

    fn open_db() -> (TempDir, Sqlite, PathBuf) {
        let tmpdir = TempDir::new().unwrap();

        let db = Sqlite::new(tmpdir.path()).unwrap();
        let log_path = db.keys_dir_log.join(db.get_current_log_filename());

        (tmpdir, db, log_path)
    }

    #[test]
    fn new() {
        use crate::ImportResult;

        let (_tmp_dir, db, _log_path) = open_db();
        let k1 = CertBuilder::new()
            .add_userid("a@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k2 = CertBuilder::new()
            .add_userid("b@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k3 = CertBuilder::new()
            .add_userid("c@invalid.example.org")
            .add_userid("d@invalid.example.org")
            .generate()
            .unwrap()
            .0;

        assert!(matches!(
            db.merge(k1).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k2.clone()).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k2).unwrap(),
            ImportResult::Unchanged(status) if status.email_status.len() == 1));
        assert!(matches!(
            db.merge(k3.clone()).unwrap(),
            ImportResult::New(status) if status.email_status.len() == 2));
        assert!(matches!(
            db.merge(k3).unwrap(),
            ImportResult::Unchanged(status) if status.email_status.len() == 2));
    }

    #[test]
    fn xx_by_fpr_full() -> Result<()> {
        let (_tmp_dir, db, _log_path) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        db.move_tmp_to_full(db.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        db.link_fpr(&fpr1, &fpr1)?;

        assert_eq!(db.by_fpr_full(&fpr1).expect("must find key"), DATA_1);
        Ok(())
    }

    #[test]
    fn xx_by_kid() -> Result<()> {
        let (_tmp_dir, db, _log_path) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        db.move_tmp_to_full(db.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        db.move_tmp_to_published(db.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
        db.link_fpr(&fpr1, &fpr1)?;

        assert_eq!(db.by_kid(&fpr1.into()).expect("must find key"), DATA_2);
        Ok(())
    }

    #[test]
    fn xx_by_primary_fpr() -> Result<()> {
        let (_tmp_dir, db, _log_path) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        db.move_tmp_to_full(db.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        db.move_tmp_to_published(db.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;

        assert_eq!(db.by_primary_fpr(&fpr1).expect("must find key"), DATA_2);
        Ok(())
    }

    #[test]
    fn lookup_primary_fingerprint() -> Result<()> {
        use TryFrom;
        let (_tmp_dir, db, _log_path) = open_db();

        let email = Email::from_str("a@invalid.example.org")?;
        let cert = CertBuilder::new()
            .add_userid(email.to_string())
            .generate()
            .unwrap()
            .0;
        let expected_fp =
            Fingerprint::try_from(cert.primary_key().fingerprint())?;

        db.merge(cert)?;
        db.link_email(&email, &expected_fp)?;

        assert_eq!(
            expected_fp,
            db.lookup_primary_fingerprint(&crate::Query::ByFingerprint(
                expected_fp.clone()
            ))
            .unwrap()
        );
        assert_eq!(
            expected_fp,
            db.lookup_primary_fingerprint(&crate::Query::ByKeyID(
                expected_fp.clone().into()
            ))
            .unwrap()
        );
        assert_eq!(
            expected_fp,
            db.lookup_primary_fingerprint(&crate::Query::ByEmail(email))
                .unwrap()
        );
        Ok(())
    }

    #[test]
    fn uid_verification() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_uid_verification(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_deletion() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_uid_deletion(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn subkey_lookup() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_subkey_lookup(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn kid_lookup() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_kid_lookup(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn upload_revoked_tpk() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_upload_revoked_tpk(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_revocation() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_uid_revocation(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn regenerate() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_regenerate(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn key_reupload() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_reupload(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_replacement() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_uid_replacement(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_unlinking() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_unlink_uid(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_1() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_same_email_1(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_2() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_same_email_2(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_3() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_same_email_3(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_4() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_same_email_4(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn no_selfsig() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_no_selfsig(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn bad_uids() {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::test_bad_uids(&mut db, &log_path);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn reverse_fingerprint_to_path() {
        let tmpdir = TempDir::new().unwrap();
        let db = Sqlite::new(tmpdir.path()).unwrap();

        let fp: Fingerprint =
            "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();

        // XXX: fixme
        //assert_eq!(Sqlite::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
        // Some(fp.clone()));
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn attested_key_signatures() -> Result<()> {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::attested_key_signatures(&mut db, &log_path)?;
        db.check_consistency()?;
        Ok(())
    }

    #[test]
    fn nonexportable_sigs() -> Result<()> {
        let (_tmp_dir, mut db, log_path) = open_db();
        test::nonexportable_sigs(&mut db, &log_path)?;
        db.check_consistency()?;
        Ok(())
    }
}

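The emails table created above stores a wkd_hash next to each address, filled in link_email via wkd::encode_wkd(...).0. In the Web Key Directory scheme that hash is the zbase32 encoding of the SHA-1 digest of the lower-cased local part of the address. A rough sketch of that mapping, assuming the sha1 crate and a zbase32::encode_full_bytes helper (hagrid's own wkd module is the authoritative implementation and may differ in detail):

    use sha1::{Digest, Sha1};

    // Hypothetical helper mirroring the wkd_hash column:
    // zbase32(sha1(lowercase(local part))).
    fn wkd_hash(email: &str) -> Option<String> {
        let localpart = email.split('@').next()?;
        let digest = Sha1::digest(localpart.to_lowercase().as_bytes());
        Some(zbase32::encode_full_bytes(digest.as_slice()))
    }
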
@@ -77,7 +77,22 @@ impl FromStr for Email {
    }
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
use r2d2_sqlite::rusqlite::types::{
    FromSql, FromSqlError, FromSqlResult, ValueRef,
};

impl FromSql for Email {
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        value
            .as_str()
            .and_then(|v| {
                Email::from_str(v)
                    .map_err(|err| FromSqlError::Other(err.into()))
            })
    }
}

#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Fingerprint([u8; 20]);

impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
@@ -135,7 +150,18 @@ impl FromStr for Fingerprint {
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
impl FromSql for Fingerprint {
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        value
            .as_str()
            .and_then(|v| {
                Fingerprint::from_str(v)
                    .map_err(|err| FromSqlError::Other(err.into()))
            })
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct KeyID([u8; 8]);

impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
@@ -183,13 +209,25 @@ impl FromStr for KeyID {
    fn from_str(s: &str) -> Result<KeyID> {
        match sequoia_openpgp::KeyID::from_hex(s)? {
            sequoia_openpgp::KeyID::V4(a) => Ok(KeyID(a)),
            sequoia_openpgp::KeyID::Invalid(_) =>
                Err(anyhow!("'{}' is not a valid long key ID", s)),
            sequoia_openpgp::KeyID::Invalid(_) => {
                Err(anyhow!("'{}' is not a valid long key ID", s))
            }
            _ => Err(anyhow!("unknown keyid type")),
        }
    }
}

impl FromSql for KeyID {
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        value
            .as_str()
            .and_then(|v| {
                KeyID::from_str(v)
                    .map_err(|err| FromSqlError::Other(err.into()))
            })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

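The FromSql impls added in this hunk are what let the queries in sqlite.rs read typed values straight out of a row, e.g. row.get::<_, Fingerprint>(0) in check_consistency. A minimal sketch of the same pattern against a plain rusqlite connection (the query is illustrative and assumes the cert_identifiers table from the schema above):

    use rusqlite::Connection;

    fn first_fingerprint(conn: &Connection) -> rusqlite::Result<Fingerprint> {
        // Because Fingerprint implements FromSql, rusqlite converts the TEXT
        // column itself and surfaces parse failures as FromSqlError::Other.
        conn.query_row(
            "SELECT fingerprint FROM cert_identifiers LIMIT 1",
            [],
            |row| row.get::<_, Fingerprint>(0),
        )
    }
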
@@ -114,15 +114,12 @@ impl <'a> ImportStats<'a> {

fn import_from_files(
    config: &HagridConfig,
    dry_run: bool,
    _dry_run: bool,
    input_files: Vec<PathBuf>,
    multi_progress: Arc<MultiProgress>,
) -> Result<()> {
    let db = KeyDatabase::new_internal(
        config.keys_internal_dir.as_ref().unwrap(),
        config.keys_external_dir.as_ref().unwrap(),
        config.tmp_dir.as_ref().unwrap(),
        dry_run,
    let db = KeyDatabase::new(
        config.keys_base_dir.as_ref().unwrap(),
    )?;

    for input_file in input_files {

@@ -21,7 +21,7 @@ use anyhow::Result;
use clap::{Arg, App, SubCommand};

mod import;
mod regenerate;
//mod regenerate;

#[derive(Deserialize)]
pub struct HagridConfigs {
@@ -36,8 +36,7 @@ pub struct HagridConfigs {
#[derive(Deserialize,Clone)]
pub struct HagridConfig {
    _template_dir: Option<PathBuf>,
    keys_internal_dir: Option<PathBuf>,
    keys_external_dir: Option<PathBuf>,
    keys_base_dir: Option<PathBuf>,
    _assets_dir: Option<PathBuf>,
    _token_dir: Option<PathBuf>,
    tmp_dir: Option<PathBuf>,
@@ -61,8 +60,8 @@ fn main() -> Result<()> {
            .takes_value(true)
            .default_value("prod")
            .possible_values(&["dev","stage","prod"]))
        .subcommand(SubCommand::with_name("regenerate")
            .about("Regenerate symlink directory"))
        //.subcommand(SubCommand::with_name("regenerate")
        //    .about("Regenerate symlink directory"))
        .subcommand(SubCommand::with_name("import")
            .about("Import keys into Hagrid")
            .arg(Arg::with_name("dry run")
@@ -94,8 +93,8 @@ fn main() -> Result<()> {
            .map(|arg| PathBuf::from_str(arg).unwrap())
            .collect();
        import::do_import(&config, dry_run, keyrings)?;
    } else if let Some(_matches) = matches.subcommand_matches("regenerate") {
        regenerate::do_regenerate(&config)?;
    //} else if let Some(_matches) = matches.subcommand_matches("regenerate") {
    //    regenerate::do_regenerate(&config)?;
    } else {
        println!("{}", matches.usage());
    }

@@ -17,6 +17,7 @@ pkgs.mkShell {
  ];

  buildInputs = with pkgs; [
    sqlite
    openssl

    clang

@@ -50,7 +50,7 @@ fn main() {

fn real_main() -> Result<()> {
    let opt = Opt::from_args();
    let db = KeyDatabase::new_from_base(opt.base.canonicalize()?)?;
    let db = KeyDatabase::new(opt.base.canonicalize()?)?;
    delete(&db, &opt.query.parse()?, opt.all_bindings, opt.all)
}

@@ -444,11 +444,9 @@ fn configure_prometheus(config: &Config) -> Option<PrometheusMetrics> {

fn configure_db_service(config: &Config) -> Result<KeyDatabase> {
    let keys_internal_dir: PathBuf = config.get_str("keys_internal_dir")?.into();
    let keys_external_dir: PathBuf = config.get_str("keys_external_dir")?.into();
    let tmp_dir: PathBuf = config.get_str("tmp_dir")?.into();

    let fs_db = KeyDatabase::new(keys_internal_dir, keys_external_dir, tmp_dir)?;
    Ok(fs_db)
    let sqlite_db = KeyDatabase::new(keys_internal_dir)?;
    Ok(sqlite_db)
}

fn configure_hagrid_state(config: &Config) -> Result<HagridState> {