Compare commits

...

28 Commits

Author SHA1 Message Date
Vincent Breitmoser
3293dd8f78 version 2.0 2025-02-28 22:07:16 +01:00
Vincent Breitmoser
c7a032eb69 nix: add nix flake 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
475bcbffb8 nginx: route all requests via hagrid 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
dafed3d492 db: don't use sq's export logic for our certs 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
a504b0ea12 hagridctl: update for sqlite 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
df6bfb2d84 db: improve typings for sqlite 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
b5b5879474 db: add DatabaseTransaction abstraction 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
5778aaed84 db: work on sqlite, make tests pass 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
7beb5209af db: add sqlite query tracing during tests 2025-02-28 22:05:32 +01:00
puzzlewolf
4787816581 db: start work on rusqlite 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
359475f89f docker: add sqlite dep 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
253d672d47 db: abstract over log path interface 2025-02-28 22:05:32 +01:00
Vincent Breitmoser
e0aeef7ddc mail: support sending via local smtp server 2025-02-28 21:53:25 +01:00
Vincent Breitmoser
44db398a1c cargo: downgrade sequoia-openpgp to 1.17.0 (for now)
Starting with 1.18.0, the retain_userids method starts working
differently, returning an empty cert if no signed user ids or direct key
signature is left. Since we need this, we'll stay on 1.17.0 for now.
2024-11-17 19:08:23 +01:00
Vincent Breitmoser
8ea89d3e0e hagrid: fix tokens test 2024-11-17 14:15:34 +01:00
Vincent Breitmoser
0d25da7138 cargo: cargo fmt --all 2024-11-17 14:03:12 +01:00
Vincent Breitmoser
e0f8352ac6 docker: update docker-build for new rust-toolchain 2024-11-17 13:49:44 +01:00
Vincent Breitmoser
dca8afa1e6 cargo: cargo update 2024-11-17 13:47:22 +01:00
Vincent Breitmoser
ea44f52a16 rust-toolchain: update to 1.82.0 2024-11-17 13:46:25 +01:00
Vincent Breitmoser
b4d92f0ec1 use rust-crypto instead of ring for sealed state
Newer versions of ring are very obscure, and I couldn't figure out how
to use its interface in a reasonable time. I used the rust-crypto
methods instead where things were straightforward.
2024-11-17 13:46:24 +01:00
Vincent Breitmoser
26ef2f6e1c db: fix tests 2024-03-24 23:50:56 +01:00
Vincent Breitmoser
cfd9fd8eb3 tester: add gen-reqs command 2024-03-24 13:09:04 +01:00
Vincent Breitmoser
13ddd4ff3a tester: add tester workspace, adding tools for testing 2024-03-24 13:09:04 +01:00
Vincent Breitmoser
a9440c6d0a hagridctl: add dump command to dump entire database 2024-03-24 13:09:04 +01:00
Vincent Breitmoser
fe2337507a hagridctl: import public keys publishing emails 2024-03-24 13:09:04 +01:00
Vincent Breitmoser
36dff563fc docker: use bullseye base image 2024-03-24 13:09:04 +01:00
Vincent Breitmoser
da5648488b ci: actually use correct dep package name 2024-01-27 10:24:26 +01:00
Vincent Breitmoser
7f304929ea ci: update gitlab for openssl dep 2024-01-27 10:22:47 +01:00
33 changed files with 2434 additions and 1166 deletions

2
.gitignore vendored
View File

@@ -7,3 +7,5 @@
target
*.po~
/dist/templates/localized
result

View File

@@ -3,7 +3,7 @@ build, test and lint:
interruptible: true
script:
- apt update -qy
- apt install -qy build-essential pkg-config clang libclang-dev nettle-dev gettext zsh
- apt install -qy build-essential pkg-config clang libclang-dev libssl-dev gettext zsh
- rustup component add clippy
- rustup component add rustfmt
- ./make-translated-templates

1399
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package]
name = "hagrid"
version = "1.3.0"
version = "2.0.0"
authors = ["Vincent Breitmoser <look@my.amazin.horse>", "Kai Michaelis <kai@sequoia-pgp.org>", "Justus Winter <justus@sequoia-pgp.org>"]
build = "build.rs"
default-run = "hagrid"
@@ -10,6 +10,7 @@ edition = "2018"
members = [
"database",
"hagridctl",
"tester",
]
[dependencies]
@@ -19,7 +20,7 @@ anyhow = "1"
rocket = { version = "0.5", features = [ "json" ] }
rocket_dyn_templates = { version = "0.1", features = ["handlebars"] }
rocket_codegen = "0.5"
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
sequoia-openpgp = { version = "=1.17.0", default-features = false, features = ["crypto-openssl"] }
multipart = "0"
serde = "1"
serde_derive = "1"
@@ -29,7 +30,8 @@ tempfile = "3"
structopt = "0.2"
url = "1"
num_cpus = "1"
ring = "0.13"
aes-gcm = "0.10"
sha2 = "0.10"
base64 = "0.10"
uuid = { version = "0.7", features = [ "v4" ] }
rocket_prometheus = "0.10"
@@ -40,7 +42,7 @@ gettext = "0.4"
glob = "0.3"
hyperx = "1.4"
# this is a slightly annoying update, so keeping this back for now
lettre = { version = "=0.10.0-rc.5", default-features = false, features = ["builder", "file-transport", "sendmail-transport"] }
lettre = { version = "=0.10.0-rc.5", default-features = false, features = ["builder", "file-transport", "sendmail-transport", "smtp-transport"] }
[dependencies.rocket_i18n]
# git = "https://github.com/Plume-org/rocket_i18n"

View File

@@ -10,6 +10,11 @@ Please note that Hagrid is built and maintained only for the service at
keys.openpgp.org. It is not maintained or officially supported as
deployable software.
Compatibility note: Hagrid v2.0 uses an sqlite certificate store instead of the
previous file based database. This means that it also no longer supports serving
certificates directly via reverse proxy. You can use hagridctl to dump and import
an old database.
License
-------
@@ -37,7 +42,7 @@ Additionally, some external dependencies are required.
Get them (on Debian or Ubuntu) with
```bash
sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext
sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext libsqlite3-dev
```
After Rust and the other dependencies are installed, copy the config file, then simply compile and run:
@@ -55,18 +60,6 @@ will be statically built, and can be copied anywhere. You will also need to
adjust `Rocket.toml` accordingly. Hagrid uses `sendmail` for mailing, so you
also need a working local mailer setup.
Reverse Proxy
-------------
Hagrid is designed to defer lookups to reverse proxy server like Nginx.
Lookups via `/vks/v1/by-finingerprint`, `/vks/v1/by-keyid`, and
`/vks/v1/by-email` can be handled by a robust and performant HTTP server.
A sample configuration for nginx is part of the repository (`nginx.conf`,
`hagrid-routes.conf`).
Note that we make use of
[ngx_http_lua_module](https://github.com/openresty/lua-nginx-module) to
perform some request rewrites.
Community
---------

View File

@@ -5,7 +5,7 @@ authors = ["Kai Michaelis <kai@sequoia-pgp.org>"]
[dependencies]
anyhow = "1"
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
sequoia-openpgp = { version = "=1.17.0", default-features = false, features = ["crypto-openssl"] }
log = "0"
rand = "0.6"
serde = { version = "1.0", features = ["derive"] }
@@ -22,6 +22,10 @@ fs2 = "0.4"
walkdir = "2"
chrono = "0.4"
zbase32 = "0.1"
r2d2 = "0.8"
r2d2_sqlite = "0.24"
rusqlite = { version = "0.31", features = ["trace"] }
self_cell = "1"
[lib]
name = "hagrid_database"

View File

@@ -24,6 +24,8 @@ use tempfile::NamedTempFile;
use openpgp::Cert;
use openpgp_utils::POLICY;
use crate::DatabaseTransaction;
pub struct Filesystem {
tmp_dir: PathBuf,
@@ -263,54 +265,6 @@ impl Filesystem {
}
}
fn link_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let path = self.fingerprint_to_path_published(fpr);
let link = self.link_by_email(email);
let target = diff_paths(&path, link.parent().unwrap()).unwrap();
if link == target {
return Ok(());
}
symlink(&target, ensure_parent(&link)?)
}
fn link_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let path = self.fingerprint_to_path_published_wkd(fpr);
let link = self.link_wkd_by_email(email);
let target = diff_paths(&path, link.parent().unwrap()).unwrap();
if link == target {
return Ok(());
}
symlink(&target, ensure_parent(&link)?)
}
fn unlink_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let link = self.link_by_email(email);
let expected = diff_paths(
&self.fingerprint_to_path_published(fpr),
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
fn unlink_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let link = self.link_wkd_by_email(email);
let expected = diff_paths(
&self.fingerprint_to_path_published_wkd(fpr),
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
fn open_logfile(&self, file_name: &str) -> Result<File> {
let file_path = self.keys_dir_log.join(file_name);
Ok(OpenOptions::new()
@@ -387,52 +341,93 @@ fn symlink_unlink_with_check(link: &Path, expected: &Path) -> Result<()> {
Ok(())
}
impl Database for Filesystem {
type MutexGuard = FlockMutexGuard;
pub struct FilesystemTransaction<'a> {
db: &'a Filesystem,
_flock: FlockMutexGuard,
}
impl<'a> FilesystemTransaction<'a> {
fn link_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let path = self.db.fingerprint_to_path_published(fpr);
let link = self.db.link_by_email(email);
let target = diff_paths(&path, link.parent().unwrap()).unwrap();
if link == target {
return Ok(());
}
symlink(&target, ensure_parent(&link)?)
}
fn link_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let path = self.db.fingerprint_to_path_published_wkd(fpr);
let link = self.db.link_wkd_by_email(email);
let target = diff_paths(&path, link.parent().unwrap()).unwrap();
if link == target {
return Ok(());
}
symlink(&target, ensure_parent(&link)?)
}
fn unlink_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let link = self.db.link_by_email(email);
let expected = diff_paths(
&self.db.fingerprint_to_path_published(fpr),
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
fn unlink_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
let link = self.db.link_wkd_by_email(email);
let expected = diff_paths(
&self.db.fingerprint_to_path_published_wkd(fpr),
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
}
impl<'a> DatabaseTransaction<'a> for FilesystemTransaction<'a> {
type TempCert = NamedTempFile;
fn lock(&self) -> Result<Self::MutexGuard> {
FlockMutexGuard::lock(&self.keys_internal_dir)
fn commit(self) -> Result<()> {
Ok(())
}
fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert> {
let mut tempfile = tempfile::Builder::new()
.prefix("key")
.rand_bytes(16)
.tempfile_in(&self.tmp_dir)?;
.tempfile_in(&self.db.tmp_dir)?;
tempfile.write_all(content).unwrap();
Ok(tempfile)
}
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()> {
let timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let fingerprint_line = format!("{:010} {}\n", timestamp, fpr_primary);
self.open_logfile(filename)?
.write_all(fingerprint_line.as_bytes())?;
Ok(())
}
fn move_tmp_to_full(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
if self.dry_run {
if self.db.dry_run {
return Ok(());
}
set_permissions(file.path(), Permissions::from_mode(0o640))?;
let target = self.fingerprint_to_path_full(fpr);
let target = self.db.fingerprint_to_path_full(fpr);
file.persist(ensure_parent(&target)?)?;
Ok(())
}
fn move_tmp_to_published(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
if self.dry_run {
if self.db.dry_run {
return Ok(());
}
set_permissions(file.path(), Permissions::from_mode(0o644))?;
let target = self.fingerprint_to_path_published(fpr);
let target = self.db.fingerprint_to_path_published(fpr);
file.persist(ensure_parent(&target)?)?;
Ok(())
}
@@ -442,10 +437,10 @@ impl Database for Filesystem {
file: Option<Self::TempCert>,
fpr: &Fingerprint,
) -> Result<()> {
if self.dry_run {
if self.db.dry_run {
return Ok(());
}
let target = self.fingerprint_to_path_published_wkd(fpr);
let target = self.db.fingerprint_to_path_published_wkd(fpr);
if let Some(file) = file {
set_permissions(file.path(), Permissions::from_mode(0o644))?;
file.persist(ensure_parent(&target)?)?;
@@ -460,65 +455,17 @@ impl Database for Filesystem {
let mut tempfile = tempfile::Builder::new()
.prefix("key")
.rand_bytes(16)
.tempfile_in(&self.tmp_dir)?;
.tempfile_in(&self.db.tmp_dir)?;
tempfile.write_all(content).unwrap();
let target = self.fingerprint_to_path_quarantined(fpr);
let target = self.db.fingerprint_to_path_quarantined(fpr);
tempfile.persist(ensure_parent(&target)?)?;
Ok(())
}
fn check_link_fpr(
&self,
fpr: &Fingerprint,
fpr_target: &Fingerprint,
) -> Result<Option<Fingerprint>> {
let link_keyid = self.link_by_keyid(&fpr.into());
let link_fpr = self.link_by_fingerprint(fpr);
let path_published = self.fingerprint_to_path_published(fpr_target);
if let Ok(link_fpr_target) = link_fpr.canonicalize() {
if !link_fpr_target.ends_with(&path_published) {
info!("Fingerprint points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_fpr_target);
return Err(anyhow!(format!("Fingerprint collision for key {}", fpr)));
}
}
if let Ok(link_keyid_target) = link_keyid.canonicalize() {
if !link_keyid_target.ends_with(&path_published) {
info!(
"KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_keyid_target
);
return Err(anyhow!(format!("KeyID collision for key {}", fpr)));
}
}
if !link_fpr.exists() || !link_keyid.exists() {
Ok(Some(fpr.clone()))
} else {
Ok(None)
}
}
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
use super::Query::*;
let path = match term {
ByFingerprint(ref fp) => self.link_by_fingerprint(fp),
ByKeyID(ref keyid) => self.link_by_keyid(keyid),
ByEmail(ref email) => self.link_by_email(email),
_ => return None,
};
path.read_link()
.ok()
.and_then(|link_path| Filesystem::path_to_fingerprint(&link_path))
}
fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
if self.dry_run {
if self.db.dry_run {
return Ok(());
}
@@ -535,14 +482,14 @@ impl Database for Filesystem {
}
fn link_fpr(&self, from: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
if self.dry_run {
if self.db.dry_run {
return Ok(());
}
let link_fpr = self.link_by_fingerprint(from);
let link_keyid = self.link_by_keyid(&from.into());
let link_fpr = self.db.link_by_fingerprint(from);
let link_keyid = self.db.link_by_keyid(&from.into());
let target = diff_paths(
&self.fingerprint_to_path_published(primary_fpr),
&self.db.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap(),
)
.unwrap();
@@ -552,10 +499,10 @@ impl Database for Filesystem {
}
fn unlink_fpr(&self, from: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
let link_fpr = self.link_by_fingerprint(from);
let link_keyid = self.link_by_keyid(&from.into());
let link_fpr = self.db.link_by_fingerprint(from);
let link_keyid = self.db.link_by_keyid(&from.into());
let expected = diff_paths(
&self.fingerprint_to_path_published(primary_fpr),
&self.db.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap(),
)
.unwrap();
@@ -573,6 +520,44 @@ impl Database for Filesystem {
Ok(())
}
}
impl<'a> Database<'a> for Filesystem {
type Transaction = FilesystemTransaction<'a>;
fn transaction(&'a self) -> Result<FilesystemTransaction<'a>> {
let flock = FlockMutexGuard::lock(&self.keys_internal_dir)?;
Ok(FilesystemTransaction {
db: self,
_flock: flock,
})
}
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()> {
let timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let fingerprint_line = format!("{:010} {}\n", timestamp, fpr_primary);
self.open_logfile(filename)?
.write_all(fingerprint_line.as_bytes())?;
Ok(())
}
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
use super::Query::*;
let path = match term {
ByFingerprint(ref fp) => self.link_by_fingerprint(fp),
ByKeyID(ref keyid) => self.link_by_keyid(keyid),
ByEmail(ref email) => self.link_by_email(email),
_ => return None,
};
path.read_link()
.ok()
.and_then(|link_path| Filesystem::path_to_fingerprint(&link_path))
}
// XXX: slow
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String> {
@@ -616,6 +601,57 @@ impl Database for Filesystem {
self.read_from_path(&path, false)
}
fn get_last_log_entry(&self) -> Result<Fingerprint> {
use std::fs;
use std::str::FromStr;
let filename = self.keys_dir_log.join(self.get_current_log_filename());
let log_data = fs::read_to_string(filename)?;
let last_entry = log_data
.lines()
.last()
.ok_or_else(|| anyhow!("malformed log file"))?
.split(' ')
.last()
.ok_or_else(|| anyhow!("malformed log file"))?;
Fingerprint::from_str(last_entry)
}
fn check_link_fpr(
&self,
fpr: &Fingerprint,
fpr_target: &Fingerprint,
) -> Result<Option<Fingerprint>> {
let link_keyid = self.link_by_keyid(&fpr.into());
let link_fpr = self.link_by_fingerprint(fpr);
let path_published = self.fingerprint_to_path_published(fpr_target);
if let Ok(link_fpr_target) = link_fpr.canonicalize() {
if !link_fpr_target.ends_with(&path_published) {
info!("Fingerprint points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_fpr_target);
return Err(anyhow!(format!("Fingerprint collision for key {}", fpr)));
}
}
if let Ok(link_keyid_target) = link_keyid.canonicalize() {
if !link_keyid_target.ends_with(&path_published) {
info!(
"KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_keyid_target
);
return Err(anyhow!(format!("KeyID collision for key {}", fpr)));
}
}
if !link_fpr.exists() || !link_keyid.exists() {
Ok(Some(fpr.clone()))
} else {
Ok(None)
}
}
/// Checks the database for consistency.
///
/// Note that this operation may take a long time, and is
@@ -810,17 +846,16 @@ mod tests {
let _ = Filesystem::new_from_base(tmpdir.path()).unwrap();
}
fn open_db() -> (TempDir, Filesystem, PathBuf) {
fn open_db() -> (TempDir, Filesystem) {
let tmpdir = TempDir::new().unwrap();
let db = Filesystem::new_from_base(tmpdir.path()).unwrap();
let log_path = db.keys_dir_log.join(db.get_current_log_filename());
(tmpdir, db, log_path)
(tmpdir, db)
}
#[test]
fn new() {
let (_tmp_dir, db, _log_path) = open_db();
let (_tmp_dir, db) = open_db();
let k1 = CertBuilder::new()
.add_userid("a@invalid.example.org")
.generate()
@@ -869,120 +904,120 @@ mod tests {
#[test]
fn uid_verification() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_uid_verification(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_uid_verification(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn uid_deletion() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_uid_deletion(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_uid_deletion(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn subkey_lookup() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_subkey_lookup(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_subkey_lookup(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn kid_lookup() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_kid_lookup(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_kid_lookup(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn upload_revoked_tpk() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_upload_revoked_tpk(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_upload_revoked_tpk(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn uid_revocation() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_uid_revocation(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_uid_revocation(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn regenerate() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_regenerate(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_regenerate(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn key_reupload() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_reupload(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_reupload(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn uid_replacement() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_uid_replacement(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_uid_replacement(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn uid_unlinking() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_unlink_uid(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_unlink_uid(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn same_email_1() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_same_email_1(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_same_email_1(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn same_email_2() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_same_email_2(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_same_email_2(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn same_email_3() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_same_email_3(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_same_email_3(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn same_email_4() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_same_email_4(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_same_email_4(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn no_selfsig() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_no_selfsig(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_no_selfsig(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn bad_uids() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_bad_uids(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_bad_uids(&mut db);
db.check_consistency().expect("inconsistent database");
}
#[test]
fn unsigned_uids() {
let (_tmp_dir, mut db, log_path) = open_db();
test::test_unsigned_uids(&mut db, &log_path);
let (_tmp_dir, mut db) = open_db();
test::test_unsigned_uids(&mut db);
db.check_consistency().expect("inconsistent database");
}
@@ -1002,16 +1037,16 @@ mod tests {
#[test]
fn attested_key_signatures() -> Result<()> {
let (_tmp_dir, mut db, log_path) = open_db();
test::attested_key_signatures(&mut db, &log_path)?;
let (_tmp_dir, mut db) = open_db();
test::attested_key_signatures(&mut db)?;
db.check_consistency()?;
Ok(())
}
#[test]
fn nonexportable_sigs() -> Result<()> {
let (_tmp_dir, mut db, log_path) = open_db();
test::nonexportable_sigs(&mut db, &log_path)?;
let (_tmp_dir, mut db) = open_db();
test::nonexportable_sigs(&mut db)?;
db.check_consistency()?;
Ok(())
}

View File

@@ -17,7 +17,9 @@ extern crate log;
extern crate chrono;
extern crate hex;
extern crate pathdiff;
extern crate r2d2_sqlite;
extern crate rand;
extern crate self_cell;
extern crate serde;
extern crate serde_json;
extern crate tempfile;
@@ -36,7 +38,8 @@ pub mod sync;
pub mod wkd;
mod fs;
pub use self::fs::Filesystem as KeyDatabase;
mod sqlite;
pub use self::sqlite::Sqlite as KeyDatabase;
mod stateful_tokens;
pub use stateful_tokens::StatefulTokens;
@@ -106,6 +109,14 @@ impl ImportResult {
ImportResult::Unchanged(status) => status,
}
}
pub fn as_tpk_status(&self) -> &TpkStatus {
match self {
ImportResult::New(status) => status,
ImportResult::Updated(status) => status,
ImportResult::Unchanged(status) => status,
}
}
}
#[derive(Debug, PartialEq)]
@@ -120,19 +131,10 @@ pub enum RegenerateResult {
Unchanged,
}
pub trait Database: Sync + Send {
type MutexGuard;
pub trait DatabaseTransaction<'a> {
type TempCert;
/// Lock the DB for a complex update.
///
/// All basic write operations are atomic so we don't need to lock
/// read operations to ensure that we return something sane.
fn lock(&self) -> Result<Self::MutexGuard>;
/// Queries the database using Fingerprint, KeyID, or
/// email-address, returning the primary fingerprint.
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint>;
fn commit(self) -> Result<()>;
fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
@@ -140,21 +142,6 @@ pub trait Database: Sync + Send {
fn link_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
fn unlink_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
fn by_kid(&self, kid: &KeyID) -> Option<String>;
fn by_email(&self, email: &Email) -> Option<String>;
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
fn check_link_fpr(
&self,
fpr: &Fingerprint,
target: &Fingerprint,
) -> Result<Option<Fingerprint>>;
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert>;
fn move_tmp_to_full(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
fn move_tmp_to_published(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
@@ -164,8 +151,39 @@ pub trait Database: Sync + Send {
fpr: &Fingerprint,
) -> Result<()>;
fn write_to_quarantine(&self, fpr: &Fingerprint, content: &[u8]) -> Result<()>;
}
pub trait Database<'a>: Sync + Send {
type Transaction: DatabaseTransaction<'a>;
/// Lock the DB for a complex update.
///
/// All basic write operations are atomic so we don't need to lock
/// read operations to ensure that we return something sane.
fn transaction(&'a self) -> Result<Self::Transaction>;
/// Queries the database using Fingerprint, KeyID, or
/// email-address, returning the primary fingerprint.
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint>;
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
fn by_kid(&self, kid: &KeyID) -> Option<String>;
fn by_email(&self, email: &Email) -> Option<String>;
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
fn get_last_log_entry(&self) -> Result<Fingerprint>;
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()>;
fn check_link_fpr(
&self,
fpr: &Fingerprint,
target: &Fingerprint,
) -> Result<Option<Fingerprint>>;
fn check_consistency(&self) -> Result<()>;
/// Queries the database using Fingerprint, KeyID, or
@@ -197,10 +215,10 @@ pub trait Database: Sync + Send {
/// - abort if any problems come up!
/// 5. Move full and published temporary Cert to their location
/// 6. Update all symlinks
fn merge(&self, new_tpk: Cert) -> Result<ImportResult> {
fn merge(&'a self, new_tpk: Cert) -> Result<ImportResult> {
let fpr_primary = Fingerprint::try_from(new_tpk.primary_key().fingerprint())?;
let _lock = self.lock()?;
let tx = self.transaction()?;
let known_uids: Vec<UserID> = new_tpk
.userids()
@@ -311,21 +329,21 @@ pub trait Database: Sync + Send {
.collect::<Result<Vec<_>>>();
if fpr_checks.is_err() {
self.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
tx.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
}
let fpr_checks = fpr_checks?;
let fpr_not_linked = fpr_checks.into_iter().flatten();
let full_tpk_tmp = self.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
let full_tpk_tmp = tx.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
// these are very unlikely to fail. but if it happens,
// database consistency might be compromised!
self.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
self.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
self.regenerate_wkd(&fpr_primary, &published_tpk_clean)?;
tx.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
tx.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
self.regenerate_wkd(&tx, &fpr_primary, &published_tpk_clean)?;
let published_tpk_changed = published_tpk_old
.map(|tpk| tpk != published_tpk_clean)
@@ -335,13 +353,13 @@ pub trait Database: Sync + Send {
}
for fpr in fpr_not_linked {
if let Err(e) = self.link_fpr(&fpr, &fpr_primary) {
if let Err(e) = tx.link_fpr(&fpr, &fpr_primary) {
info!("Error ensuring symlink! {} {} {:?}", &fpr, &fpr_primary, e);
}
}
for revoked_email in newly_revoked_emails {
if let Err(e) = self.unlink_email(revoked_email, &fpr_primary) {
if let Err(e) = tx.unlink_email(revoked_email, &fpr_primary) {
info!(
"Error ensuring symlink! {} {} {:?}",
&fpr_primary, &revoked_email, e
@@ -349,6 +367,8 @@ pub trait Database: Sync + Send {
}
}
tx.commit()?;
if is_update {
Ok(ImportResult::Updated(TpkStatus {
is_revoked,
@@ -448,10 +468,10 @@ pub trait Database: Sync + Send {
/// - abort if any problems come up!
/// 5. Move full and published temporary Cert to their location
/// 6. Update all symlinks
fn set_email_published(&self, fpr_primary: &Fingerprint, email_new: &Email) -> Result<()> {
let _lock = self.lock()?;
fn set_email_published(&'a self, fpr_primary: &Fingerprint, email_new: &Email) -> Result<()> {
let tx = self.transaction()?;
self.nolock_unlink_email_if_other(fpr_primary, email_new)?;
self.unlink_email_if_other(&tx, fpr_primary, email_new)?;
let full_tpk = self
.by_fpr_full(fpr_primary)
@@ -494,25 +514,28 @@ pub trait Database: Sync + Send {
}
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
self.regenerate_wkd(&tx, fpr_primary, &published_tpk_clean)?;
self.update_write_log(fpr_primary);
if let Err(e) = self.link_email(email_new, fpr_primary) {
if let Err(e) = tx.link_email(email_new, fpr_primary) {
info!(
"Error ensuring email symlink! {} -> {} {:?}",
&email_new, &fpr_primary, e
);
}
tx.commit()?;
Ok(())
}
fn nolock_unlink_email_if_other(
fn unlink_email_if_other(
&self,
tx: &Self::Transaction,
fpr_primary: &Fingerprint,
unlink_email: &Email,
) -> Result<()> {
@@ -520,7 +543,7 @@ pub trait Database: Sync + Send {
self.lookup_primary_fingerprint(&Query::ByEmail(unlink_email.clone()));
if let Some(current_fpr) = current_link_fpr {
if current_fpr != *fpr_primary {
self.nolock_set_email_unpublished_filter(&current_fpr, |uid| {
self.set_email_unpublished_filter(&tx, &current_fpr, |uid| {
Email::try_from(uid)
.map(|email| email != *unlink_email)
.unwrap_or(false)
@@ -545,15 +568,7 @@ pub trait Database: Sync + Send {
/// 6. Update all symlinks
fn set_email_unpublished_filter(
&self,
fpr_primary: &Fingerprint,
email_remove: impl Fn(&UserID) -> bool,
) -> Result<()> {
let _lock = self.lock()?;
self.nolock_set_email_unpublished_filter(fpr_primary, email_remove)
}
fn nolock_set_email_unpublished_filter(
&self,
tx: &Self::Transaction,
fpr_primary: &Fingerprint,
email_remove: impl Fn(&UserID) -> bool,
) -> Result<()> {
@@ -581,15 +596,15 @@ pub trait Database: Sync + Send {
.filter(|email| !published_emails_new.contains(email));
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
self.regenerate_wkd(&tx, fpr_primary, &published_tpk_clean)?;
self.update_write_log(fpr_primary);
for unpublished_email in unpublished_emails {
if let Err(e) = self.unlink_email(unpublished_email, fpr_primary) {
if let Err(e) = tx.unlink_email(unpublished_email, fpr_primary) {
info!(
"Error deleting email symlink! {} -> {} {:?}",
&unpublished_email, &fpr_primary, e
@@ -600,19 +615,31 @@ pub trait Database: Sync + Send {
Ok(())
}
fn set_email_unpublished(&self, fpr_primary: &Fingerprint, email_remove: &Email) -> Result<()> {
self.set_email_unpublished_filter(fpr_primary, |uid| {
fn set_email_unpublished(
&'a self,
fpr_primary: &Fingerprint,
email_remove: &Email,
) -> Result<()> {
let tx = self.transaction().unwrap();
self.set_email_unpublished_filter(&tx, fpr_primary, |uid| {
Email::try_from(uid)
.map(|email| email != *email_remove)
.unwrap_or(false)
})
})?;
tx.commit()?;
Ok(())
}
fn set_email_unpublished_all(&self, fpr_primary: &Fingerprint) -> Result<()> {
self.set_email_unpublished_filter(fpr_primary, |_| false)
fn set_email_unpublished_all(&'a self, fpr_primary: &Fingerprint) -> Result<()> {
let tx = self.transaction().unwrap();
self.set_email_unpublished_filter(&tx, fpr_primary, |_| false)?;
tx.commit()?;
Ok(())
}
fn regenerate_links(&self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
fn regenerate_links(&'a self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
let tx = self.transaction().unwrap();
let tpk = self
.by_primary_fpr(fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
@@ -624,7 +651,7 @@ pub trait Database: Sync + Send {
.flatten()
.collect();
self.regenerate_wkd(fpr_primary, &tpk)?;
self.regenerate_wkd(&tx, fpr_primary, &tpk)?;
let fingerprints = tpk_get_linkable_fprs(&tpk);
@@ -642,14 +669,16 @@ pub trait Database: Sync + Send {
for fpr in fpr_not_linked {
keys_linked += 1;
self.link_fpr(&fpr, fpr_primary)?;
tx.link_fpr(&fpr, fpr_primary)?;
}
for email in published_emails {
emails_linked += 1;
self.link_email(&email, fpr_primary)?;
tx.link_email(&email, fpr_primary)?;
}
tx.commit()?;
if keys_linked != 0 || emails_linked != 0 {
Ok(RegenerateResult::Updated)
} else {
@@ -657,13 +686,18 @@ pub trait Database: Sync + Send {
}
}
fn regenerate_wkd(&self, fpr_primary: &Fingerprint, published_tpk: &Cert) -> Result<()> {
fn regenerate_wkd(
&self,
tx: &Self::Transaction,
fpr_primary: &Fingerprint,
published_tpk: &Cert,
) -> Result<()> {
let published_wkd_tpk_tmp = if published_tpk.userids().next().is_some() {
Some(self.write_to_temp(&published_tpk.export_to_vec()?)?)
Some(tx.write_to_temp(&published_tpk.export_to_vec()?)?)
} else {
None
};
self.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
tx.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
Ok(())
}

View File

@@ -19,7 +19,7 @@ pub fn is_status_revoked(status: RevocationStatus) -> bool {
}
pub fn tpk_to_string(tpk: &Cert) -> Result<Vec<u8>> {
tpk.armored().export_to_vec()
tpk.armored().to_vec()
}
pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
@@ -85,7 +85,10 @@ pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
/// Filters the Cert, keeping only UserIDs that aren't revoked, and whose emails match the given list
pub fn tpk_filter_alive_emails(tpk: &Cert, emails: &[Email]) -> Cert {
tpk.clone().retain_userids(|uid| {
if is_status_revoked(uid.revocation_status(&POLICY, None)) {
let is_exportable = uid.self_signatures().any(|s| s.exportable().is_ok());
if !is_exportable {
false
} else if is_status_revoked(uid.revocation_status(&POLICY, None)) {
false
} else if let Ok(email) = Email::try_from(uid.userid()) {
emails.contains(&email)

734
database/src/sqlite.rs Normal file
View File

@@ -0,0 +1,734 @@
use self_cell::self_cell;
use std::convert::TryFrom;
use std::fs::create_dir_all;
use std::path::PathBuf;
use std::str::FromStr;
use std::time::{SystemTime, UNIX_EPOCH};
use openpgp::policy::StandardPolicy;
use types::{Email, Fingerprint, KeyID};
use Result;
use {Database, Query};
use openpgp::Cert;
use r2d2_sqlite::rusqlite::params;
use r2d2_sqlite::rusqlite::OptionalExtension;
use r2d2_sqlite::rusqlite::ToSql;
use r2d2_sqlite::rusqlite::Transaction;
use r2d2_sqlite::SqliteConnectionManager;
use crate::{wkd, DatabaseTransaction};
// Standard policy used for all cert validity checks in this backend
// (see check_consistency below).
pub const POLICY: StandardPolicy = StandardPolicy::new();

/// Sqlite-backed key database. All state lives in a single `keys.sqlite`
/// file; connections are handed out through an r2d2 connection pool.
pub struct Sqlite {
    pool: r2d2::Pool<SqliteConnectionManager>,
}
impl Sqlite {
    /// Open (or create) the database file at `base_dir/keys.sqlite`.
    pub fn new_file(base_dir: impl Into<PathBuf>) -> Result<Self> {
        let base_dir: PathBuf = base_dir.into();
        let db_file = base_dir.join("keys.sqlite");
        let manager = SqliteConnectionManager::file(db_file);
        Self::new_internal(base_dir, manager)
    }

    /// Test builds trace every statement on every pooled connection to
    /// stdout, which makes failing database tests much easier to diagnose.
    #[cfg(test)]
    fn build_pool(manager: SqliteConnectionManager) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        #[derive(Copy, Clone, Debug)]
        pub struct LogConnectionCustomizer;
        impl<E> r2d2::CustomizeConnection<rusqlite::Connection, E> for LogConnectionCustomizer {
            fn on_acquire(&self, conn: &mut rusqlite::Connection) -> std::result::Result<(), E> {
                println!("Acquiring sqlite pool connection: {:?}", conn);
                conn.trace(Some(|query| {
                    println!("{}", query);
                }));
                std::result::Result::Ok(())
            }
            fn on_release(&self, conn: rusqlite::Connection) {
                println!("Releasing pool connection: {:?}", conn);
            }
        }
        Ok(r2d2::Pool::builder()
            .connection_customizer(Box::new(LogConnectionCustomizer {}))
            .build(manager)?)
    }

    #[cfg(not(test))]
    fn build_pool(manager: SqliteConnectionManager) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        Ok(r2d2::Pool::builder().build(manager)?)
    }

    /// Configure pragmas, create the schema if missing, and build the pool.
    fn new_internal(base_dir: PathBuf, manager: SqliteConnectionManager) -> Result<Self> {
        // NOTE(review): this directory is created but not otherwise used in
        // this file — presumably kept for compatibility with the filesystem
        // backend's log layout; confirm before removing.
        let keys_dir_log = base_dir.join("log");
        create_dir_all(&keys_dir_log)?;

        let pool = Self::build_pool(manager)?;
        let conn = pool.get()?;
        // WAL + synchronous=normal: the usual durable-enough and fast
        // configuration for server-side sqlite use.
        conn.pragma_update(None, "journal_mode", "wal")?;
        conn.pragma_update(None, "synchronous", "normal")?;
        conn.pragma_update(None, "user_version", "1")?;
        conn.execute_batch(
            "
            CREATE TABLE IF NOT EXISTS certs (
                primary_fingerprint TEXT NOT NULL PRIMARY KEY,
                full TEXT NOT NULL,
                published TEXT,
                published_not_armored BLOB,
                updated_at TIMESTAMP NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            CREATE TABLE IF NOT EXISTS cert_identifiers (
                fingerprint TEXT NOT NULL PRIMARY KEY,
                keyid TEXT NOT NULL,
                primary_fingerprint TEXT NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            CREATE TABLE IF NOT EXISTS emails (
                email TEXT NOT NULL PRIMARY KEY,
                domain TEXT NOT NULL,
                wkd_hash TEXT NOT NULL,
                primary_fingerprint TEXT NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            -- Secondary indexes for the non-primary-key access paths used
            -- elsewhere in this backend: by_kid (keyid), consistency checks
            -- (primary_fingerprint on both link tables), WKD lookup by
            -- (domain, wkd_hash), and get_last_log_entry (updated_at).
            CREATE INDEX IF NOT EXISTS cert_identifiers_keyid_idx
                ON cert_identifiers (keyid);
            CREATE INDEX IF NOT EXISTS cert_identifiers_primary_idx
                ON cert_identifiers (primary_fingerprint);
            CREATE INDEX IF NOT EXISTS emails_primary_idx
                ON emails (primary_fingerprint);
            CREATE INDEX IF NOT EXISTS emails_wkd_idx
                ON emails (domain, wkd_hash);
            CREATE INDEX IF NOT EXISTS certs_updated_at_idx
                ON certs (updated_at);
            ",
        )?;
        Ok(Self { pool })
    }
}
// A rusqlite Transaction borrows the Connection it runs on. Bundling the
// pooled connection (owner) and the transaction (dependent) into one
// self-referential struct lets us hand the pair around as a single owned
// value, which the DatabaseTransaction trait requires.
self_cell! {
    pub struct SqliteTransaction {
        owner: r2d2::PooledConnection<SqliteConnectionManager>,

        #[covariant]
        dependent: Transaction,
    }
}
impl SqliteTransaction {
    /// Begin a deferred transaction on a connection checked out from the
    /// pool. The connection is owned by the returned self_cell, so caller
    /// and transaction travel together as one value.
    fn start(pool: &r2d2::Pool<SqliteConnectionManager>) -> Result<Self> {
        let conn = pool.get()?;
        // try_new propagates the error from new_unchecked instead of the
        // previous in-closure .unwrap() panic. new_unchecked is required
        // because the self_cell only hands the builder a shared borrow of
        // the connection.
        Ok(Self::try_new(conn, |conn| {
            Transaction::new_unchecked(conn, rusqlite::TransactionBehavior::Deferred)
        })?)
    }

    /// Borrow the underlying rusqlite transaction.
    fn tx(&self) -> &Transaction {
        self.borrow_dependent()
    }
}
/// Run a single-value lookup query against `conn`.
///
/// Returns `Some(first column)` of the first matching row, or `None` when
/// no row matches. Panics on invalid SQL or execution failure — callers
/// treat both as unrecoverable programming errors.
fn query_simple<T: rusqlite::types::FromSql>(
    conn: &r2d2::PooledConnection<SqliteConnectionManager>,
    query: &str,
    params: &[&dyn ToSql],
) -> Option<T> {
    conn.prepare_cached(query)
        .expect("query must be valid")
        .query_row(params, |row| row.get(0))
        .optional()
        // fix: panic message previously read "exection"
        .expect("query execution must not fail")
}
/// Current wall-clock time in milliseconds since the Unix epoch, used to
/// fill the created_at/updated_at columns.
///
/// Panics if the system clock reports a time before the epoch.
fn unix_millis() -> u64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("Time went backwards")
        .as_millis() as u64
}

impl<'a> DatabaseTransaction<'a> for SqliteTransaction {
    /// No real temp files for the sqlite backend: cert bytes are buffered
    /// in memory until one of the move_tmp_* methods writes them to a table.
    type TempCert = Vec<u8>;

    fn commit(self) -> Result<()> {
        // we can't use tx().commit() (the Transaction is owned by the
        // self_cell and can't be moved out), but we can cheat :)
        self.tx().execute_batch("COMMIT")?;
        Ok(())
    }

    fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert> {
        Ok(content.to_vec())
    }

    /// Insert or update the full (unfiltered) armored cert for `fpr`.
    fn move_tmp_to_full(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
        let now = unix_millis();
        let file = String::from_utf8(file)?;
        self.tx().execute(
            "
            INSERT INTO certs (primary_fingerprint, full, created_at, updated_at)
            VALUES (?1, ?2, ?3, ?3)
            ON CONFLICT(primary_fingerprint) DO UPDATE SET full=excluded.full, updated_at = excluded.updated_at
            ",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    /// Set the published (armored) cert for an already-inserted `fpr`.
    fn move_tmp_to_published(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
        let now = unix_millis();
        let file = String::from_utf8(file)?;
        self.tx().execute(
            "UPDATE certs SET published = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    /// Set (or clear, when `file` is None) the binary cert served via WKD.
    fn move_tmp_to_published_wkd(
        &self,
        file: Option<Self::TempCert>,
        fpr: &Fingerprint,
    ) -> Result<()> {
        let now = unix_millis();
        self.tx().execute(
            "UPDATE certs SET published_not_armored = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    fn write_to_quarantine(&self, _fpr: &Fingerprint, _content: &[u8]) -> Result<()> {
        // Quarantined material is not persisted by the sqlite backend.
        Ok(())
    }

    /// Point `email` at `fpr`, inserting the link or re-targeting an
    /// existing one.
    fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        let now = unix_millis();
        // NOTE(review): these binding names look swapped relative to the
        // column list below (?2 fills wkd_hash, ?3 fills domain). Whether
        // the stored values end up correct depends on the tuple order
        // returned by wkd::encode_wkd — verify against that function.
        let (domain, wkd_hash) = wkd::encode_wkd(email.as_str()).expect("email must be valid");
        self.tx().execute(
            "
            INSERT INTO emails (email, wkd_hash, domain, primary_fingerprint, created_at)
            VALUES (?1, ?2, ?3, ?4, ?5)
            ON CONFLICT(email) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint
            ",
            params![email, domain, wkd_hash, fpr, now],
        )?;
        Ok(())
    }

    fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        // Propagate execute() errors with `?` like every sibling method,
        // instead of the previous .unwrap() panic.
        self.tx().execute(
            "DELETE FROM emails WHERE email = ?1 AND primary_fingerprint = ?2",
            params![email, fpr],
        )?;
        Ok(())
    }

    /// Register `from_fpr` (and its derived keyid) as identifiers that
    /// resolve to `primary_fpr`.
    fn link_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
        let now = unix_millis();
        self.tx().execute(
            "
            INSERT INTO cert_identifiers (fingerprint, keyid, primary_fingerprint, created_at)
            VALUES (?1, ?2, ?3, ?4)
            ON CONFLICT(fingerprint) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint;
            ",
            params![from_fpr, KeyID::from(from_fpr), primary_fpr, now],
        )?;
        Ok(())
    }

    fn unlink_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
        self.tx().execute(
            "DELETE FROM cert_identifiers WHERE primary_fingerprint = ?1 AND fingerprint = ?2 AND keyid = ?3",
            params![primary_fpr, from_fpr, KeyID::from(from_fpr)],
        )?;
        Ok(())
    }
}
impl<'a> Database<'a> for Sqlite {
type Transaction = SqliteTransaction;
fn transaction(&'a self) -> Result<Self::Transaction> {
SqliteTransaction::start(&self.pool)
}
fn write_log_append(&self, _filename: &str, _fpr_primary: &Fingerprint) -> Result<()> {
// this is done implicitly via created_at in sqlite, no need to do anything here
Ok(())
}
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
use super::Query::*;
let conn = self.pool.get().unwrap();
match term {
ByFingerprint(ref fp) => query_simple(
&conn,
"SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
params![fp],
),
ByKeyID(ref keyid) => query_simple(
&conn,
"SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
params![keyid],
),
ByEmail(ref email) => query_simple(
&conn,
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
params![email],
),
_ => return None,
}
}
// Lookup straight from certs table, no link resolution
fn by_fpr_full(&self, primary_fpr: &Fingerprint) -> Option<String> {
let conn = self.pool.get().unwrap();
query_simple(
&conn,
"SELECT full FROM certs WHERE primary_fingerprint = ?1",
params![primary_fpr],
)
}
// XXX: rename! to by_primary_fpr_published
// Lookup the published cert straight from certs table, no link resolution
fn by_primary_fpr(&self, primary_fpr: &Fingerprint) -> Option<String> {
let conn = self.pool.get().unwrap();
query_simple(
&conn,
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
params![primary_fpr],
)
}
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
let conn = self.pool.get().unwrap();
query_simple::<Fingerprint>(
&conn,
"SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
params![fpr],
)
.and_then(|primary_fpr| {
query_simple(
&conn,
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
params![&primary_fpr],
)
})
}
fn by_email(&self, email: &Email) -> Option<String> {
let conn = self.pool.get().unwrap();
query_simple::<Fingerprint>(
&conn,
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
params![email],
)
.and_then(|primary_fpr| {
query_simple(
&conn,
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
params![&primary_fpr],
)
})
}
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>> {
let conn = self.pool.get().unwrap();
query_simple::<Fingerprint>(
&conn,
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
params![email],
)
.and_then(|primary_fpr| {
query_simple(
&conn,
"SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
params![&primary_fpr],
)
})
}
fn by_kid(&self, kid: &KeyID) -> Option<String> {
let conn = self.pool.get().unwrap();
query_simple::<Fingerprint>(
&conn,
"SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
params![kid],
)
.and_then(|primary_fpr| {
query_simple(
&conn,
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
params![primary_fpr],
)
})
}
fn by_domain_and_hash_wkd(&self, domain: &str, wkd_hash: &str) -> Option<Vec<u8>> {
let conn = self.pool.get().unwrap();
query_simple::<Fingerprint>(
&conn,
"SELECT primary_fingerprint FROM emails WHERE domain = ?1 AND wkd_hash = ?2",
params![domain, wkd_hash],
)
.and_then(|primary_fpr| {
query_simple(
&conn,
"SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
params![primary_fpr],
)
})
}
fn check_link_fpr(
&self,
fpr: &Fingerprint,
_fpr_target: &Fingerprint,
) -> Result<Option<Fingerprint>> {
// a desync here cannot happen structurally, so always return true here
Ok(Some(fpr.clone()))
}
/// Checks the database for consistency.
///
/// Note that this operation may take a long time, and is
/// generally only useful for testing.
fn check_consistency(&self) -> Result<()> {
let conn = self.pool.get().unwrap();
let mut stmt = conn.prepare("SELECT primary_fingerprint, published FROM certs")?;
let mut rows = stmt.query([])?;
while let Some(row) = rows.next()? {
let primary_fpr: Fingerprint = row.get(0)?;
let published: String = row.get(1)?;
let cert = Cert::from_str(&published).unwrap();
let mut cert_emails: Vec<Email> = cert
.userids()
.map(|uid| uid.userid().email2().unwrap())
.flatten()
.map(|email| Email::from_str(&email))
.flatten()
.collect();
let mut db_emails: Vec<Email> = conn
.prepare("SELECT email FROM emails WHERE primary_fingerprint = ?1")?
.query_map([&primary_fpr], |row| row.get::<_, String>(0))
.unwrap()
.map(|email| Email::from_str(&email.unwrap()))
.flatten()
.collect();
cert_emails.sort();
cert_emails.dedup();
db_emails.sort();
if cert_emails != db_emails {
return Err(format_err!(
"{:?} does not have correct emails indexed, cert ${:?} db {:?}",
&primary_fpr,
cert_emails,
db_emails,
));
}
let policy = &POLICY;
let mut cert_fprs: Vec<Fingerprint> = cert
.keys()
.with_policy(policy, None)
.for_certification()
.for_signing()
.map(|amalgamation| amalgamation.key().fingerprint())
.map(Fingerprint::try_from)
.flatten()
.collect();
let mut db_fprs: Vec<Fingerprint> = conn
.prepare("SELECT fingerprint FROM cert_identifiers WHERE primary_fingerprint = ?1")?
.query_map([&primary_fpr], |row| row.get::<_, Fingerprint>(0))
.unwrap()
.flatten()
.collect();
cert_fprs.sort();
db_fprs.sort();
if cert_fprs != db_fprs {
return Err(format_err!(
"{:?} does not have correct fingerprints indexed, cert ${:?} db {:?}",
&primary_fpr,
cert_fprs,
db_fprs,
));
}
}
Ok(())
}
fn get_last_log_entry(&self) -> Result<Fingerprint> {
let conn = self.pool.get().unwrap();
Ok(conn.query_row(
"SELECT primary_fingerprint FROM certs ORDER BY updated_at DESC LIMIT 1",
[],
|row| row.get::<_, Fingerprint>(0),
)?)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use openpgp::cert::CertBuilder;
    use tempfile::TempDir;
    use test;

    // Opaque payloads for the storage-level tests below; only equality
    // matters, the content is never parsed as a cert.
    const DATA_1: &str = "data, content doesn't matter";
    const DATA_2: &str = "other data, content doesn't matter";
    const FINGERPRINT_1: &str = "D4AB192964F76A7F8F8A9B357BD18320DEADFA11";

    // Fresh sqlite database in a temp dir. The TempDir must be kept alive
    // by the caller for as long as the database is used.
    fn open_db() -> (TempDir, Sqlite) {
        let tmpdir = TempDir::new().unwrap();
        let db = Sqlite::new_file(tmpdir.path()).unwrap();
        (tmpdir, db)
    }

    #[test]
    fn new() {
        let (_tmp_dir, db) = open_db();
        let k1 = CertBuilder::new()
            .add_userid("a@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k2 = CertBuilder::new()
            .add_userid("b@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k3 = CertBuilder::new()
            .add_userid("c@invalid.example.org")
            .generate()
            .unwrap()
            .0;

        // First upload of each cert should report at least one email status.
        assert!(db.merge(k1).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(
            db.merge(k2.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        // NOTE(review): unary `!` binds tighter than `>`, so this parses as
        // `(!len) > 0` — a bitwise NOT on usize, which is true for any
        // realistic len. The assertion is effectively a no-op; the intent
        // was probably `assert!(!(len > 0))` or `assert_eq!(len, 0)`.
        // Verify what a re-merge actually returns before tightening it.
        assert!(!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(
            db.merge(k3.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        // NOTE(review): same `!`/`>` precedence caveat as above.
        assert!(
            !db.merge(k3.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        // NOTE(review): same `!`/`>` precedence caveat as above.
        assert!(!db.merge(k3).unwrap().into_tpk_status().email_status.len() > 0);
    }

    // Storage-level round-trip: full cert is stored and retrievable by
    // primary fingerprint after commit.
    #[test]
    fn xx_by_fpr_full() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.link_fpr(&fpr1, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_fpr_full(&fpr1).expect("must find key"), DATA_1);
        Ok(())
    }

    // Published cert must be resolvable through the keyid link.
    #[test]
    fn xx_by_kid() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
        lock.link_fpr(&fpr1, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_kid(&fpr1.into()).expect("must find key"), DATA_2);
        Ok(())
    }

    // by_primary_fpr returns the published variant, not the full one.
    #[test]
    fn xx_by_primary_fpr() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_primary_fpr(&fpr1).expect("must find key"), DATA_2);
        Ok(())
    }

    // The tests below delegate to the backend-agnostic suite in the
    // crate's `test` module, then verify the sqlite indexes stayed
    // consistent with the stored certs.
    #[test]
    fn uid_verification() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_verification(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_deletion() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_deletion(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn subkey_lookup() {
        let (_tmp_dir, mut db) = open_db();
        test::test_subkey_lookup(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn kid_lookup() {
        let (_tmp_dir, mut db) = open_db();
        test::test_kid_lookup(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn upload_revoked_tpk() {
        let (_tmp_dir, mut db) = open_db();
        test::test_upload_revoked_tpk(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_revocation() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_revocation(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn regenerate() {
        let (_tmp_dir, mut db) = open_db();
        test::test_regenerate(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn key_reupload() {
        let (_tmp_dir, mut db) = open_db();
        test::test_reupload(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_replacement() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_replacement(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_unlinking() {
        let (_tmp_dir, mut db) = open_db();
        test::test_unlink_uid(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_1() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_1(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_2() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_2(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_3() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_3(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_4() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_4(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn no_selfsig() {
        let (_tmp_dir, mut db) = open_db();
        test::test_no_selfsig(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn bad_uids() {
        let (_tmp_dir, mut db) = open_db();
        test::test_bad_uids(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn reverse_fingerprint_to_path() {
        let tmpdir = TempDir::new().unwrap();
        let db = Sqlite::new_file(tmpdir.path()).unwrap();

        let _fp: Fingerprint = "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();

        // XXX: fixme
        //assert_eq!(Sqlite::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
        //           Some(fp.clone()));
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn attested_key_signatures() -> Result<()> {
        let (_tmp_dir, mut db) = open_db();
        test::attested_key_signatures(&mut db)?;
        db.check_consistency()?;
        Ok(())
    }

    #[test]
    fn nonexportable_sigs() -> Result<()> {
        let (_tmp_dir, mut db) = open_db();
        test::nonexportable_sigs(&mut db)?;
        db.check_consistency()?;
        Ok(())
    }
}

View File

@@ -26,8 +26,6 @@ use openpgp::{
types::RevocationStatus,
Cert, Packet,
};
use std::fs;
use std::path::Path;
use types::{Email, Fingerprint, KeyID};
use Database;
use Query;
@@ -37,17 +35,19 @@ use openpgp_utils::POLICY;
use EmailAddressStatus;
use TpkStatus;
fn check_mail_none(db: &impl Database, email: &Email) {
use crate::DatabaseTransaction;
fn check_mail_none<'a>(db: &impl Database<'a>, email: &Email) {
assert!(db.by_email(email).is_none());
assert!(db.by_email_wkd(email).is_none());
}
fn check_mail_some(db: &impl Database, email: &Email) {
fn check_mail_some<'a>(db: &impl Database<'a>, email: &Email) {
assert!(db.by_email(email).is_some());
assert!(db.by_email_wkd(email).is_some());
}
pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
pub fn test_uid_verification<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let str_uid2 = "Test B <test_b@example.com>";
let tpk = CertBuilder::new()
@@ -64,7 +64,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
@@ -169,7 +169,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
}
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -275,7 +275,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
}*/
}
pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
pub fn test_regenerate<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let tpk = CertBuilder::new()
.add_userid(str_uid1)
@@ -303,7 +303,7 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
// upload key
db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
db.regenerate_links(&fpr).unwrap();
check_mail_none(db, &email1);
@@ -313,23 +313,35 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
db.set_email_published(&fpr, &email1).unwrap();
db.unlink_email(&email1, &fpr).unwrap();
{
let lock = db.transaction().unwrap();
lock.unlink_email(&email1, &fpr).unwrap();
lock.commit().unwrap();
}
assert!(db.check_consistency().is_err());
db.regenerate_links(&fpr).unwrap();
assert!(db.check_consistency().is_ok());
db.check_consistency().expect("consistency must return Ok");
db.unlink_fpr(&fpr, &fpr).unwrap();
{
let lock = db.transaction().unwrap();
lock.unlink_fpr(&fpr, &fpr).unwrap();
lock.commit().unwrap();
}
assert!(db.check_consistency().is_err());
db.regenerate_links(&fpr).unwrap();
assert!(db.check_consistency().is_ok());
db.check_consistency().expect("consistency must return Ok");
db.unlink_fpr(&fpr_sign, &fpr).unwrap();
{
let lock = db.transaction().unwrap();
lock.unlink_fpr(&fpr_sign, &fpr).unwrap();
lock.commit().unwrap();
}
assert!(db.check_consistency().is_err());
db.regenerate_links(&fpr).unwrap();
assert!(db.check_consistency().is_ok());
db.check_consistency().expect("consistency must return Ok");
}
pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
pub fn test_reupload<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let str_uid2 = "Test B <test_b@example.com>";
let tpk = CertBuilder::new()
@@ -344,7 +356,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
// upload key
db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
// verify 1st uid
db.set_email_published(&fpr, &email1).unwrap();
@@ -367,7 +379,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
assert!(db.by_email(&email2).is_none() ^ db.by_email(&email1).is_none());
}
pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
pub fn test_uid_replacement<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let tpk1 = CertBuilder::new()
.add_userid(str_uid1)
@@ -390,9 +402,9 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
// upload both keys
db.merge(tpk1).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr1);
check_log_entry(db, &fpr1);
db.merge(tpk2).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr2);
check_log_entry(db, &fpr2);
// verify 1st uid
db.set_email_published(&fpr1, &email1).unwrap();
@@ -445,7 +457,7 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
);
}
pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
pub fn test_uid_deletion<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let str_uid2 = "Test B <test_b@example.com>";
let tpk = CertBuilder::new()
@@ -463,7 +475,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
// upload key and verify uids
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -509,7 +521,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
assert_eq!(tpk.keys().subkeys().count(), n_subkeys);
}
pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
pub fn test_subkey_lookup<'a>(db: &'a mut impl Database<'a>) {
let tpk = CertBuilder::new()
.add_userid("Testy <test@example.com>")
.add_signing_subkey()
@@ -549,7 +561,7 @@ pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
assert_eq!(raw1, raw2);
}
pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
pub fn test_kid_lookup<'a>(db: &'a mut impl Database<'a>) {
let tpk = CertBuilder::new()
.add_userid("Testy <test@example.com>")
.add_signing_subkey()
@@ -588,7 +600,7 @@ pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
assert_eq!(raw1, raw2);
}
pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
pub fn test_upload_revoked_tpk<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "Test A <test_a@example.com>";
let str_uid2 = "Test B <test_b@example.com>";
let (mut tpk, revocation) = CertBuilder::new()
@@ -616,7 +628,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: true,
@@ -633,7 +645,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
check_mail_none(db, &email2);
}
pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
pub fn test_uid_revocation<'a>(db: &'a mut impl Database<'a>) {
use std::{thread, time};
let str_uid1 = "Test A <test_a@example.com>";
@@ -651,7 +663,7 @@ pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -804,7 +816,7 @@ pub fn test_uid_revocation_fake(db: &mut D) {
}
*/
pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
pub fn test_unlink_uid<'a>(db: &'a mut impl Database<'a>) {
let uid = "Test A <test_a@example.com>";
let email = Email::from_str(uid).unwrap();
@@ -853,7 +865,7 @@ pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
let tpk_evil = tpk_evil.insert_packets(sig).unwrap();
let tpk_status = db.merge(tpk_evil).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr_evil);
check_log_entry(db, &fpr_evil);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -883,7 +895,7 @@ pub fn get_userids(armored: &str) -> Vec<UserID> {
// If multiple keys have the same email address, make sure things work
// as expected.
pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
pub fn test_same_email_1<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "A <test@example.com>";
let tpk1 = CertBuilder::new()
.add_userid(str_uid1)
@@ -906,7 +918,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
// upload keys.
let tpk_status1 = db.merge(tpk1).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr1);
check_log_entry(db, &fpr1);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -916,7 +928,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
tpk_status1
);
let tpk_status2 = db.merge(tpk2.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr2);
check_log_entry(db, &fpr2);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -984,7 +996,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
let tpk2 = tpk2.insert_packets(sig).unwrap();
let tpk_status2 = db.merge(tpk2).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr2);
check_log_entry(db, &fpr2);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1003,7 +1015,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
// sure things still work. We do this twice (see above), to
// make sure the order isn't relevant when revoking one user id
// but leaving the other.
pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
pub fn test_same_email_2<'a>(db: &'a mut impl Database<'a>) {
use std::{thread, time};
let str_uid1 = "A <test@example.com>";
@@ -1021,7 +1033,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
// verify uid1
assert_eq!(
@@ -1074,7 +1086,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
let tpk = tpk.insert_packets(sig).unwrap();
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1092,7 +1104,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
// sure things still work. We do this twice (see above), to
// make sure the order isn't relevant when revoking one user id
// but leaving the other.
pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
pub fn test_same_email_3<'a>(db: &'a mut impl Database<'a>) {
use std::{thread, time};
let str_uid1 = "A <test@example.com>";
@@ -1110,7 +1122,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
// verify uid1
assert_eq!(
@@ -1163,7 +1175,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
let tpk = tpk.insert_packets(sig).unwrap();
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1186,7 +1198,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
// If a key has a verified email address, make sure newly uploaded user
// ids with the same email are published as well.
pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
pub fn test_same_email_4<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "A <test@example.com>";
let str_uid2 = "B <test@example.com>";
let tpk = CertBuilder::new()
@@ -1205,7 +1217,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
// upload key
let tpk_status = db.merge(cert_uid_1).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
db.set_email_published(&fpr, &tpk_status.email_status[0].0)
.unwrap();
assert_eq!(
@@ -1214,7 +1226,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
);
let tpk_status = db.merge(cert_uid_2).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1231,7 +1243,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
);
}
pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
pub fn test_bad_uids<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "foo@bar.example <foo@bar.example>";
let str_uid2 = "A <test@example.com>";
let str_uid3 = "lalalalaaaaa";
@@ -1247,7 +1259,7 @@ pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
let email2 = Email::from_str(str_uid2).unwrap();
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1293,7 +1305,7 @@ fn cert_without_signature_at(cert: Cert, mut index: i32) -> Cert {
Cert::from_packets(packets).unwrap()
}
pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
pub fn test_unsigned_uids<'a>(db: &'a mut impl Database<'a>) {
let str_uid1 = "test1@example.com";
let str_uid2 = "test2@example.com";
let tpk = CertBuilder::new()
@@ -1308,7 +1320,7 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
let tpk = cert_without_signature_at(tpk, 1);
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: false,
@@ -1319,7 +1331,7 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
);
}
pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
pub fn test_no_selfsig<'a>(db: &'a mut impl Database<'a>) {
let (mut tpk, revocation) = CertBuilder::new().generate().unwrap();
let fpr = Fingerprint::try_from(tpk.fingerprint()).unwrap();
@@ -1329,7 +1341,7 @@ pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
// with revocation, it's ok
tpk = tpk.insert_packets(revocation).unwrap();
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
check_log_entry(log_path, &fpr);
check_log_entry(db, &fpr);
assert_eq!(
TpkStatus {
is_revoked: true,
@@ -1341,7 +1353,7 @@ pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
}
/// Makes sure that attested key signatures are correctly handled.
pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Result<()> {
pub fn attested_key_signatures<'a>(db: &'a mut impl Database<'a>) -> Result<()> {
use openpgp::types::*;
use std::time::{Duration, SystemTime};
let t0 = SystemTime::now() - Duration::new(5 * 60, 0);
@@ -1390,7 +1402,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
// Now for the test. First, import Bob's cert as is.
db.merge(bob.clone())?;
check_log_entry(log_path, &bobs_fp);
check_log_entry(db, &bobs_fp);
// Confirm the email so that we can inspect the userid component.
db.set_email_published(&bobs_fp, &Email::from_str("bob@bar.com")?)?;
@@ -1399,7 +1411,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
// certification is stripped.
let bob = bob.insert_packets(vec![alice_certifies_bob.clone()])?;
db.merge(bob.clone())?;
check_log_entry(log_path, &bobs_fp);
check_log_entry(db, &bobs_fp);
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
assert_eq!(bob_.bad_signatures().count(), 0);
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
@@ -1408,7 +1420,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
// certification is now included.
let bob_attested = bob.clone().insert_packets(vec![attestation])?;
db.merge(bob_attested.clone())?;
check_log_entry(log_path, &bobs_fp);
check_log_entry(db, &bobs_fp);
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
assert_eq!(bob_.bad_signatures().count(), 0);
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
@@ -1434,7 +1446,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
// Make a random merge with Bob's unattested cert, demonstrating
// that the attestation still works.
db.merge(bob.clone())?;
check_log_entry(log_path, &bobs_fp);
check_log_entry(db, &bobs_fp);
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
assert_eq!(bob_.bad_signatures().count(), 0);
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
@@ -1471,7 +1483,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
);
db.merge(bob)?;
check_log_entry(log_path, &bobs_fp);
check_log_entry(db, &bobs_fp);
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
assert_eq!(bob_.bad_signatures().count(), 0);
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
@@ -1497,10 +1509,9 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
Ok(())
}
fn check_log_entry(log_path: &Path, fpr: &Fingerprint) {
let log_data = fs::read_to_string(log_path).unwrap();
let last_entry = log_data.lines().last().unwrap().split(' ').last().unwrap();
assert_eq!(last_entry, fpr.to_string());
fn check_log_entry<'a>(db: &impl Database<'a>, fpr: &Fingerprint) {
    // The database's most recent log entry must reference the given
    // fingerprint; fail the test immediately if no entry was recorded.
    let logged = db.get_last_log_entry().expect("must have log entry");
    assert_eq!(logged.to_string(), fpr.to_string());
}
fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
@@ -1514,7 +1525,7 @@ fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
Cert::from_packets(packets).unwrap()
}
pub fn nonexportable_sigs(db: &mut impl Database, _log_path: &Path) -> Result<()> {
pub fn nonexportable_sigs<'a>(db: &'a mut impl Database<'a>) -> Result<()> {
let str_uid1 = "Test A <test_a@example.org>";
let str_uid2 = "Test B <test_b@example.org>";

View File

@@ -4,7 +4,13 @@ use std::result;
use std::str::FromStr;
use anyhow::Error;
use hex::ToHex;
use openpgp::packet::UserID;
use r2d2_sqlite::rusqlite::types::FromSql;
use r2d2_sqlite::rusqlite::types::FromSqlError;
use r2d2_sqlite::rusqlite::types::FromSqlResult;
use r2d2_sqlite::rusqlite::types::ToSql;
use r2d2_sqlite::rusqlite::types::ValueRef;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use Result;
@@ -26,6 +32,22 @@ impl Email {
}
}
impl FromSql for Email {
    /// Reads an `Email` from a SQL text value; the stored string must
    /// parse as a valid email address.
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        let text = value.as_str()?;
        Self::from_str(text).map_err(|_| FromSqlError::InvalidType)
    }
}
impl ToSql for Email {
    /// Writes the email address as a SQL text value, borrowing the
    /// underlying string so no allocation is needed.
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
        let text = rusqlite::types::ValueRef::Text(self.0.as_bytes());
        Ok(rusqlite::types::ToSqlOutput::Borrowed(text))
    }
}
impl TryFrom<&UserID> for Email {
type Error = Error;
@@ -77,9 +99,25 @@ impl FromStr for Email {
}
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Fingerprint([u8; 20]);
impl FromSql for Fingerprint {
    /// Reads a `Fingerprint` from a SQL text value; the stored string
    /// must parse as a valid fingerprint.
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        let text = value.as_str()?;
        Self::from_str(text).map_err(|_| FromSqlError::InvalidType)
    }
}
impl ToSql for Fingerprint {
    /// Writes the fingerprint as a SQL text value, using its `Display`
    /// form (uppercase hex).
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
        let text = rusqlite::types::Value::Text(self.to_string());
        Ok(rusqlite::types::ToSqlOutput::Owned(text))
    }
}
impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
type Error = Error;
@@ -94,7 +132,6 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
impl fmt::Display for Fingerprint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use hex::ToHex;
self.0.write_hex_upper(f)
}
}
@@ -137,6 +174,22 @@ impl FromStr for Fingerprint {
#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
pub struct KeyID([u8; 8]);
impl FromSql for KeyID {
    /// Reads a `KeyID` from a SQL text value; the stored string must
    /// parse as a valid key ID.
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        let text = value.as_str()?;
        Self::from_str(text).map_err(|_| FromSqlError::InvalidType)
    }
}
impl ToSql for KeyID {
    /// Writes the key ID as a SQL text value, using its `Display` form
    /// (uppercase hex).
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
        let text = rusqlite::types::Value::Text(self.to_string());
        Ok(rusqlite::types::ToSqlOutput::Owned(text))
    }
}
impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
type Error = Error;
@@ -169,7 +222,6 @@ impl From<Fingerprint> for KeyID {
impl fmt::Display for KeyID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use hex::ToHex;
self.0.write_hex_upper(f)
}
}

36
default.nix Normal file
View File

@@ -0,0 +1,36 @@
# Nix package expression for hagrid. Built with rustPlatform from the
# checked-in Cargo.lock; the git-sourced rocket_i18n dependency needs an
# explicit output hash.
{ lib, rustPlatform, sqlite, openssl, gettext, pkg-config }:
rustPlatform.buildRustPackage rec {
  pname = "hagrid";
  # NOTE(review): confirm this stays in sync with the crate version in
  # Cargo.toml.
  version = "1.0.0";
  src = ./.;
  cargoLock = {
    lockFile = ./Cargo.lock;
    # Fixed-output hash for the git dependency pinned in Cargo.lock.
    outputHashes = {
      "rocket_i18n-0.5.0" = "sha256-EbUE8Z3TQBnDnptl9qWK6JvsACCgP7EXTxcA7pouYbc=";
    };
  };
  # Ship the dist/ assets next to the installed binaries.
  postInstall = ''
    cp -r dist $out
  '';
  nativeBuildInputs = [
    pkg-config
    gettext
  ];
  buildInputs = [
    sqlite
    openssl
  ];
  meta = with lib; {
    description = "A verifying keyserver";
    homepage = "https://gitlab.com/keys.openpgp.org/hagrid";
    license = with licenses; [ gpl3 ];
    maintainers = with maintainers; [ valodim ];
    platforms = platforms.all;
  };
}

View File

@@ -1,10 +1,10 @@
FROM rustlang/rust:nightly
FROM rust:bullseye
RUN apt update -qy
RUN apt install -qy libclang-dev build-essential pkg-config clang libssl-dev gettext zsh
RUN apt install -qy libclang-dev build-essential pkg-config clang libssl-dev libsqlite3-dev gettext zsh
RUN useradd -u 1000 -d /home/user user && mkdir /home/user && chown user:user /home/user
USER user
RUN rustup install 1.70.0
RUN rustup install 1.82.0
WORKDIR /home/user/src

61
flake.lock generated Normal file
View File

@@ -0,0 +1,61 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1739357830,
"narHash": "sha256-9xim3nJJUFbVbJCz48UP4fGRStVW5nv4VdbimbKxJ3I=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0ff09db9d034a04acd4e8908820ba0b410d7a33a",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-24.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"utils": "utils"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

16
flake.nix Normal file
View File

@@ -0,0 +1,16 @@
# Flake wrapper: exposes the package from ./default.nix for every default
# system, plus an overlay that injects it into nixpkgs.
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
    utils.url = "github:numtide/flake-utils";
  };
  outputs = { self, nixpkgs, utils }:
    utils.lib.eachDefaultSystem (system: let
      pkgs = nixpkgs.legacyPackages."${system}";
    in rec {
      # Built via callPackage on ./default.nix.
      packages.hagrid = pkgs.callPackage ./. { };
      packages.default = packages.hagrid;
    }) // {
      # Overlay resolving hagrid for the consumer's system.
      overlays.hagrid = (final: prev: { hagrid = self.packages."${final.system}".hagrid; });
      overlays.default = self.overlays.hagrid;
    };
}

View File

@@ -36,12 +36,12 @@ location /vks {
limit_req zone=search_fpr_keyid burst=1000 nodelay;
error_page 404 /errors-static/404-by-fpr.htm;
default_type application/pgp-keys;
add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
# default_type application/pgp-keys;
# add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Cache-Control' 'no-cache' always;
etag off;
try_files /keys/links/by-fpr/$1/$2/$3 =404;
proxy_pass http://127.0.0.1:8080;
}
location ~ ^/vks/v1/by-keyid/(?:0x)?([^/][^/])([^/][^/])(.*)$ {
@@ -49,12 +49,12 @@ location /vks {
error_page 429 /errors-static/429-rate-limit-vks-fpr.htm;
error_page 404 /errors-static/404-by-keyid.htm;
default_type application/pgp-keys;
add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
# default_type application/pgp-keys;
# add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Cache-Control' 'no-cache' always;
etag off;
try_files /keys/links/by-keyid/$1/$2/$3 =404;
proxy_pass http://127.0.0.1:8080;
}
location /vks/v1/by-email/ {
@@ -110,12 +110,12 @@ location /.well-known/openpgpkey {
error_page 429 /errors-static/429-rate-limit-vks-email.htm;
error_page 404 /errors-static/404-wkd.htm;
default_type application/octet-stream;
add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
# default_type application/octet-stream;
# add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Cache-Control' 'no-cache' always;
etag off;
try_files /keys/links/wkd/$1/$2/$3/$4 =404;
proxy_pass http://127.0.0.1:8080;
}
location ~ "^/.well-known/openpgpkey/([^/]+)/policy$" {

View File

@@ -6,7 +6,7 @@ authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
[dependencies]
hagrid-database = { path = "../database" }
anyhow = "1"
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
sequoia-openpgp = { version = "1.17.0", default-features = false, features = ["crypto-openssl"] }
multipart = "0"
log = "0"
rand = "0.6"

View File

@@ -1,4 +1,5 @@
use std::cmp::min;
use std::convert::TryInto;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
@@ -14,7 +15,7 @@ use openpgp::parse::{PacketParser, PacketParserResult, Parse};
use openpgp::Packet;
extern crate hagrid_database as database;
use database::{Database, ImportResult, KeyDatabase};
use database::{Database, EmailAddressStatus, ImportResult, KeyDatabase};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
@@ -26,7 +27,7 @@ use HagridConfig;
const NUM_THREADS_MAX: usize = 3;
#[allow(clippy::needless_collect)]
pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>) -> Result<()> {
pub fn do_import(config: &HagridConfig, input_files: Vec<PathBuf>) -> Result<()> {
let num_threads = min(NUM_THREADS_MAX, input_files.len());
let input_file_chunks = setup_chunks(input_files, num_threads);
@@ -39,7 +40,7 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
let config = config.clone();
let multi_progress = multi_progress.clone();
thread::spawn(move || {
import_from_files(&config, dry_run, input_file_chunk, multi_progress).unwrap();
import_from_files(&config, input_file_chunk, multi_progress).unwrap();
})
})
.collect();
@@ -116,16 +117,10 @@ impl<'a> ImportStats<'a> {
fn import_from_files(
config: &HagridConfig,
dry_run: bool,
input_files: Vec<PathBuf>,
multi_progress: Arc<MultiProgress>,
) -> Result<()> {
let db = KeyDatabase::new_internal(
config.keys_internal_dir.as_ref().unwrap(),
config.keys_external_dir.as_ref().unwrap(),
config.tmp_dir.as_ref().unwrap(),
dry_run,
)?;
let db = KeyDatabase::new_file(config.keys_internal_dir.as_ref().unwrap())?;
for input_file in input_files {
import_from_file(&db, &input_file, &multi_progress)?;
@@ -152,15 +147,33 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
read_file_to_tpks(input_reader, &mut |acc| {
let primary_key = acc[0].clone();
let key_fpr = match primary_key {
Packet::PublicKey(key) => key.fingerprint(),
Packet::SecretKey(key) => key.fingerprint(),
_ => return (),
};
let result = import_key(db, acc);
if let Ok(ref result) = result {
let tpk_status = result.as_tpk_status();
if !tpk_status.is_revoked {
for (email, status) in &tpk_status.email_status {
if status == &EmailAddressStatus::NotPublished {
db.set_email_published(&key_fpr.clone().try_into().unwrap(), &email)
.unwrap();
}
}
}
}
if let Err(ref e) = result {
let key_fpr = match primary_key {
Packet::PublicKey(key) => key.fingerprint().to_hex(),
Packet::SecretKey(key) => key.fingerprint().to_hex(),
_ => "Unknown".to_owned(),
};
let error = format!("{}:{:05}:{}: {}", filename, stats.count_total, key_fpr, e);
let error = format!(
"{}:{:05}:{}: {}",
filename,
stats.count_total,
key_fpr.to_hex(),
e
);
progress_bar.println(error);
return ();
}
stats.update(result);
})?;
@@ -198,46 +211,3 @@ fn read_file_to_tpks(
fn import_key(db: &KeyDatabase, packets: Vec<Packet>) -> Result<ImportResult> {
openpgp::Cert::from_packets(packets.into_iter()).and_then(|tpk| db.merge(tpk))
}
/*
#[cfg(test)]
mod import_tests {
use std::fs::File;
use tempfile::tempdir;
use openpgp::serialize::Serialize;
use super::*;
#[test]
fn import() {
let root = tempdir().unwrap();
let db = KeyDatabase::new_from_base(root.path().to_path_buf()).unwrap();
// Generate a key and import it.
let (tpk, _) = openpgp::tpk::TPKBuilder::autocrypt(
None, Some("foo@invalid.example.com".into()))
.generate().unwrap();
let import_me = root.path().join("import-me");
tpk.serialize(&mut File::create(&import_me).unwrap()).unwrap();
do_import(root.path().to_path_buf(), vec![import_me]).unwrap();
let check = |query: &str| {
let tpk_ = db.lookup(&query.parse().unwrap()).unwrap().unwrap();
assert_eq!(tpk.fingerprint(), tpk_.fingerprint());
assert_eq!(tpk.subkeys().map(|skb| skb.subkey().fingerprint())
.collect::<Vec<_>>(),
tpk_.subkeys().map(|skb| skb.subkey().fingerprint())
.collect::<Vec<_>>());
assert_eq!(tpk_.userids().count(), 0);
};
check(&format!("{}", tpk.primary().fingerprint()));
check(&format!("{}", tpk.primary().fingerprint().to_keyid()));
check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
.fingerprint()));
check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
.fingerprint().to_keyid()));
}
}
*/

View File

@@ -18,7 +18,6 @@ use anyhow::Result;
use clap::{App, Arg, SubCommand};
mod import;
mod regenerate;
#[derive(Deserialize)]
pub struct HagridConfigs {
@@ -34,10 +33,10 @@ pub struct HagridConfigs {
pub struct HagridConfig {
_template_dir: Option<PathBuf>,
keys_internal_dir: Option<PathBuf>,
keys_external_dir: Option<PathBuf>,
_keys_external_dir: Option<PathBuf>,
_assets_dir: Option<PathBuf>,
_token_dir: Option<PathBuf>,
tmp_dir: Option<PathBuf>,
_tmp_dir: Option<PathBuf>,
_maintenance_file: Option<PathBuf>,
}
@@ -62,16 +61,9 @@ fn main() -> Result<()> {
.default_value("prod")
.possible_values(&["dev", "stage", "prod"]),
)
.subcommand(SubCommand::with_name("regenerate").about("Regenerate symlink directory"))
.subcommand(
SubCommand::with_name("import")
.about("Import keys into Hagrid")
.arg(
Arg::with_name("dry run")
.short("n")
.long("dry-run")
.help("don't actually keep imported keys"),
)
.arg(
Arg::with_name("keyring files")
.required(true)
@@ -91,16 +83,13 @@ fn main() -> Result<()> {
};
if let Some(matches) = matches.subcommand_matches("import") {
let dry_run = matches.occurrences_of("dry run") > 0;
let keyrings: Vec<PathBuf> = matches
.values_of_lossy("keyring files")
.unwrap()
.iter()
.map(|arg| PathBuf::from_str(arg).unwrap())
.collect();
import::do_import(&config, dry_run, keyrings)?;
} else if let Some(_matches) = matches.subcommand_matches("regenerate") {
regenerate::do_regenerate(&config)?;
import::do_import(&config, keyrings)?;
} else {
println!("{}", matches.usage());
}

View File

@@ -1,133 +0,0 @@
use anyhow::Result;
use std::path::Path;
use std::time::Instant;
use indicatif::{ProgressBar, ProgressStyle};
use walkdir::WalkDir;
use database::types::Fingerprint;
use database::{Database, KeyDatabase, RegenerateResult};
use HagridConfig;
/// Running counters for a regeneration pass, rendered onto a progress bar.
struct RegenerateStats<'a> {
    progress: &'a ProgressBar,   // bar used for message output
    prefix: String,              // fingerprint prefix shown in the status line
    count_total: u64,            // keys processed overall
    count_err: u64,              // keys whose regeneration returned an error
    count_updated: u64,          // keys whose links were updated
    count_unchanged: u64,        // keys whose links were already correct
    count_partial: u64,          // keys processed in the current rate window
    start_time_partial: Instant, // start of the current rate-sampling window
    kps_partial: u64,            // last measured throughput (keys per second)
}
impl<'a> RegenerateStats<'a> {
    /// Creates a zeroed stats tracker bound to the given progress bar.
    fn new(progress: &'a ProgressBar) -> Self {
        Self {
            progress,
            prefix: "".to_owned(),
            count_total: 0,
            count_err: 0,
            count_updated: 0,
            count_unchanged: 0,
            count_partial: 0,
            start_time_partial: Instant::now(),
            kps_partial: 0,
        }
    }

    /// Records the outcome of regenerating one key and refreshes the display.
    fn update(&mut self, result: Result<RegenerateResult>, fpr: Fingerprint) {
        // Count this key toward the totals and the current rate window.
        self.count_total += 1;
        self.count_partial += 1;
        // Sample the fingerprint prefix only every 10th key, to keep the
        // displayed prefix roughly current without churning on every key.
        if (self.count_total % 10) == 0 {
            self.prefix = fpr.to_string()[0..4].to_owned();
        }
        match result {
            Err(e) => {
                self.progress.println(format!("{}: {}", fpr, e));
                self.count_err += 1;
            }
            Ok(RegenerateResult::Updated) => self.count_updated += 1,
            Ok(RegenerateResult::Unchanged) => self.count_unchanged += 1,
        }
        self.progress_update();
    }

    /// Redraws the status message. Throttled to every 10th key; throughput
    /// is recomputed over windows of at least 1000 keys.
    fn progress_update(&mut self) {
        if (self.count_total % 10) != 0 {
            return;
        }
        if self.count_partial >= 1000 {
            // +1 guards against division by zero on very fast windows.
            let runtime = (self.start_time_partial.elapsed().as_millis() + 1) as u64;
            self.kps_partial = (self.count_partial * 1000) / runtime;
            self.start_time_partial = Instant::now();
            self.count_partial = 0;
        }
        self.progress.set_message(&format!(
            "prefix {} regenerated {:5} keys, {:5} Updated {:5} Unchanged {:5} Errors ({:3} keys/s)",
            self.prefix, self.count_total, self.count_updated, self.count_unchanged, self.count_err, self.kps_partial));
    }
}
/// Walks the by-email symlink tree and regenerates the database links for
/// every key found there, reporting progress on a progress bar.
pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
    let db = KeyDatabase::new_internal(
        config.keys_internal_dir.as_ref().unwrap(),
        config.keys_external_dir.as_ref().unwrap(),
        config.tmp_dir.as_ref().unwrap(),
        false,
    )?;

    let by_email_dir = config
        .keys_external_dir
        .as_ref()
        .unwrap()
        .join("links")
        .join("by-email");

    // Collect first-level subdirectories in stable name order, so the
    // traversal is deterministic across runs.
    let subdirs: Vec<_> = WalkDir::new(by_email_dir)
        .min_depth(1)
        .max_depth(1)
        .sort_by(|a, b| a.file_name().cmp(b.file_name()))
        .into_iter()
        .flatten()
        .map(|entry| entry.into_path())
        .collect();

    let progress = ProgressBar::new(subdirs.len() as u64);
    progress.set_style(
        ProgressStyle::default_bar()
            .template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
            .progress_chars("##-"),
    );

    let mut stats = RegenerateStats::new(&progress);
    for subdir in subdirs {
        progress.inc(1);
        regenerate_dir_recursively(&db, &mut stats, &subdir)?;
    }
    progress.finish();
    Ok(())
}
/// Regenerates links for every key file found (recursively) below `dir`,
/// feeding each result into `stats`.
fn regenerate_dir_recursively(
    db: &KeyDatabase,
    stats: &mut RegenerateStats,
    dir: &Path,
) -> Result<()> {
    // Walk all regular files under `dir`, following symlinks.
    let files = WalkDir::new(dir)
        .follow_links(true)
        .into_iter()
        .flatten()
        .filter(|entry| entry.file_type().is_file());

    for entry in files {
        let path = entry.into_path();
        let fpr = KeyDatabase::path_to_primary(&path).unwrap();
        stats.update(db.regenerate_links(&fpr), fpr);
    }
    Ok(())
}

View File

@@ -1 +1 @@
1.70.0
1.82.0

View File

@@ -17,6 +17,7 @@ pkgs.mkShell {
];
buildInputs = with pkgs; [
sqlite
openssl
clang

View File

@@ -50,7 +50,7 @@ fn main() {
fn real_main() -> Result<()> {
let opt = Opt::from_args();
let db = KeyDatabase::new_from_base(opt.base.canonicalize()?)?;
let db = KeyDatabase::new_file(opt.base.canonicalize()?)?;
delete(&db, &opt.query.parse()?, opt.all_bindings, opt.all)
}

View File

@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
use crate::counters;
use lettre::message::{header, Mailbox, MultiPart, SinglePart};
use lettre::{FileTransport, SendmailTransport, Transport as LettreTransport};
use lettre::{FileTransport, SendmailTransport, SmtpTransport, Transport as LettreTransport};
use rocket_dyn_templates::handlebars::Handlebars;
use serde::Serialize;
use uuid::Uuid;
@@ -53,6 +53,7 @@ pub struct Service {
}
/// How outgoing mail is delivered.
enum Transport {
    /// Via an unencrypted SMTP connection to localhost.
    LocalSmtp,
    /// Via the local sendmail transport.
    Sendmail,
    /// By storing each message as a file in the given directory.
    Filemail(PathBuf),
}
@@ -63,6 +64,11 @@ impl Service {
Self::new(from, base_uri, template_dir, Transport::Sendmail)
}
/// Creates a mail service that sends via an unencrypted SMTP connection
/// to the local host.
pub fn localsmtp(from: &str, base_uri: &str, template_dir: &Path) -> Result<Self> {
    Self::new(from, base_uri, template_dir, Transport::LocalSmtp)
}
/// Sends mail by storing it in the given directory.
pub fn filemail(from: &str, base_uri: &str, template_dir: &Path, path: &Path) -> Result<Self> {
Self::new(
@@ -246,6 +252,10 @@ impl Service {
)?;
match self.transport {
Transport::LocalSmtp => {
let transport = SmtpTransport::unencrypted_localhost();
transport.send(&email)?;
}
Transport::Sendmail => {
let transport = SendmailTransport::new();
transport.send(&email)?;

View File

@@ -1,64 +1,50 @@
use ring::aead::{open_in_place, seal_in_place, Algorithm, AES_256_GCM};
use ring::aead::{OpeningKey, SealingKey};
use ring::digest;
use ring::hmac;
use ring::rand::{SecureRandom, SystemRandom};
use aes_gcm::{
aead::{Aead, OsRng},
AeadCore, Aes256Gcm, Key, KeyInit, Nonce,
};
use sha2::{Digest, Sha256};
// Keep these in sync, and keep the key len synced with the `private` docs as
// well as the `KEYS_INFO` const in secure::Key.
static ALGO: &Algorithm = &AES_256_GCM;
const NONCE_LEN: usize = 12;
pub struct SealedState {
sealing_key: SealingKey,
opening_key: OpeningKey,
cipher: Aes256Gcm,
}
impl SealedState {
pub fn new(secret: &str) -> Self {
let salt = hmac::SigningKey::new(&digest::SHA256, b"hagrid");
let mut key = vec![0; 32];
ring::hkdf::extract_and_expand(&salt, secret.as_bytes(), b"", &mut key);
let mut hash = Sha256::new();
hash.update(b"hagrid");
hash.update(secret);
let hashed_secret = hash.finalize();
let key = Key::<Aes256Gcm>::from_slice(&hashed_secret);
let cipher = Aes256Gcm::new(&key);
let sealing_key = SealingKey::new(ALGO, key.as_ref()).expect("sealing key creation");
let opening_key = OpeningKey::new(ALGO, key.as_ref()).expect("sealing key creation");
SealedState {
sealing_key,
opening_key,
}
SealedState { cipher }
}
pub fn unseal(&self, data: &[u8]) -> Result<String, &'static str> {
if data.len() < NONCE_LEN {
return Err("invalid sealed value: too short");
}
let (nonce, sealed) = data.split_at(NONCE_LEN);
let mut sealed_copy = sealed.to_vec();
let unsealed = open_in_place(&self.opening_key, nonce, &[], 0, &mut sealed_copy)
let (sealed, nonce) = data.split_at(data.len() - NONCE_LEN);
let unsealed = self
.cipher
.decrypt(Nonce::from_slice(nonce), sealed)
.map_err(|_| "invalid key/nonce/value: bad seal")?;
::std::str::from_utf8(unsealed)
core::str::from_utf8(&unsealed)
.map(|s| s.to_string())
.map_err(|_| "bad unsealed utf8")
}
pub fn seal(&self, input: &str) -> Vec<u8> {
let mut data;
let output_len = {
let overhead = ALGO.tag_len();
data = vec![0; NONCE_LEN + input.len() + overhead];
let (nonce, in_out) = data.split_at_mut(NONCE_LEN);
SystemRandom::new()
.fill(nonce)
.expect("couldn't random fill nonce");
in_out[..input.len()].copy_from_slice(input.as_bytes());
seal_in_place(&self.sealing_key, nonce, &[], in_out, overhead).expect("in-place seal")
};
data[..(NONCE_LEN + output_len)].to_vec()
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
let mut sealed = self
.cipher
.encrypt(&nonce, input.as_bytes())
.expect("sealing works");
sealed.extend(nonce);
sealed
}
}
@@ -69,8 +55,10 @@ mod tests {
#[test]
fn test_encrypt_decrypt() {
let sv = SealedState::new("swag");
let sealed = sv.seal("test");
// use a different instance to make sure no internal state remains
let sv = SealedState::new("swag");
let unsealed = sv.unseal(sealed.as_slice()).unwrap();
assert_eq!("test", unsealed);

View File

@@ -113,7 +113,7 @@ mod tests {
let payload = TestStruct1 {
payload: "hello".to_owned(),
};
let token = "rwM_S9gZaRQaf6DLvmWtZSipQhH_G5ronSIJv2FrMdwGBPSYYQ-1jaP58dTHU5WuC14vb8jxmz2Xf_b3pqzpCGTEJj9drm4t";
let token = "C6fCPAGv93nZqDQXodl-bsDgzkxqbjDtbeR6Be4v_UHJfL2UJxG2imzmUlK1PfLT4QzNIRWsdFDYWrx_aCgLZ4MgVQWYyazn";
let mt = Service::init("secret", 60);
let check_result = mt.check(token);

View File

@@ -319,7 +319,7 @@ pub fn key_to_response_plain(
return MyResponse::not_found_plain(describe_query_error(&i18n, &query));
};
match db.by_fpr(&fp) {
match db.by_primary_fpr(&fp) {
Some(armored) => MyResponse::key(armored, &fp),
None => MyResponse::not_found_plain(describe_query_error(&i18n, &query)),
}
@@ -516,11 +516,9 @@ fn configure_prometheus(config: &Figment) -> Option<PrometheusMetrics> {
fn configure_db_service(config: &Figment) -> Result<KeyDatabase> {
let keys_internal_dir: PathBuf = config.extract_inner("keys_internal_dir")?;
let keys_external_dir: PathBuf = config.extract_inner("keys_external_dir")?;
let tmp_dir: PathBuf = config.extract_inner("tmp_dir")?;
let fs_db = KeyDatabase::new(keys_internal_dir, keys_external_dir, tmp_dir)?;
Ok(fs_db)
let sqlite_db = KeyDatabase::new_file(keys_internal_dir)?;
Ok(sqlite_db)
}
fn configure_hagrid_state(config: &Figment) -> Result<HagridState> {
@@ -557,9 +555,12 @@ fn configure_mail_service(config: &Figment) -> Result<mail::Service> {
let from: String = config.extract_inner("from")?;
let filemail_into: Option<PathBuf> = config.extract_inner::<PathBuf>("filemail_into").ok();
let local_smtp: Option<bool> = config.extract_inner::<bool>("local_smtp").ok();
if let Some(path) = filemail_into {
mail::Service::filemail(&from, &base_uri, &email_template_dir, &path)
} else if local_smtp == Some(true) {
mail::Service::localsmtp(&from, &base_uri, &email_template_dir)
} else {
mail::Service::sendmail(&from, &base_uri, &email_template_dir)
}

21
tester/Cargo.toml Normal file
View File

@@ -0,0 +1,21 @@
[package]
name = "tester"
version = "0.1.0"
authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
[dependencies]
anyhow = "1"
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
log = "0"
rand = "0.6"
serde = { version = "1.0", features = ["derive"] }
serde_derive = "1"
serde_json = "1"
time = "0.1"
url = "1"
hex = "0.3"
base64 = "0.10"
idna = "0.1"
fs2 = "0.4"
clap = "2"
indicatif = "0.11"

37
tester/src/generate.rs Normal file
View File

@@ -0,0 +1,37 @@
use std::{fs::File, io::Write, path::Path};
use anyhow::Result;
use indicatif::{ProgressBar, ProgressStyle};
use openpgp::{cert::CertBuilder, serialize::Serialize};
use crate::util;
/// Generates `count` fresh certificates, writing the serialized certs to
/// `output_path` and, if `fprs_path` is given, one fingerprint line per
/// cert to that file.
///
/// Returns an error if a file cannot be created or written, or if key
/// generation fails.
pub fn do_generate(count: u64, output_path: &Path, fprs_path: Option<&Path>) -> Result<()> {
    let progress_bar = ProgressBar::new(count);
    progress_bar.set_style(
        ProgressStyle::default_bar()
            .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
            .progress_chars("##-"),
    );
    // Redraw at most ~100 times over the whole run.
    progress_bar.set_draw_delta(count / 100);

    let mut output = File::create(output_path)?;
    // `map` + `transpose` turns Option<&Path> into Result<Option<File>>,
    // so a failure to create the fingerprint file is reported immediately
    // instead of being hand-unwrapped via if/else.
    let mut output_fprs = fprs_path.map(File::create).transpose()?;

    for i in 0..count {
        let (cert, _) = CertBuilder::general_purpose(None, Some(util::gen_email(i))).generate()?;
        cert.serialize(&mut output)?;
        if let Some(ref mut output_fprs) = output_fprs {
            // NOTE(review): this writes the cert's Display form — confirm it
            // prints the fingerprint in the format downstream tools expect.
            writeln!(output_fprs, "{}", cert)?;
        }
        progress_bar.inc(1);
    }
    progress_bar.finish();
    Ok(())
}

49
tester/src/genreqs.rs Normal file
View File

@@ -0,0 +1,49 @@
use std::io::Write;
use std::{fs::File, io, io::BufRead, path::Path};
use anyhow::Result;
use rand::seq::SliceRandom;
use rand::{thread_rng, Rng};
use crate::util;
pub fn do_genreqs(host: &str, fprs_path: &Path) -> Result<()> {
let file = File::open(fprs_path)?;
let fingerprints: Vec<String> = io::BufReader::new(file).lines().flatten().collect();
/* possible requests:
* /vks/v1/by-fingerprint/
* /vks/v1/by-keyid/
* /vks/v1/by-email/
*/
let mut rng = thread_rng();
let mut stdout = io::LineWriter::new(io::stdout());
loop {
let result = match rng.gen_range(0, 3) {
0 => {
let email = util::gen_email(rng.gen_range(0, fingerprints.len() as u64));
stdout.write_fmt(format_args!("GET {}/vks/v1/by-email/{}\n", host, email))
}
1 => {
let random_fpr = fingerprints.choose(&mut rng).unwrap();
stdout.write_fmt(format_args!(
"GET {}/vks/v1/by-keyid/{}\n",
host,
&random_fpr[24..40]
))
}
_ => {
let random_fpr = fingerprints.choose(&mut rng).unwrap();
stdout.write_fmt(format_args!(
"GET {}/vks/v1/by-fingerprint/{}\n",
host, random_fpr
))
}
};
if result.is_err() {
return Ok(());
}
}
}

92
tester/src/main.rs Normal file
View File

@@ -0,0 +1,92 @@
extern crate anyhow;
extern crate clap;
extern crate indicatif;
extern crate rand;
extern crate sequoia_openpgp as openpgp;
extern crate serde_derive;
use std::path::PathBuf;
use anyhow::Result;
use clap::{App, Arg, SubCommand};
mod generate;
mod genreqs;
mod util;
/// CLI entry point: dispatches to certificate generation (`generate`)
/// or request-stream generation (`gen-reqs`); prints usage otherwise.
fn main() -> Result<()> {
    let matches = App::new("Hagrid Tester")
        .version("0.1")
        .about("Control hagrid database externally")
        .arg(
            // NOTE(review): declared but currently unused by any subcommand.
            Arg::with_name("config")
                .short("c")
                .long("config")
                .value_name("FILE")
                .help("Sets a custom config file")
                .takes_value(true),
        )
        .subcommand(
            SubCommand::with_name("generate")
                .about("Generate a test set of certificates")
                .arg(
                    Arg::with_name("cert count")
                        .long("cert-count")
                        .default_value("100000")
                        // Fixed typo: "certifictes" -> "certificates".
                        .help("number of certificates to generate"),
                )
                .arg(
                    Arg::with_name("certs output file")
                        .long("output-file")
                        .default_value("keyring.pub.pgp")
                        .help("path to file to store the certificates in"),
                )
                .arg(
                    Arg::with_name("fingerprints output file")
                        .long("fingerprints-file")
                        .default_value("fingerprints.txt")
                        .help("path to file to store fingerprints in"),
                ),
        )
        .subcommand(
            SubCommand::with_name("gen-reqs")
                .about("generate requests")
                .arg(
                    Arg::with_name("fingerprints file")
                        .long("fingerprints-file")
                        .default_value("fingerprints.txt")
                        .help("path to read fingerprints from"),
                )
                .arg(Arg::with_name("host").index(1).required(true)),
        )
        .get_matches();
    if let Some(matches) = matches.subcommand_matches("generate") {
        // unwrap()s are safe: every arg has a default_value, and the
        // count default is a valid u64 literal.
        let count: u64 = matches.value_of("cert count").unwrap().parse().unwrap();
        let output_certs: PathBuf = matches
            .value_of("certs output file")
            .unwrap()
            .parse()
            .unwrap();
        let output_fprs: Option<PathBuf> = matches
            .value_of("fingerprints output file")
            .map(|s| s.parse().unwrap());
        generate::do_generate(
            count,
            output_certs.as_path(),
            output_fprs.as_ref().map(|f| f.as_path()),
        )?;
    } else if let Some(matches) = matches.subcommand_matches("gen-reqs") {
        // "host" is .required(true), so unwrap() cannot fail here.
        let host = matches.value_of("host").unwrap();
        let fprs_file: PathBuf = matches
            .value_of("fingerprints file")
            .map(|s| s.parse().unwrap())
            .unwrap();
        genreqs::do_genreqs(host, fprs_file.as_path())?;
    } else {
        // No subcommand given: show usage instead of failing.
        println!("{}", matches.usage());
    }
    Ok(())
}

3
tester/src/util.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Build the deterministic test e-mail address for certificate index `i`,
/// zero-padding the index to at least seven digits
/// (e.g. 42 -> "0000042@hagrid.invalid").
pub fn gen_email(i: u64) -> String {
    let mut address = format!("{:07}", i);
    address.push_str("@hagrid.invalid");
    address
}