Mirror of https://gitlab.com/keys.openpgp.org/hagrid.git, synced 2025-10-07 09:02:41 +02:00
Compare commits
28 Commits
Commit SHAs in this comparison:
3293dd8f78, c7a032eb69, 475bcbffb8, dafed3d492, a504b0ea12, df6bfb2d84, b5b5879474,
5778aaed84, 7beb5209af, 4787816581, 359475f89f, 253d672d47, e0aeef7ddc, 44db398a1c,
8ea89d3e0e, 0d25da7138, e0f8352ac6, dca8afa1e6, ea44f52a16, b4d92f0ec1, 26ef2f6e1c,
cfd9fd8eb3, 13ddd4ff3a, a9440c6d0a, fe2337507a, 36dff563fc, da5648488b, 7f304929ea
.gitignore (vendored, 2 changes)
@@ -7,3 +7,5 @@
 target
 *.po~
 /dist/templates/localized
+
+result
@@ -3,7 +3,7 @@ build, test and lint:
   interruptible: true
   script:
     - apt update -qy
-    - apt install -qy build-essential pkg-config clang libclang-dev nettle-dev gettext zsh
+    - apt install -qy build-essential pkg-config clang libclang-dev libssl-dev gettext zsh
     - rustup component add clippy
     - rustup component add rustfmt
     - ./make-translated-templates
Cargo.lock (generated, 1399 changes): file diff suppressed because it is too large.
Cargo.toml (10 changes)
@@ -1,6 +1,6 @@
 [package]
 name = "hagrid"
-version = "1.3.0"
+version = "2.0.0"
 authors = ["Vincent Breitmoser <look@my.amazin.horse>", "Kai Michaelis <kai@sequoia-pgp.org>", "Justus Winter <justus@sequoia-pgp.org>"]
 build = "build.rs"
 default-run = "hagrid"
@@ -10,6 +10,7 @@ edition = "2018"
 members = [
     "database",
     "hagridctl",
+    "tester",
 ]
 
 [dependencies]
@@ -19,7 +20,7 @@ anyhow = "1"
 rocket = { version = "0.5", features = [ "json" ] }
 rocket_dyn_templates = { version = "0.1", features = ["handlebars"] }
 rocket_codegen = "0.5"
-sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
+sequoia-openpgp = { version = "=1.17.0", default-features = false, features = ["crypto-openssl"] }
 multipart = "0"
 serde = "1"
 serde_derive = "1"
@@ -29,7 +30,8 @@ tempfile = "3"
 structopt = "0.2"
 url = "1"
 num_cpus = "1"
-ring = "0.13"
+aes-gcm = "0.10"
+sha2 = "0.10"
 base64 = "0.10"
 uuid = { version = "0.7", features = [ "v4" ] }
 rocket_prometheus = "0.10"
@@ -40,7 +42,7 @@ gettext = "0.4"
 glob = "0.3"
 hyperx = "1.4"
 # this is a slightly annoying update, so keeping this back for now
-lettre = { version = "=0.10.0-rc.5", default-features = false, features = ["builder", "file-transport", "sendmail-transport"] }
+lettre = { version = "=0.10.0-rc.5", default-features = false, features = ["builder", "file-transport", "sendmail-transport", "smtp-transport"] }
 
 [dependencies.rocket_i18n]
 # git = "https://github.com/Plume-org/rocket_i18n"
README.md (19 changes)
@@ -10,6 +10,11 @@ Please note that Hagrid is built and maintained only for the service at
 keys.openpgp.org. It is not maintained or officially supported as
 deployable software.
 
+Compatibility note: Hagrid v2.0 uses an sqlite certificate store instead of the
+previous file-based database. This means that it also no longer supports serving
+certificates directly via reverse proxy. You can use hagridctl to dump and import
+an old database.
+
 License
 -------
 
@@ -37,7 +42,7 @@ Additionally, some external dependencies are required.
 Get them (on Debian or Ubuntu) with
 
 ```bash
-sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext
+sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext libsqlite3-dev
 ```
 
 After Rust and the other dependencies are installed, copy the config file, then simply compile and run:
@@ -55,18 +60,6 @@ will be statically built, and can be copied anywhere. You will also need to
 adjust `Rocket.toml` accordingly. Hagrid uses `sendmail` for mailing, so you
 also need a working local mailer setup.
 
-Reverse Proxy
--------------
-
-Hagrid is designed to defer lookups to reverse proxy server like Nginx.
-Lookups via `/vks/v1/by-finingerprint`, `/vks/v1/by-keyid`, and
-`/vks/v1/by-email` can be handled by a robust and performant HTTP server.
-A sample configuration for nginx is part of the repository (`nginx.conf`,
-`hagrid-routes.conf`).
-Note that we make use of
-[ngx_http_lua_module](https://github.com/openresty/lua-nginx-module) to
-perform some request rewrites.
-
 Community
 ---------
 
database/Cargo.toml
@@ -5,7 +5,7 @@ authors = ["Kai Michaelis <kai@sequoia-pgp.org>"]
 
 [dependencies]
 anyhow = "1"
-sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
+sequoia-openpgp = { version = "=1.17.0", default-features = false, features = ["crypto-openssl"] }
 log = "0"
 rand = "0.6"
 serde = { version = "1.0", features = ["derive"] }
@@ -22,6 +22,10 @@ fs2 = "0.4"
 walkdir = "2"
 chrono = "0.4"
 zbase32 = "0.1"
+r2d2 = "0.8"
+r2d2_sqlite = "0.24"
+rusqlite = { version = "0.31", features = ["trace"] }
+self_cell = "1"
 
 [lib]
 name = "hagrid_database"
database/src/fs.rs
@@ -24,6 +24,8 @@ use tempfile::NamedTempFile;
 use openpgp::Cert;
 use openpgp_utils::POLICY;
 
+use crate::DatabaseTransaction;
+
 pub struct Filesystem {
     tmp_dir: PathBuf,
 
@@ -263,54 +265,6 @@ impl Filesystem {
         }
     }
 
-    fn link_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
-        let path = self.fingerprint_to_path_published(fpr);
-        let link = self.link_by_email(email);
-        let target = diff_paths(&path, link.parent().unwrap()).unwrap();
-
-        if link == target {
-            return Ok(());
-        }
-
-        symlink(&target, ensure_parent(&link)?)
-    }
-
-    fn link_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
-        let path = self.fingerprint_to_path_published_wkd(fpr);
-        let link = self.link_wkd_by_email(email);
-        let target = diff_paths(&path, link.parent().unwrap()).unwrap();
-
-        if link == target {
-            return Ok(());
-        }
-
-        symlink(&target, ensure_parent(&link)?)
-    }
-
-    fn unlink_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
-        let link = self.link_by_email(email);
-
-        let expected = diff_paths(
-            &self.fingerprint_to_path_published(fpr),
-            link.parent().unwrap(),
-        )
-        .unwrap();
-
-        symlink_unlink_with_check(&link, &expected)
-    }
-
-    fn unlink_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
-        let link = self.link_wkd_by_email(email);
-
-        let expected = diff_paths(
-            &self.fingerprint_to_path_published_wkd(fpr),
-            link.parent().unwrap(),
-        )
-        .unwrap();
-
-        symlink_unlink_with_check(&link, &expected)
-    }
-
     fn open_logfile(&self, file_name: &str) -> Result<File> {
         let file_path = self.keys_dir_log.join(file_name);
         Ok(OpenOptions::new()
@@ -387,52 +341,93 @@ fn symlink_unlink_with_check(link: &Path, expected: &Path) -> Result<()> {
     Ok(())
 }
 
-impl Database for Filesystem {
-    type MutexGuard = FlockMutexGuard;
+pub struct FilesystemTransaction<'a> {
+    db: &'a Filesystem,
+    _flock: FlockMutexGuard,
+}
+
+impl<'a> FilesystemTransaction<'a> {
+    fn link_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
+        let path = self.db.fingerprint_to_path_published(fpr);
+        let link = self.db.link_by_email(email);
+        let target = diff_paths(&path, link.parent().unwrap()).unwrap();
+
+        if link == target {
+            return Ok(());
+        }
+
+        symlink(&target, ensure_parent(&link)?)
+    }
+
+    fn link_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
+        let path = self.db.fingerprint_to_path_published_wkd(fpr);
+        let link = self.db.link_wkd_by_email(email);
+        let target = diff_paths(&path, link.parent().unwrap()).unwrap();
+
+        if link == target {
+            return Ok(());
+        }
+
+        symlink(&target, ensure_parent(&link)?)
+    }
+
+    fn unlink_email_vks(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
+        let link = self.db.link_by_email(email);
+
+        let expected = diff_paths(
+            &self.db.fingerprint_to_path_published(fpr),
+            link.parent().unwrap(),
+        )
+        .unwrap();
+
+        symlink_unlink_with_check(&link, &expected)
+    }
+
+    fn unlink_email_wkd(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
+        let link = self.db.link_wkd_by_email(email);
+
+        let expected = diff_paths(
+            &self.db.fingerprint_to_path_published_wkd(fpr),
+            link.parent().unwrap(),
+        )
+        .unwrap();
+
+        symlink_unlink_with_check(&link, &expected)
+    }
+}
+
+impl<'a> DatabaseTransaction<'a> for FilesystemTransaction<'a> {
     type TempCert = NamedTempFile;
 
-    fn lock(&self) -> Result<Self::MutexGuard> {
-        FlockMutexGuard::lock(&self.keys_internal_dir)
+    fn commit(self) -> Result<()> {
+        Ok(())
     }
 
     fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert> {
         let mut tempfile = tempfile::Builder::new()
             .prefix("key")
             .rand_bytes(16)
-            .tempfile_in(&self.tmp_dir)?;
+            .tempfile_in(&self.db.tmp_dir)?;
         tempfile.write_all(content).unwrap();
         Ok(tempfile)
     }
 
-    fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()> {
-        let timestamp = SystemTime::now()
-            .duration_since(SystemTime::UNIX_EPOCH)
-            .unwrap()
-            .as_secs();
-        let fingerprint_line = format!("{:010} {}\n", timestamp, fpr_primary);
-
-        self.open_logfile(filename)?
-            .write_all(fingerprint_line.as_bytes())?;
-
-        Ok(())
-    }
-
     fn move_tmp_to_full(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
-        if self.dry_run {
+        if self.db.dry_run {
             return Ok(());
         }
         set_permissions(file.path(), Permissions::from_mode(0o640))?;
-        let target = self.fingerprint_to_path_full(fpr);
+        let target = self.db.fingerprint_to_path_full(fpr);
         file.persist(ensure_parent(&target)?)?;
         Ok(())
     }
 
     fn move_tmp_to_published(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
-        if self.dry_run {
+        if self.db.dry_run {
             return Ok(());
         }
         set_permissions(file.path(), Permissions::from_mode(0o644))?;
-        let target = self.fingerprint_to_path_published(fpr);
+        let target = self.db.fingerprint_to_path_published(fpr);
         file.persist(ensure_parent(&target)?)?;
         Ok(())
     }
@@ -442,10 +437,10 @@ impl Database for Filesystem {
         file: Option<Self::TempCert>,
         fpr: &Fingerprint,
     ) -> Result<()> {
-        if self.dry_run {
+        if self.db.dry_run {
             return Ok(());
         }
-        let target = self.fingerprint_to_path_published_wkd(fpr);
+        let target = self.db.fingerprint_to_path_published_wkd(fpr);
         if let Some(file) = file {
             set_permissions(file.path(), Permissions::from_mode(0o644))?;
             file.persist(ensure_parent(&target)?)?;
@@ -460,65 +455,17 @@ impl Database for Filesystem {
         let mut tempfile = tempfile::Builder::new()
             .prefix("key")
             .rand_bytes(16)
-            .tempfile_in(&self.tmp_dir)?;
+            .tempfile_in(&self.db.tmp_dir)?;
         tempfile.write_all(content).unwrap();
 
-        let target = self.fingerprint_to_path_quarantined(fpr);
+        let target = self.db.fingerprint_to_path_quarantined(fpr);
         tempfile.persist(ensure_parent(&target)?)?;
 
         Ok(())
     }
 
-    fn check_link_fpr(
-        &self,
-        fpr: &Fingerprint,
-        fpr_target: &Fingerprint,
-    ) -> Result<Option<Fingerprint>> {
-        let link_keyid = self.link_by_keyid(&fpr.into());
-        let link_fpr = self.link_by_fingerprint(fpr);
-
-        let path_published = self.fingerprint_to_path_published(fpr_target);
-
-        if let Ok(link_fpr_target) = link_fpr.canonicalize() {
-            if !link_fpr_target.ends_with(&path_published) {
-                info!("Fingerprint points to different key for {} (expected {:?} to be suffix of {:?})",
-                      fpr, &path_published, &link_fpr_target);
-                return Err(anyhow!(format!("Fingerprint collision for key {}", fpr)));
-            }
-        }
-
-        if let Ok(link_keyid_target) = link_keyid.canonicalize() {
-            if !link_keyid_target.ends_with(&path_published) {
-                info!(
-                    "KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
-                    fpr, &path_published, &link_keyid_target
-                );
-                return Err(anyhow!(format!("KeyID collision for key {}", fpr)));
-            }
-        }
-
-        if !link_fpr.exists() || !link_keyid.exists() {
-            Ok(Some(fpr.clone()))
-        } else {
-            Ok(None)
-        }
-    }
-
-    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
-        use super::Query::*;
-        let path = match term {
-            ByFingerprint(ref fp) => self.link_by_fingerprint(fp),
-            ByKeyID(ref keyid) => self.link_by_keyid(keyid),
-            ByEmail(ref email) => self.link_by_email(email),
-            _ => return None,
-        };
-        path.read_link()
-            .ok()
-            .and_then(|link_path| Filesystem::path_to_fingerprint(&link_path))
-    }
-
     fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
-        if self.dry_run {
+        if self.db.dry_run {
             return Ok(());
         }
 
@@ -535,14 +482,14 @@ impl Database for Filesystem {
     }
 
     fn link_fpr(&self, from: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
-        if self.dry_run {
+        if self.db.dry_run {
             return Ok(());
         }
 
-        let link_fpr = self.link_by_fingerprint(from);
-        let link_keyid = self.link_by_keyid(&from.into());
+        let link_fpr = self.db.link_by_fingerprint(from);
+        let link_keyid = self.db.link_by_keyid(&from.into());
         let target = diff_paths(
-            &self.fingerprint_to_path_published(primary_fpr),
+            &self.db.fingerprint_to_path_published(primary_fpr),
             link_fpr.parent().unwrap(),
         )
         .unwrap();
@@ -552,10 +499,10 @@ impl Database for Filesystem {
     }
 
     fn unlink_fpr(&self, from: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
-        let link_fpr = self.link_by_fingerprint(from);
-        let link_keyid = self.link_by_keyid(&from.into());
+        let link_fpr = self.db.link_by_fingerprint(from);
+        let link_keyid = self.db.link_by_keyid(&from.into());
         let expected = diff_paths(
-            &self.fingerprint_to_path_published(primary_fpr),
+            &self.db.fingerprint_to_path_published(primary_fpr),
             link_fpr.parent().unwrap(),
         )
         .unwrap();
@@ -573,6 +520,44 @@ impl Database for Filesystem {
 
         Ok(())
     }
+}
+
+impl<'a> Database<'a> for Filesystem {
+    type Transaction = FilesystemTransaction<'a>;
+
+    fn transaction(&'a self) -> Result<FilesystemTransaction<'a>> {
+        let flock = FlockMutexGuard::lock(&self.keys_internal_dir)?;
+        Ok(FilesystemTransaction {
+            db: self,
+            _flock: flock,
+        })
+    }
+
+    fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()> {
+        let timestamp = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .unwrap()
+            .as_secs();
+        let fingerprint_line = format!("{:010} {}\n", timestamp, fpr_primary);
+
+        self.open_logfile(filename)?
+            .write_all(fingerprint_line.as_bytes())?;
+
+        Ok(())
+    }
+
+    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
+        use super::Query::*;
+        let path = match term {
+            ByFingerprint(ref fp) => self.link_by_fingerprint(fp),
+            ByKeyID(ref keyid) => self.link_by_keyid(keyid),
+            ByEmail(ref email) => self.link_by_email(email),
+            _ => return None,
+        };
+        path.read_link()
+            .ok()
+            .and_then(|link_path| Filesystem::path_to_fingerprint(&link_path))
+    }
+
     // XXX: slow
     fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String> {
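An aside on the lock taken by `transaction()` above: `FlockMutexGuard` itself is not part of this diff. The pattern it implements is an advisory `flock` held on a file inside the keys directory for the lifetime of the guard. The following is a hypothetical sketch of such a guard using the `fs2` crate (already a dependency in database/Cargo.toml); the struct name and lock-file path are illustrative assumptions, not hagrid's actual implementation:

```rust
// Hypothetical sketch of a flock-style directory lock guard (not hagrid's FlockMutexGuard).
use std::fs::{File, OpenOptions};
use std::path::Path;

use fs2::FileExt; // provides lock_exclusive()/unlock() on std::fs::File

pub struct DirLockGuard {
    lock_file: File,
}

impl DirLockGuard {
    /// Take an exclusive advisory lock on `<dir>/.lock` (assumed file name),
    /// blocking until the lock becomes available.
    pub fn lock(dir: &Path) -> std::io::Result<Self> {
        let lock_file = OpenOptions::new()
            .create(true)
            .write(true)
            .open(dir.join(".lock"))?;
        lock_file.lock_exclusive()?;
        Ok(DirLockGuard { lock_file })
    }
}

impl Drop for DirLockGuard {
    fn drop(&mut self) {
        // Unlock errors are ignored; the advisory lock is also released when
        // the file descriptor is closed.
        let _ = self.lock_file.unlock();
    }
}
```

Holding the guard inside `FilesystemTransaction` (the `_flock` field) means the lock is released automatically when the transaction is dropped or committed.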
@@ -616,6 +601,57 @@ impl Database for Filesystem {
         self.read_from_path(&path, false)
     }
 
+    fn get_last_log_entry(&self) -> Result<Fingerprint> {
+        use std::fs;
+        use std::str::FromStr;
+
+        let filename = self.keys_dir_log.join(self.get_current_log_filename());
+        let log_data = fs::read_to_string(filename)?;
+        let last_entry = log_data
+            .lines()
+            .last()
+            .ok_or_else(|| anyhow!("malformed log file"))?
+            .split(' ')
+            .last()
+            .ok_or_else(|| anyhow!("malformed log file"))?;
+        Fingerprint::from_str(last_entry)
+    }
+
+    fn check_link_fpr(
+        &self,
+        fpr: &Fingerprint,
+        fpr_target: &Fingerprint,
+    ) -> Result<Option<Fingerprint>> {
+        let link_keyid = self.link_by_keyid(&fpr.into());
+        let link_fpr = self.link_by_fingerprint(fpr);
+
+        let path_published = self.fingerprint_to_path_published(fpr_target);
+
+        if let Ok(link_fpr_target) = link_fpr.canonicalize() {
+            if !link_fpr_target.ends_with(&path_published) {
+                info!("Fingerprint points to different key for {} (expected {:?} to be suffix of {:?})",
+                      fpr, &path_published, &link_fpr_target);
+                return Err(anyhow!(format!("Fingerprint collision for key {}", fpr)));
+            }
+        }
+
+        if let Ok(link_keyid_target) = link_keyid.canonicalize() {
+            if !link_keyid_target.ends_with(&path_published) {
+                info!(
+                    "KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
+                    fpr, &path_published, &link_keyid_target
+                );
+                return Err(anyhow!(format!("KeyID collision for key {}", fpr)));
+            }
+        }
+
+        if !link_fpr.exists() || !link_keyid.exists() {
+            Ok(Some(fpr.clone()))
+        } else {
+            Ok(None)
+        }
+    }
+
     /// Checks the database for consistency.
     ///
     /// Note that this operation may take a long time, and is
@@ -810,17 +846,16 @@ mod tests {
         let _ = Filesystem::new_from_base(tmpdir.path()).unwrap();
     }
 
-    fn open_db() -> (TempDir, Filesystem, PathBuf) {
+    fn open_db() -> (TempDir, Filesystem) {
         let tmpdir = TempDir::new().unwrap();
         let db = Filesystem::new_from_base(tmpdir.path()).unwrap();
-        let log_path = db.keys_dir_log.join(db.get_current_log_filename());
 
-        (tmpdir, db, log_path)
+        (tmpdir, db)
     }
 
     #[test]
     fn new() {
-        let (_tmp_dir, db, _log_path) = open_db();
+        let (_tmp_dir, db) = open_db();
        let k1 = CertBuilder::new()
             .add_userid("a@invalid.example.org")
             .generate()
@@ -869,120 +904,120 @@ mod tests {
 
     #[test]
     fn uid_verification() {
-        let (_tmp_dir, mut db, log_path) = open_db();
-        test::test_uid_verification(&mut db, &log_path);
+        let (_tmp_dir, mut db) = open_db();
+        test::test_uid_verification(&mut db);
         db.check_consistency().expect("inconsistent database");
     }

The identical two-line change — destructuring the new two-element return of `open_db()` and dropping the `&log_path` argument from the test helper — is applied in the same way to each remaining test in this hunk: uid_deletion (test_uid_deletion), subkey_lookup (test_subkey_lookup), kid_lookup (test_kid_lookup), upload_revoked_tpk (test_upload_revoked_tpk), uid_revocation (test_uid_revocation), regenerate (test_regenerate), key_reupload (test_reupload), uid_replacement (test_uid_replacement), uid_unlinking (test_unlink_uid), same_email_1 through same_email_4 (test_same_email_1..4), no_selfsig (test_no_selfsig), bad_uids (test_bad_uids), and unsigned_uids (test_unsigned_uids). Each test still ends with `db.check_consistency().expect("inconsistent database");`.
@@ -1002,16 +1037,16 @@ mod tests {
 
     #[test]
     fn attested_key_signatures() -> Result<()> {
-        let (_tmp_dir, mut db, log_path) = open_db();
-        test::attested_key_signatures(&mut db, &log_path)?;
+        let (_tmp_dir, mut db) = open_db();
+        test::attested_key_signatures(&mut db)?;
         db.check_consistency()?;
         Ok(())
     }
 
     #[test]
     fn nonexportable_sigs() -> Result<()> {
-        let (_tmp_dir, mut db, log_path) = open_db();
-        test::nonexportable_sigs(&mut db, &log_path)?;
+        let (_tmp_dir, mut db) = open_db();
+        test::nonexportable_sigs(&mut db)?;
         db.check_consistency()?;
         Ok(())
     }
database/src/lib.rs
@@ -17,7 +17,9 @@ extern crate log;
 extern crate chrono;
 extern crate hex;
 extern crate pathdiff;
+extern crate r2d2_sqlite;
 extern crate rand;
+extern crate self_cell;
 extern crate serde;
 extern crate serde_json;
 extern crate tempfile;
@@ -36,7 +38,8 @@ pub mod sync;
 pub mod wkd;
 
 mod fs;
-pub use self::fs::Filesystem as KeyDatabase;
+mod sqlite;
+pub use self::sqlite::Sqlite as KeyDatabase;
 
 mod stateful_tokens;
 pub use stateful_tokens::StatefulTokens;
@@ -106,6 +109,14 @@ impl ImportResult {
             ImportResult::Unchanged(status) => status,
         }
     }
+
+    pub fn as_tpk_status(&self) -> &TpkStatus {
+        match self {
+            ImportResult::New(status) => status,
+            ImportResult::Updated(status) => status,
+            ImportResult::Unchanged(status) => status,
+        }
+    }
 }
 
 #[derive(Debug, PartialEq)]
@@ -120,19 +131,10 @@ pub enum RegenerateResult {
     Unchanged,
 }
 
-pub trait Database: Sync + Send {
-    type MutexGuard;
+pub trait DatabaseTransaction<'a> {
     type TempCert;
 
-    /// Lock the DB for a complex update.
-    ///
-    /// All basic write operations are atomic so we don't need to lock
-    /// read operations to ensure that we return something sane.
-    fn lock(&self) -> Result<Self::MutexGuard>;
-
-    /// Queries the database using Fingerprint, KeyID, or
-    /// email-address, returning the primary fingerprint.
-    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint>;
+    fn commit(self) -> Result<()>;
 
     fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
     fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
@@ -140,21 +142,6 @@ pub trait Database: Sync + Send {
     fn link_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
     fn unlink_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
 
-    fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
-    fn by_kid(&self, kid: &KeyID) -> Option<String>;
-    fn by_email(&self, email: &Email) -> Option<String>;
-    fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
-    fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
-
-    fn check_link_fpr(
-        &self,
-        fpr: &Fingerprint,
-        target: &Fingerprint,
-    ) -> Result<Option<Fingerprint>>;
-
-    fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
-    fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
-
     fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert>;
     fn move_tmp_to_full(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
     fn move_tmp_to_published(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
@@ -164,8 +151,39 @@ pub trait Database: Sync + Send {
         fpr: &Fingerprint,
     ) -> Result<()>;
     fn write_to_quarantine(&self, fpr: &Fingerprint, content: &[u8]) -> Result<()>;
+}
+
+pub trait Database<'a>: Sync + Send {
+    type Transaction: DatabaseTransaction<'a>;
+
+    /// Lock the DB for a complex update.
+    ///
+    /// All basic write operations are atomic so we don't need to lock
+    /// read operations to ensure that we return something sane.
+    fn transaction(&'a self) -> Result<Self::Transaction>;
+
+    /// Queries the database using Fingerprint, KeyID, or
+    /// email-address, returning the primary fingerprint.
+    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint>;
+
+    fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
+    fn by_kid(&self, kid: &KeyID) -> Option<String>;
+    fn by_email(&self, email: &Email) -> Option<String>;
+    fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
+    fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
+
+    fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
+    fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
+
+    fn get_last_log_entry(&self) -> Result<Fingerprint>;
+
     fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()>;
+
+    fn check_link_fpr(
+        &self,
+        fpr: &Fingerprint,
+        target: &Fingerprint,
+    ) -> Result<Option<Fingerprint>>;
     fn check_consistency(&self) -> Result<()>;
 
     /// Queries the database using Fingerprint, KeyID, or
@@ -197,10 +215,10 @@ pub trait Database: Sync + Send {
     /// - abort if any problems come up!
     /// 5. Move full and published temporary Cert to their location
     /// 6. Update all symlinks
-    fn merge(&self, new_tpk: Cert) -> Result<ImportResult> {
+    fn merge(&'a self, new_tpk: Cert) -> Result<ImportResult> {
         let fpr_primary = Fingerprint::try_from(new_tpk.primary_key().fingerprint())?;
 
-        let _lock = self.lock()?;
+        let tx = self.transaction()?;
 
         let known_uids: Vec<UserID> = new_tpk
             .userids()
@@ -311,21 +329,21 @@ pub trait Database: Sync + Send {
             .collect::<Result<Vec<_>>>();
 
         if fpr_checks.is_err() {
-            self.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
+            tx.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
         }
         let fpr_checks = fpr_checks?;
 
         let fpr_not_linked = fpr_checks.into_iter().flatten();
 
-        let full_tpk_tmp = self.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
+        let full_tpk_tmp = tx.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
         let published_tpk_clean = tpk_clean(&published_tpk_new)?;
-        let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
+        let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
 
         // these are very unlikely to fail. but if it happens,
         // database consistency might be compromised!
-        self.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
-        self.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
-        self.regenerate_wkd(&fpr_primary, &published_tpk_clean)?;
+        tx.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
+        tx.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
+        self.regenerate_wkd(&tx, &fpr_primary, &published_tpk_clean)?;
 
         let published_tpk_changed = published_tpk_old
             .map(|tpk| tpk != published_tpk_clean)
@@ -335,13 +353,13 @@ pub trait Database: Sync + Send {
         }
 
         for fpr in fpr_not_linked {
-            if let Err(e) = self.link_fpr(&fpr, &fpr_primary) {
+            if let Err(e) = tx.link_fpr(&fpr, &fpr_primary) {
                 info!("Error ensuring symlink! {} {} {:?}", &fpr, &fpr_primary, e);
             }
         }
 
         for revoked_email in newly_revoked_emails {
-            if let Err(e) = self.unlink_email(revoked_email, &fpr_primary) {
+            if let Err(e) = tx.unlink_email(revoked_email, &fpr_primary) {
                 info!(
                     "Error ensuring symlink! {} {} {:?}",
                     &fpr_primary, &revoked_email, e
@@ -349,6 +367,8 @@ pub trait Database: Sync + Send {
             }
         }
 
+        tx.commit()?;
+
         if is_update {
             Ok(ImportResult::Updated(TpkStatus {
                 is_revoked,
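An aside on the calling pattern introduced above: `merge()` (and the other write paths further down) now obtains a `DatabaseTransaction` from `Database::transaction()`, performs every write through it, and finishes with `commit()`. Below is a simplified, self-contained Rust sketch of that shape, with toy traits and types standing in for hagrid's real ones (no `Sync + Send` bounds, no `TempCert` associated type) — an illustration of the pattern, not hagrid's actual code:

```rust
// Illustrative stand-ins for the Database / DatabaseTransaction split.
type Result<T> = std::result::Result<T, String>;

trait DatabaseTransaction {
    // Write operations live on the transaction...
    fn move_tmp_to_published(&self, cert: Vec<u8>, fpr: &str) -> Result<()>;
    // ...and the sequence is finished by consuming the transaction.
    fn commit(self) -> Result<()>;
}

trait Database<'a> {
    type Transaction: DatabaseTransaction + 'a;

    // Read operations stay on the database itself.
    fn by_fpr(&self, fpr: &str) -> Option<String>;
    // Creating a transaction takes whatever lock the backend needs
    // (a flock for the filesystem backend, an sqlite transaction for the new one).
    fn transaction(&'a self) -> Result<Self::Transaction>;
}

// The merge()-style call pattern from the diff above.
fn publish<'a, D: Database<'a>>(db: &'a D, fpr: &str, cert: Vec<u8>) -> Result<()> {
    let tx = db.transaction()?;
    tx.move_tmp_to_published(cert, fpr)?;
    tx.commit()
}
```

In the real code, the filesystem backend's `commit()` is a no-op (its writes are already durable when they happen), while the sqlite backend maps `commit()` onto an actual SQL COMMIT, as the new database/src/sqlite.rs below shows.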
@@ -448,10 +468,10 @@ pub trait Database: Sync + Send {
     /// - abort if any problems come up!
     /// 5. Move full and published temporary Cert to their location
     /// 6. Update all symlinks
-    fn set_email_published(&self, fpr_primary: &Fingerprint, email_new: &Email) -> Result<()> {
-        let _lock = self.lock()?;
+    fn set_email_published(&'a self, fpr_primary: &Fingerprint, email_new: &Email) -> Result<()> {
+        let tx = self.transaction()?;
 
-        self.nolock_unlink_email_if_other(fpr_primary, email_new)?;
+        self.unlink_email_if_other(&tx, fpr_primary, email_new)?;
 
         let full_tpk = self
             .by_fpr_full(fpr_primary)
@@ -494,25 +514,28 @@ pub trait Database: Sync + Send {
         }
 
         let published_tpk_clean = tpk_clean(&published_tpk_new)?;
-        let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
+        let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
 
-        self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
-        self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
+        tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
+        self.regenerate_wkd(&tx, fpr_primary, &published_tpk_clean)?;
 
         self.update_write_log(fpr_primary);
 
-        if let Err(e) = self.link_email(email_new, fpr_primary) {
+        if let Err(e) = tx.link_email(email_new, fpr_primary) {
             info!(
                 "Error ensuring email symlink! {} -> {} {:?}",
                 &email_new, &fpr_primary, e
             );
         }
 
+        tx.commit()?;
+
         Ok(())
     }
 
-    fn nolock_unlink_email_if_other(
+    fn unlink_email_if_other(
         &self,
+        tx: &Self::Transaction,
         fpr_primary: &Fingerprint,
         unlink_email: &Email,
     ) -> Result<()> {
@@ -520,7 +543,7 @@ pub trait Database: Sync + Send {
             self.lookup_primary_fingerprint(&Query::ByEmail(unlink_email.clone()));
         if let Some(current_fpr) = current_link_fpr {
             if current_fpr != *fpr_primary {
-                self.nolock_set_email_unpublished_filter(&current_fpr, |uid| {
+                self.set_email_unpublished_filter(&tx, &current_fpr, |uid| {
                     Email::try_from(uid)
                         .map(|email| email != *unlink_email)
                         .unwrap_or(false)
@@ -545,15 +568,7 @@ pub trait Database: Sync + Send {
     /// 6. Update all symlinks
     fn set_email_unpublished_filter(
         &self,
-        fpr_primary: &Fingerprint,
-        email_remove: impl Fn(&UserID) -> bool,
-    ) -> Result<()> {
-        let _lock = self.lock()?;
-        self.nolock_set_email_unpublished_filter(fpr_primary, email_remove)
-    }
-
-    fn nolock_set_email_unpublished_filter(
-        &self,
+        tx: &Self::Transaction,
         fpr_primary: &Fingerprint,
         email_remove: impl Fn(&UserID) -> bool,
     ) -> Result<()> {
@@ -581,15 +596,15 @@ pub trait Database: Sync + Send {
             .filter(|email| !published_emails_new.contains(email));
 
         let published_tpk_clean = tpk_clean(&published_tpk_new)?;
-        let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
+        let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
 
-        self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
-        self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
+        tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
+        self.regenerate_wkd(&tx, fpr_primary, &published_tpk_clean)?;
 
         self.update_write_log(fpr_primary);
 
         for unpublished_email in unpublished_emails {
-            if let Err(e) = self.unlink_email(unpublished_email, fpr_primary) {
+            if let Err(e) = tx.unlink_email(unpublished_email, fpr_primary) {
                 info!(
                     "Error deleting email symlink! {} -> {} {:?}",
                     &unpublished_email, &fpr_primary, e
@@ -600,19 +615,31 @@ pub trait Database: Sync + Send {
         Ok(())
     }
 
-    fn set_email_unpublished(&self, fpr_primary: &Fingerprint, email_remove: &Email) -> Result<()> {
-        self.set_email_unpublished_filter(fpr_primary, |uid| {
+    fn set_email_unpublished(
+        &'a self,
+        fpr_primary: &Fingerprint,
+        email_remove: &Email,
+    ) -> Result<()> {
+        let tx = self.transaction().unwrap();
+        self.set_email_unpublished_filter(&tx, fpr_primary, |uid| {
             Email::try_from(uid)
                 .map(|email| email != *email_remove)
                 .unwrap_or(false)
-        })
+        })?;
+        tx.commit()?;
+        Ok(())
     }
 
-    fn set_email_unpublished_all(&self, fpr_primary: &Fingerprint) -> Result<()> {
-        self.set_email_unpublished_filter(fpr_primary, |_| false)
+    fn set_email_unpublished_all(&'a self, fpr_primary: &Fingerprint) -> Result<()> {
+        let tx = self.transaction().unwrap();
+        self.set_email_unpublished_filter(&tx, fpr_primary, |_| false)?;
+        tx.commit()?;
+        Ok(())
     }
 
-    fn regenerate_links(&self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
+    fn regenerate_links(&'a self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
+        let tx = self.transaction().unwrap();
+
         let tpk = self
             .by_primary_fpr(fpr_primary)
             .and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
@@ -624,7 +651,7 @@ pub trait Database: Sync + Send {
             .flatten()
             .collect();
 
-        self.regenerate_wkd(fpr_primary, &tpk)?;
+        self.regenerate_wkd(&tx, fpr_primary, &tpk)?;
 
         let fingerprints = tpk_get_linkable_fprs(&tpk);
 
@@ -642,14 +669,16 @@ pub trait Database: Sync + Send {
 
         for fpr in fpr_not_linked {
             keys_linked += 1;
-            self.link_fpr(&fpr, fpr_primary)?;
+            tx.link_fpr(&fpr, fpr_primary)?;
         }
 
         for email in published_emails {
             emails_linked += 1;
-            self.link_email(&email, fpr_primary)?;
+            tx.link_email(&email, fpr_primary)?;
         }
 
+        tx.commit()?;
+
         if keys_linked != 0 || emails_linked != 0 {
             Ok(RegenerateResult::Updated)
         } else {
@@ -657,13 +686,18 @@ pub trait Database: Sync + Send {
         }
     }
 
-    fn regenerate_wkd(&self, fpr_primary: &Fingerprint, published_tpk: &Cert) -> Result<()> {
+    fn regenerate_wkd(
+        &self,
+        tx: &Self::Transaction,
+        fpr_primary: &Fingerprint,
+        published_tpk: &Cert,
+    ) -> Result<()> {
         let published_wkd_tpk_tmp = if published_tpk.userids().next().is_some() {
-            Some(self.write_to_temp(&published_tpk.export_to_vec()?)?)
+            Some(tx.write_to_temp(&published_tpk.export_to_vec()?)?)
         } else {
             None
         };
-        self.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
+        tx.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
 
         Ok(())
     }
database/src/openpgp_utils.rs
@@ -19,7 +19,7 @@ pub fn is_status_revoked(status: RevocationStatus) -> bool {
 }
 
 pub fn tpk_to_string(tpk: &Cert) -> Result<Vec<u8>> {
-    tpk.armored().export_to_vec()
+    tpk.armored().to_vec()
 }
 
 pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
@@ -85,7 +85,10 @@ pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
 /// Filters the Cert, keeping only UserIDs that aren't revoked, and whose emails match the given list
 pub fn tpk_filter_alive_emails(tpk: &Cert, emails: &[Email]) -> Cert {
     tpk.clone().retain_userids(|uid| {
-        if is_status_revoked(uid.revocation_status(&POLICY, None)) {
+        let is_exportable = uid.self_signatures().any(|s| s.exportable().is_ok());
+        if !is_exportable {
+            false
+        } else if is_status_revoked(uid.revocation_status(&POLICY, None)) {
             false
         } else if let Ok(email) = Email::try_from(uid.userid()) {
             emails.contains(&email)
734
database/src/sqlite.rs
Normal file
734
database/src/sqlite.rs
Normal file
@@ -0,0 +1,734 @@
|
|||||||
|
use self_cell::self_cell;
|
||||||
|
|
||||||
|
use std::convert::TryFrom;
|
||||||
|
use std::fs::create_dir_all;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use std::time::{SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
use openpgp::policy::StandardPolicy;
|
||||||
|
use types::{Email, Fingerprint, KeyID};
|
||||||
|
use Result;
|
||||||
|
use {Database, Query};
|
||||||
|
|
||||||
|
use openpgp::Cert;
|
||||||
|
|
||||||
|
use r2d2_sqlite::rusqlite::params;
|
||||||
|
use r2d2_sqlite::rusqlite::OptionalExtension;
|
||||||
|
use r2d2_sqlite::rusqlite::ToSql;
|
||||||
|
use r2d2_sqlite::rusqlite::Transaction;
|
||||||
|
use r2d2_sqlite::SqliteConnectionManager;
|
||||||
|
|
||||||
|
use crate::{wkd, DatabaseTransaction};
|
||||||
|
|
||||||
|
pub const POLICY: StandardPolicy = StandardPolicy::new();
|
||||||
|
|
||||||
|
pub struct Sqlite {
|
||||||
|
pool: r2d2::Pool<SqliteConnectionManager>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Sqlite {
    pub fn new_file(base_dir: impl Into<PathBuf>) -> Result<Self> {
        let base_dir: PathBuf = base_dir.into();

        let db_file = base_dir.join("keys.sqlite");
        let manager = SqliteConnectionManager::file(db_file);

        Self::new_internal(base_dir, manager)
    }

    #[cfg(test)]
    fn build_pool(manager: SqliteConnectionManager) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        #[derive(Copy, Clone, Debug)]
        pub struct LogConnectionCustomizer;
        impl<E> r2d2::CustomizeConnection<rusqlite::Connection, E> for LogConnectionCustomizer {
            fn on_acquire(&self, conn: &mut rusqlite::Connection) -> std::result::Result<(), E> {
                println!("Acquiring sqlite pool connection: {:?}", conn);
                conn.trace(Some(|query| {
                    println!("{}", query);
                }));
                std::result::Result::Ok(())
            }

            fn on_release(&self, conn: rusqlite::Connection) {
                println!("Releasing pool connection: {:?}", conn);
            }
        }

        Ok(r2d2::Pool::builder()
            .connection_customizer(Box::new(LogConnectionCustomizer {}))
            .build(manager)?)
    }

    #[cfg(not(test))]
    fn build_pool(manager: SqliteConnectionManager) -> Result<r2d2::Pool<SqliteConnectionManager>> {
        Ok(r2d2::Pool::builder().build(manager)?)
    }

    fn new_internal(base_dir: PathBuf, manager: SqliteConnectionManager) -> Result<Self> {
        let keys_dir_log = base_dir.join("log");
        create_dir_all(&keys_dir_log)?;

        let pool = Self::build_pool(manager)?;
        let conn = pool.get()?;
        conn.pragma_update(None, "journal_mode", "wal")?;
        conn.pragma_update(None, "synchronous", "normal")?;
        conn.pragma_update(None, "user_version", "1")?;
        conn.execute_batch(
            "
            CREATE TABLE IF NOT EXISTS certs (
                primary_fingerprint TEXT NOT NULL PRIMARY KEY,
                full TEXT NOT NULL,
                published TEXT,
                published_not_armored BLOB,
                updated_at TIMESTAMP NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            CREATE TABLE IF NOT EXISTS cert_identifiers (
                fingerprint TEXT NOT NULL PRIMARY KEY,
                keyid TEXT NOT NULL,
                primary_fingerprint TEXT NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            CREATE TABLE IF NOT EXISTS emails (
                email TEXT NOT NULL PRIMARY KEY,
                domain TEXT NOT NULL,
                wkd_hash TEXT NOT NULL,
                primary_fingerprint TEXT NOT NULL,
                created_at TIMESTAMP NOT NULL
            );
            ",
        )?;

        Ok(Self { pool })
    }
}
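To illustrate how the three tables relate, here is a small, hypothetical inspection snippet against the keys.sqlite file created above; it is not part of hagrid itself and only assumes the schema shown in new_internal:

use rusqlite::{params, Connection, OptionalExtension};

// Resolve an e-mail address to the armored published cert: the same
// two-step lookup the emails -> certs tables are designed for.
fn published_cert_for(db_path: &str, email: &str) -> rusqlite::Result<Option<String>> {
    let conn = Connection::open(db_path)?;
    conn.query_row(
        "SELECT c.published
           FROM emails e
           JOIN certs c ON c.primary_fingerprint = e.primary_fingerprint
          WHERE e.email = ?1",
        params![email],
        |row| row.get(0),
    )
    .optional()
}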
self_cell! {
    pub struct SqliteTransaction {
        owner: r2d2::PooledConnection<SqliteConnectionManager>,
        #[covariant]
        dependent: Transaction,
    }
}

impl SqliteTransaction {
    fn start(pool: &r2d2::Pool<SqliteConnectionManager>) -> Result<Self> {
        let conn = pool.get()?;
        Ok(Self::new(conn, |c| {
            Transaction::new_unchecked(c, rusqlite::TransactionBehavior::Deferred).unwrap()
        }))
    }

    fn tx(&self) -> &Transaction {
        self.borrow_dependent()
    }
}
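The self_cell macro is what lets the Transaction live in the same struct as the pooled connection it borrows from. A standalone sketch of the same pattern, unrelated to hagrid and assuming the published self_cell API:

use self_cell::self_cell;

type Words<'a> = Vec<&'a str>;

self_cell! {
    struct OwnedWords {
        owner: String,        // the data that is borrowed from
        #[covariant]
        dependent: Words,     // borrows from `owner` for as long as the cell lives
    }
}

fn demo() {
    let cell = OwnedWords::new("hello sqlite world".to_string(), |s| s.split(' ').collect());
    assert_eq!(cell.borrow_dependent().len(), 3);
}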
fn query_simple<T: rusqlite::types::FromSql>(
    conn: &r2d2::PooledConnection<SqliteConnectionManager>,
    query: &str,
    params: &[&dyn ToSql],
) -> Option<T> {
    conn.prepare_cached(query)
        .expect("query must be valid")
        .query_row(params, |row| row.get(0))
        .optional()
        .expect("query execution must not fail")
}
impl<'a> DatabaseTransaction<'a> for SqliteTransaction {
    type TempCert = Vec<u8>;

    fn commit(self) -> Result<()> {
        // we can't use tx().commit(), but we can cheat :)
        self.tx().execute_batch("COMMIT")?;
        Ok(())
    }

    fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert> {
        Ok(content.to_vec())
    }

    fn move_tmp_to_full(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("Time went backwards")
            .as_millis() as u64;
        let file = String::from_utf8(file)?;
        self.tx().execute(
            "
            INSERT INTO certs (primary_fingerprint, full, created_at, updated_at)
            VALUES (?1, ?2, ?3, ?3)
            ON CONFLICT(primary_fingerprint) DO UPDATE SET full=excluded.full, updated_at = excluded.updated_at
            ",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    fn move_tmp_to_published(&self, file: Self::TempCert, fpr: &Fingerprint) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("Time went backwards")
            .as_millis() as u64;
        let file = String::from_utf8(file)?;
        self.tx().execute(
            "UPDATE certs SET published = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    fn move_tmp_to_published_wkd(
        &self,
        file: Option<Self::TempCert>,
        fpr: &Fingerprint,
    ) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("Time went backwards")
            .as_millis() as u64;
        self.tx().execute(
            "UPDATE certs SET published_not_armored = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
            params![fpr, file, now],
        )?;
        Ok(())
    }

    fn write_to_quarantine(&self, _fpr: &Fingerprint, _content: &[u8]) -> Result<()> {
        Ok(())
    }

    fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("Time went backwards")
            .as_millis() as u64;
        let (domain, wkd_hash) = wkd::encode_wkd(email.as_str()).expect("email must be valid");
        self.tx().execute(
            "
            INSERT INTO emails (email, wkd_hash, domain, primary_fingerprint, created_at)
            VALUES (?1, ?2, ?3, ?4, ?5)
            ON CONFLICT(email) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint
            ",
            params![email, wkd_hash, domain, fpr, now],
        )?;
        Ok(())
    }

    fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()> {
        self.tx()
            .execute(
                "DELETE FROM emails WHERE email = ?1 AND primary_fingerprint = ?2",
                params![email, fpr],
            )
            .unwrap();
        Ok(())
    }

    fn link_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("Time went backwards")
            .as_millis() as u64;
        self.tx().execute(
            "
            INSERT INTO cert_identifiers (fingerprint, keyid, primary_fingerprint, created_at)
            VALUES (?1, ?2, ?3, ?4)
            ON CONFLICT(fingerprint) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint;
            ",
            params![
                from_fpr,
                KeyID::from(from_fpr),
                primary_fpr,
                now,
            ],
        )?;
        Ok(())
    }

    fn unlink_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
        self.tx().execute(
            "DELETE FROM cert_identifiers WHERE primary_fingerprint = ?1 AND fingerprint = ?2 AND keyid = ?3",
            params![primary_fpr, from_fpr, KeyID::from(from_fpr)],
        )?;
        Ok(())
    }
}
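Putting the transaction methods together, a store-and-publish round trip looks roughly like this; a sketch only, assuming the Database and DatabaseTransaction traits exactly as they are exercised by the tests further below:

fn store_and_publish(db: &Sqlite, fpr: &Fingerprint, armored: &str) -> Result<()> {
    let tx = db.transaction()?;
    let tmp = tx.write_to_temp(armored.as_bytes())?;
    tx.move_tmp_to_full(tmp, fpr)?;          // upsert into certs.full
    let tmp = tx.write_to_temp(armored.as_bytes())?;
    tx.move_tmp_to_published(tmp, fpr)?;     // mark the cert as published
    tx.link_fpr(fpr, fpr)?;                  // index the primary fingerprint
    tx.commit()
}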
impl<'a> Database<'a> for Sqlite {
    type Transaction = SqliteTransaction;

    fn transaction(&'a self) -> Result<Self::Transaction> {
        SqliteTransaction::start(&self.pool)
    }

    fn write_log_append(&self, _filename: &str, _fpr_primary: &Fingerprint) -> Result<()> {
        // this is done implicitly via created_at in sqlite, no need to do anything here
        Ok(())
    }

    fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
        use super::Query::*;

        let conn = self.pool.get().unwrap();
        match term {
            ByFingerprint(ref fp) => query_simple(
                &conn,
                "SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
                params![fp],
            ),
            ByKeyID(ref keyid) => query_simple(
                &conn,
                "SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
                params![keyid],
            ),
            ByEmail(ref email) => query_simple(
                &conn,
                "SELECT primary_fingerprint FROM emails WHERE email = ?1",
                params![email],
            ),
            _ => return None,
        }
    }

    // Lookup straight from certs table, no link resolution
    fn by_fpr_full(&self, primary_fpr: &Fingerprint) -> Option<String> {
        let conn = self.pool.get().unwrap();
        query_simple(
            &conn,
            "SELECT full FROM certs WHERE primary_fingerprint = ?1",
            params![primary_fpr],
        )
    }

    // XXX: rename! to by_primary_fpr_published
    // Lookup the published cert straight from certs table, no link resolution
    fn by_primary_fpr(&self, primary_fpr: &Fingerprint) -> Option<String> {
        let conn = self.pool.get().unwrap();
        query_simple(
            &conn,
            "SELECT published FROM certs WHERE primary_fingerprint = ?1",
            params![primary_fpr],
        )
    }

    fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
        let conn = self.pool.get().unwrap();
        query_simple::<Fingerprint>(
            &conn,
            "SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
            params![fpr],
        )
        .and_then(|primary_fpr| {
            query_simple(
                &conn,
                "SELECT published FROM certs WHERE primary_fingerprint = ?1",
                params![&primary_fpr],
            )
        })
    }

    fn by_email(&self, email: &Email) -> Option<String> {
        let conn = self.pool.get().unwrap();
        query_simple::<Fingerprint>(
            &conn,
            "SELECT primary_fingerprint FROM emails WHERE email = ?1",
            params![email],
        )
        .and_then(|primary_fpr| {
            query_simple(
                &conn,
                "SELECT published FROM certs WHERE primary_fingerprint = ?1",
                params![&primary_fpr],
            )
        })
    }

    fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>> {
        let conn = self.pool.get().unwrap();
        query_simple::<Fingerprint>(
            &conn,
            "SELECT primary_fingerprint FROM emails WHERE email = ?1",
            params![email],
        )
        .and_then(|primary_fpr| {
            query_simple(
                &conn,
                "SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
                params![&primary_fpr],
            )
        })
    }

    fn by_kid(&self, kid: &KeyID) -> Option<String> {
        let conn = self.pool.get().unwrap();
        query_simple::<Fingerprint>(
            &conn,
            "SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
            params![kid],
        )
        .and_then(|primary_fpr| {
            query_simple(
                &conn,
                "SELECT published FROM certs WHERE primary_fingerprint = ?1",
                params![primary_fpr],
            )
        })
    }

    fn by_domain_and_hash_wkd(&self, domain: &str, wkd_hash: &str) -> Option<Vec<u8>> {
        let conn = self.pool.get().unwrap();
        query_simple::<Fingerprint>(
            &conn,
            "SELECT primary_fingerprint FROM emails WHERE domain = ?1 AND wkd_hash = ?2",
            params![domain, wkd_hash],
        )
        .and_then(|primary_fpr| {
            query_simple(
                &conn,
                "SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
                params![primary_fpr],
            )
        })
    }

    fn check_link_fpr(
        &self,
        fpr: &Fingerprint,
        _fpr_target: &Fingerprint,
    ) -> Result<Option<Fingerprint>> {
        // a desync here cannot happen structurally, so always return true here
        Ok(Some(fpr.clone()))
    }

    /// Checks the database for consistency.
    ///
    /// Note that this operation may take a long time, and is
    /// generally only useful for testing.
    fn check_consistency(&self) -> Result<()> {
        let conn = self.pool.get().unwrap();
        let mut stmt = conn.prepare("SELECT primary_fingerprint, published FROM certs")?;
        let mut rows = stmt.query([])?;
        while let Some(row) = rows.next()? {
            let primary_fpr: Fingerprint = row.get(0)?;
            let published: String = row.get(1)?;
            let cert = Cert::from_str(&published).unwrap();

            let mut cert_emails: Vec<Email> = cert
                .userids()
                .map(|uid| uid.userid().email2().unwrap())
                .flatten()
                .map(|email| Email::from_str(&email))
                .flatten()
                .collect();
            let mut db_emails: Vec<Email> = conn
                .prepare("SELECT email FROM emails WHERE primary_fingerprint = ?1")?
                .query_map([&primary_fpr], |row| row.get::<_, String>(0))
                .unwrap()
                .map(|email| Email::from_str(&email.unwrap()))
                .flatten()
                .collect();
            cert_emails.sort();
            cert_emails.dedup();
            db_emails.sort();
            if cert_emails != db_emails {
                return Err(format_err!(
                    "{:?} does not have correct emails indexed, cert ${:?} db {:?}",
                    &primary_fpr,
                    cert_emails,
                    db_emails,
                ));
            }

            let policy = &POLICY;
            let mut cert_fprs: Vec<Fingerprint> = cert
                .keys()
                .with_policy(policy, None)
                .for_certification()
                .for_signing()
                .map(|amalgamation| amalgamation.key().fingerprint())
                .map(Fingerprint::try_from)
                .flatten()
                .collect();
            let mut db_fprs: Vec<Fingerprint> = conn
                .prepare("SELECT fingerprint FROM cert_identifiers WHERE primary_fingerprint = ?1")?
                .query_map([&primary_fpr], |row| row.get::<_, Fingerprint>(0))
                .unwrap()
                .flatten()
                .collect();
            cert_fprs.sort();
            db_fprs.sort();
            if cert_fprs != db_fprs {
                return Err(format_err!(
                    "{:?} does not have correct fingerprints indexed, cert ${:?} db {:?}",
                    &primary_fpr,
                    cert_fprs,
                    db_fprs,
                ));
            }
        }
        Ok(())
    }

    fn get_last_log_entry(&self) -> Result<Fingerprint> {
        let conn = self.pool.get().unwrap();
        Ok(conn.query_row(
            "SELECT primary_fingerprint FROM certs ORDER BY updated_at DESC LIMIT 1",
            [],
            |row| row.get::<_, Fingerprint>(0),
        )?)
    }
}
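by_domain_and_hash_wkd serves the WKD "advanced method": the domain and wkd_hash columns written by link_email correspond directly to the URL a mail client requests. A rough sketch of that mapping (the URL layout is standard WKD; the helper below is illustrative, not hagrid code):

// Advanced-method WKD URL for a stored (domain, wkd_hash) pair.
fn wkd_advanced_url(domain: &str, wkd_hash: &str, local_part: &str) -> String {
    format!(
        "https://openpgpkey.{domain}/.well-known/openpgpkey/{domain}/hu/{wkd_hash}?l={local_part}"
    )
}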
#[cfg(test)]
mod tests {
    use super::*;
    use openpgp::cert::CertBuilder;
    use tempfile::TempDir;
    use test;

    const DATA_1: &str = "data, content doesn't matter";
    const DATA_2: &str = "other data, content doesn't matter";
    const FINGERPRINT_1: &str = "D4AB192964F76A7F8F8A9B357BD18320DEADFA11";

    fn open_db() -> (TempDir, Sqlite) {
        let tmpdir = TempDir::new().unwrap();
        let db = Sqlite::new_file(tmpdir.path()).unwrap();
        (tmpdir, db)
    }

    #[test]
    fn new() {
        let (_tmp_dir, db) = open_db();
        let k1 = CertBuilder::new()
            .add_userid("a@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k2 = CertBuilder::new()
            .add_userid("b@invalid.example.org")
            .generate()
            .unwrap()
            .0;
        let k3 = CertBuilder::new()
            .add_userid("c@invalid.example.org")
            .generate()
            .unwrap()
            .0;

        assert!(db.merge(k1).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(
            db.merge(k2.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        assert!(!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0);
        assert!(
            db.merge(k3.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        assert!(
            !db.merge(k3.clone())
                .unwrap()
                .into_tpk_status()
                .email_status
                .len()
                > 0
        );
        assert!(!db.merge(k3).unwrap().into_tpk_status().email_status.len() > 0);
    }

    #[test]
    fn xx_by_fpr_full() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.link_fpr(&fpr1, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_fpr_full(&fpr1).expect("must find key"), DATA_1);
        Ok(())
    }

    #[test]
    fn xx_by_kid() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
        lock.link_fpr(&fpr1, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_kid(&fpr1.into()).expect("must find key"), DATA_2);
        Ok(())
    }

    #[test]
    fn xx_by_primary_fpr() -> Result<()> {
        let (_tmp_dir, db) = open_db();
        let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;

        let lock = db.transaction().unwrap();
        lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
        lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
        lock.commit().unwrap();

        assert_eq!(db.by_primary_fpr(&fpr1).expect("must find key"), DATA_2);
        Ok(())
    }

    #[test]
    fn uid_verification() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_verification(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_deletion() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_deletion(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn subkey_lookup() {
        let (_tmp_dir, mut db) = open_db();
        test::test_subkey_lookup(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn kid_lookup() {
        let (_tmp_dir, mut db) = open_db();
        test::test_kid_lookup(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn upload_revoked_tpk() {
        let (_tmp_dir, mut db) = open_db();
        test::test_upload_revoked_tpk(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_revocation() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_revocation(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn regenerate() {
        let (_tmp_dir, mut db) = open_db();
        test::test_regenerate(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn key_reupload() {
        let (_tmp_dir, mut db) = open_db();
        test::test_reupload(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_replacement() {
        let (_tmp_dir, mut db) = open_db();
        test::test_uid_replacement(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn uid_unlinking() {
        let (_tmp_dir, mut db) = open_db();
        test::test_unlink_uid(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_1() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_1(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_2() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_2(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_3() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_3(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn same_email_4() {
        let (_tmp_dir, mut db) = open_db();
        test::test_same_email_4(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn no_selfsig() {
        let (_tmp_dir, mut db) = open_db();
        test::test_no_selfsig(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn bad_uids() {
        let (_tmp_dir, mut db) = open_db();
        test::test_bad_uids(&mut db);
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn reverse_fingerprint_to_path() {
        let tmpdir = TempDir::new().unwrap();
        let db = Sqlite::new_file(tmpdir.path()).unwrap();

        let _fp: Fingerprint = "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();

        // XXX: fixme
        //assert_eq!(Sqlite::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
        //           Some(fp.clone()));
        db.check_consistency().expect("inconsistent database");
    }

    #[test]
    fn attested_key_signatures() -> Result<()> {
        let (_tmp_dir, mut db) = open_db();
        test::attested_key_signatures(&mut db)?;
        db.check_consistency()?;
        Ok(())
    }

    #[test]
    fn nonexportable_sigs() -> Result<()> {
        let (_tmp_dir, mut db) = open_db();
        test::nonexportable_sigs(&mut db)?;
        db.check_consistency()?;
        Ok(())
    }
}
@@ -26,8 +26,6 @@ use openpgp::{
     types::RevocationStatus,
     Cert, Packet,
 };
-use std::fs;
-use std::path::Path;
 use types::{Email, Fingerprint, KeyID};
 use Database;
 use Query;
@@ -37,17 +35,19 @@ use openpgp_utils::POLICY;
 use EmailAddressStatus;
 use TpkStatus;
 
-fn check_mail_none(db: &impl Database, email: &Email) {
+use crate::DatabaseTransaction;
+
+fn check_mail_none<'a>(db: &impl Database<'a>, email: &Email) {
     assert!(db.by_email(email).is_none());
     assert!(db.by_email_wkd(email).is_none());
 }
 
-fn check_mail_some(db: &impl Database, email: &Email) {
+fn check_mail_some<'a>(db: &impl Database<'a>, email: &Email) {
     assert!(db.by_email(email).is_some());
     assert!(db.by_email_wkd(email).is_some());
 }
 
-pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
+pub fn test_uid_verification<'a>(db: &'a mut impl Database<'a>) {
     let str_uid1 = "Test A <test_a@example.com>";
     let str_uid2 = "Test B <test_b@example.com>";
     let tpk = CertBuilder::new()
@@ -64,7 +64,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
@@ -169,7 +169,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -275,7 +275,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
|||||||
}*/
|
}*/
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
pub fn test_regenerate<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
.add_userid(str_uid1)
|
.add_userid(str_uid1)
|
||||||
@@ -303,7 +303,7 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
db.merge(tpk).unwrap().into_tpk_status();
|
db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
|
|
||||||
db.regenerate_links(&fpr).unwrap();
|
db.regenerate_links(&fpr).unwrap();
|
||||||
check_mail_none(db, &email1);
|
check_mail_none(db, &email1);
|
||||||
@@ -313,23 +313,35 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
db.set_email_published(&fpr, &email1).unwrap();
|
db.set_email_published(&fpr, &email1).unwrap();
|
||||||
|
|
||||||
db.unlink_email(&email1, &fpr).unwrap();
|
{
|
||||||
|
let lock = db.transaction().unwrap();
|
||||||
|
lock.unlink_email(&email1, &fpr).unwrap();
|
||||||
|
lock.commit().unwrap();
|
||||||
|
}
|
||||||
assert!(db.check_consistency().is_err());
|
assert!(db.check_consistency().is_err());
|
||||||
db.regenerate_links(&fpr).unwrap();
|
db.regenerate_links(&fpr).unwrap();
|
||||||
assert!(db.check_consistency().is_ok());
|
db.check_consistency().expect("consistency must return Ok");
|
||||||
|
|
||||||
db.unlink_fpr(&fpr, &fpr).unwrap();
|
{
|
||||||
|
let lock = db.transaction().unwrap();
|
||||||
|
lock.unlink_fpr(&fpr, &fpr).unwrap();
|
||||||
|
lock.commit().unwrap();
|
||||||
|
}
|
||||||
assert!(db.check_consistency().is_err());
|
assert!(db.check_consistency().is_err());
|
||||||
db.regenerate_links(&fpr).unwrap();
|
db.regenerate_links(&fpr).unwrap();
|
||||||
assert!(db.check_consistency().is_ok());
|
db.check_consistency().expect("consistency must return Ok");
|
||||||
|
|
||||||
db.unlink_fpr(&fpr_sign, &fpr).unwrap();
|
{
|
||||||
|
let lock = db.transaction().unwrap();
|
||||||
|
lock.unlink_fpr(&fpr_sign, &fpr).unwrap();
|
||||||
|
lock.commit().unwrap();
|
||||||
|
}
|
||||||
assert!(db.check_consistency().is_err());
|
assert!(db.check_consistency().is_err());
|
||||||
db.regenerate_links(&fpr).unwrap();
|
db.regenerate_links(&fpr).unwrap();
|
||||||
assert!(db.check_consistency().is_ok());
|
db.check_consistency().expect("consistency must return Ok");
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
pub fn test_reupload<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
let str_uid2 = "Test B <test_b@example.com>";
|
let str_uid2 = "Test B <test_b@example.com>";
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
@@ -344,7 +356,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
db.merge(tpk.clone()).unwrap().into_tpk_status();
|
db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
|
|
||||||
// verify 1st uid
|
// verify 1st uid
|
||||||
db.set_email_published(&fpr, &email1).unwrap();
|
db.set_email_published(&fpr, &email1).unwrap();
|
||||||
@@ -367,7 +379,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert!(db.by_email(&email2).is_none() ^ db.by_email(&email1).is_none());
|
assert!(db.by_email(&email2).is_none() ^ db.by_email(&email1).is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
pub fn test_uid_replacement<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
let tpk1 = CertBuilder::new()
|
let tpk1 = CertBuilder::new()
|
||||||
.add_userid(str_uid1)
|
.add_userid(str_uid1)
|
||||||
@@ -390,9 +402,9 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload both keys
|
// upload both keys
|
||||||
db.merge(tpk1).unwrap().into_tpk_status();
|
db.merge(tpk1).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr1);
|
check_log_entry(db, &fpr1);
|
||||||
db.merge(tpk2).unwrap().into_tpk_status();
|
db.merge(tpk2).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr2);
|
check_log_entry(db, &fpr2);
|
||||||
|
|
||||||
// verify 1st uid
|
// verify 1st uid
|
||||||
db.set_email_published(&fpr1, &email1).unwrap();
|
db.set_email_published(&fpr1, &email1).unwrap();
|
||||||
@@ -445,7 +457,7 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
pub fn test_uid_deletion<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
let str_uid2 = "Test B <test_b@example.com>";
|
let str_uid2 = "Test B <test_b@example.com>";
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
@@ -463,7 +475,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key and verify uids
|
// upload key and verify uids
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -509,7 +521,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert_eq!(tpk.keys().subkeys().count(), n_subkeys);
|
assert_eq!(tpk.keys().subkeys().count(), n_subkeys);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
|
pub fn test_subkey_lookup<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
.add_userid("Testy <test@example.com>")
|
.add_userid("Testy <test@example.com>")
|
||||||
.add_signing_subkey()
|
.add_signing_subkey()
|
||||||
@@ -549,7 +561,7 @@ pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
|
|||||||
assert_eq!(raw1, raw2);
|
assert_eq!(raw1, raw2);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
|
pub fn test_kid_lookup<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
.add_userid("Testy <test@example.com>")
|
.add_userid("Testy <test@example.com>")
|
||||||
.add_signing_subkey()
|
.add_signing_subkey()
|
||||||
@@ -588,7 +600,7 @@ pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
|
|||||||
assert_eq!(raw1, raw2);
|
assert_eq!(raw1, raw2);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
pub fn test_upload_revoked_tpk<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
let str_uid2 = "Test B <test_b@example.com>";
|
let str_uid2 = "Test B <test_b@example.com>";
|
||||||
let (mut tpk, revocation) = CertBuilder::new()
|
let (mut tpk, revocation) = CertBuilder::new()
|
||||||
@@ -616,7 +628,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: true,
|
is_revoked: true,
|
||||||
@@ -633,7 +645,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
|||||||
check_mail_none(db, &email2);
|
check_mail_none(db, &email2);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
|
pub fn test_uid_revocation<'a>(db: &'a mut impl Database<'a>) {
|
||||||
use std::{thread, time};
|
use std::{thread, time};
|
||||||
|
|
||||||
let str_uid1 = "Test A <test_a@example.com>";
|
let str_uid1 = "Test A <test_a@example.com>";
|
||||||
@@ -651,7 +663,7 @@ pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -804,7 +816,7 @@ pub fn test_uid_revocation_fake(db: &mut D) {
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
|
|
||||||
pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
|
pub fn test_unlink_uid<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let uid = "Test A <test_a@example.com>";
|
let uid = "Test A <test_a@example.com>";
|
||||||
let email = Email::from_str(uid).unwrap();
|
let email = Email::from_str(uid).unwrap();
|
||||||
|
|
||||||
@@ -853,7 +865,7 @@ pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||||
let tpk_evil = tpk_evil.insert_packets(sig).unwrap();
|
let tpk_evil = tpk_evil.insert_packets(sig).unwrap();
|
||||||
let tpk_status = db.merge(tpk_evil).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk_evil).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr_evil);
|
check_log_entry(db, &fpr_evil);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -883,7 +895,7 @@ pub fn get_userids(armored: &str) -> Vec<UserID> {
|
|||||||
|
|
||||||
// If multiple keys have the same email address, make sure things work
|
// If multiple keys have the same email address, make sure things work
|
||||||
// as expected.
|
// as expected.
|
||||||
pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
pub fn test_same_email_1<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "A <test@example.com>";
|
let str_uid1 = "A <test@example.com>";
|
||||||
let tpk1 = CertBuilder::new()
|
let tpk1 = CertBuilder::new()
|
||||||
.add_userid(str_uid1)
|
.add_userid(str_uid1)
|
||||||
@@ -906,7 +918,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload keys.
|
// upload keys.
|
||||||
let tpk_status1 = db.merge(tpk1).unwrap().into_tpk_status();
|
let tpk_status1 = db.merge(tpk1).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr1);
|
check_log_entry(db, &fpr1);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -916,7 +928,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
|||||||
tpk_status1
|
tpk_status1
|
||||||
);
|
);
|
||||||
let tpk_status2 = db.merge(tpk2.clone()).unwrap().into_tpk_status();
|
let tpk_status2 = db.merge(tpk2.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr2);
|
check_log_entry(db, &fpr2);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -984,7 +996,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||||
let tpk2 = tpk2.insert_packets(sig).unwrap();
|
let tpk2 = tpk2.insert_packets(sig).unwrap();
|
||||||
let tpk_status2 = db.merge(tpk2).unwrap().into_tpk_status();
|
let tpk_status2 = db.merge(tpk2).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr2);
|
check_log_entry(db, &fpr2);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1003,7 +1015,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
|||||||
// sure things still work. We do this twice (see above), to
|
// sure things still work. We do this twice (see above), to
|
||||||
// make sure the order isn't relevant when revoking one user id
|
// make sure the order isn't relevant when revoking one user id
|
||||||
// but leaving the other.
|
// but leaving the other.
|
||||||
pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
pub fn test_same_email_2<'a>(db: &'a mut impl Database<'a>) {
|
||||||
use std::{thread, time};
|
use std::{thread, time};
|
||||||
|
|
||||||
let str_uid1 = "A <test@example.com>";
|
let str_uid1 = "A <test@example.com>";
|
||||||
@@ -1021,7 +1033,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
|
|
||||||
// verify uid1
|
// verify uid1
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -1074,7 +1086,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||||
let tpk = tpk.insert_packets(sig).unwrap();
|
let tpk = tpk.insert_packets(sig).unwrap();
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1092,7 +1104,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
|||||||
// sure things still work. We do this twice (see above), to
|
// sure things still work. We do this twice (see above), to
|
||||||
// make sure the order isn't relevant when revoking one user id
|
// make sure the order isn't relevant when revoking one user id
|
||||||
// but leaving the other.
|
// but leaving the other.
|
||||||
pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
pub fn test_same_email_3<'a>(db: &'a mut impl Database<'a>) {
|
||||||
use std::{thread, time};
|
use std::{thread, time};
|
||||||
|
|
||||||
let str_uid1 = "A <test@example.com>";
|
let str_uid1 = "A <test@example.com>";
|
||||||
@@ -1110,7 +1122,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
|
|
||||||
// verify uid1
|
// verify uid1
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -1163,7 +1175,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
|||||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||||
let tpk = tpk.insert_packets(sig).unwrap();
|
let tpk = tpk.insert_packets(sig).unwrap();
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1186,7 +1198,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// If a key has a verified email address, make sure newly uploaded user
|
// If a key has a verified email address, make sure newly uploaded user
|
||||||
// ids with the same email are published as well.
|
// ids with the same email are published as well.
|
||||||
pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
pub fn test_same_email_4<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "A <test@example.com>";
|
let str_uid1 = "A <test@example.com>";
|
||||||
let str_uid2 = "B <test@example.com>";
|
let str_uid2 = "B <test@example.com>";
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
@@ -1205,7 +1217,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
|||||||
|
|
||||||
// upload key
|
// upload key
|
||||||
let tpk_status = db.merge(cert_uid_1).unwrap().into_tpk_status();
|
let tpk_status = db.merge(cert_uid_1).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
db.set_email_published(&fpr, &tpk_status.email_status[0].0)
|
db.set_email_published(&fpr, &tpk_status.email_status[0].0)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -1214,7 +1226,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let tpk_status = db.merge(cert_uid_2).unwrap().into_tpk_status();
|
let tpk_status = db.merge(cert_uid_2).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1231,7 +1243,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
|
pub fn test_bad_uids<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "foo@bar.example <foo@bar.example>";
|
let str_uid1 = "foo@bar.example <foo@bar.example>";
|
||||||
let str_uid2 = "A <test@example.com>";
|
let str_uid2 = "A <test@example.com>";
|
||||||
let str_uid3 = "lalalalaaaaa";
|
let str_uid3 = "lalalalaaaaa";
|
||||||
@@ -1247,7 +1259,7 @@ pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
|
|||||||
let email2 = Email::from_str(str_uid2).unwrap();
|
let email2 = Email::from_str(str_uid2).unwrap();
|
||||||
|
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1293,7 +1305,7 @@ fn cert_without_signature_at(cert: Cert, mut index: i32) -> Cert {
|
|||||||
Cert::from_packets(packets).unwrap()
|
Cert::from_packets(packets).unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
pub fn test_unsigned_uids<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let str_uid1 = "test1@example.com";
|
let str_uid1 = "test1@example.com";
|
||||||
let str_uid2 = "test2@example.com";
|
let str_uid2 = "test2@example.com";
|
||||||
let tpk = CertBuilder::new()
|
let tpk = CertBuilder::new()
|
||||||
@@ -1308,7 +1320,7 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
|||||||
let tpk = cert_without_signature_at(tpk, 1);
|
let tpk = cert_without_signature_at(tpk, 1);
|
||||||
|
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: false,
|
is_revoked: false,
|
||||||
@@ -1319,7 +1331,7 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
|
pub fn test_no_selfsig<'a>(db: &'a mut impl Database<'a>) {
|
||||||
let (mut tpk, revocation) = CertBuilder::new().generate().unwrap();
|
let (mut tpk, revocation) = CertBuilder::new().generate().unwrap();
|
||||||
let fpr = Fingerprint::try_from(tpk.fingerprint()).unwrap();
|
let fpr = Fingerprint::try_from(tpk.fingerprint()).unwrap();
|
||||||
|
|
||||||
@@ -1329,7 +1341,7 @@ pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
|
|||||||
// with revocation, it's ok
|
// with revocation, it's ok
|
||||||
tpk = tpk.insert_packets(revocation).unwrap();
|
tpk = tpk.insert_packets(revocation).unwrap();
|
||||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||||
check_log_entry(log_path, &fpr);
|
check_log_entry(db, &fpr);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TpkStatus {
|
TpkStatus {
|
||||||
is_revoked: true,
|
is_revoked: true,
|
||||||
@@ -1341,7 +1353,7 @@ pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Makes sure that attested key signatures are correctly handled.
|
/// Makes sure that attested key signatures are correctly handled.
|
||||||
pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Result<()> {
|
pub fn attested_key_signatures<'a>(db: &'a mut impl Database<'a>) -> Result<()> {
|
||||||
use openpgp::types::*;
|
use openpgp::types::*;
|
||||||
use std::time::{Duration, SystemTime};
|
use std::time::{Duration, SystemTime};
|
||||||
let t0 = SystemTime::now() - Duration::new(5 * 60, 0);
|
let t0 = SystemTime::now() - Duration::new(5 * 60, 0);
|
||||||
@@ -1390,7 +1402,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
|||||||
|
|
||||||
// Now for the test. First, import Bob's cert as is.
|
// Now for the test. First, import Bob's cert as is.
|
||||||
db.merge(bob.clone())?;
|
db.merge(bob.clone())?;
|
||||||
check_log_entry(log_path, &bobs_fp);
|
check_log_entry(db, &bobs_fp);
|
||||||
|
|
||||||
// Confirm the email so that we can inspect the userid component.
|
// Confirm the email so that we can inspect the userid component.
|
||||||
db.set_email_published(&bobs_fp, &Email::from_str("bob@bar.com")?)?;
|
db.set_email_published(&bobs_fp, &Email::from_str("bob@bar.com")?)?;
|
||||||
@@ -1399,7 +1411,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
|||||||
// certification is stripped.
|
// certification is stripped.
|
||||||
let bob = bob.insert_packets(vec![alice_certifies_bob.clone()])?;
|
let bob = bob.insert_packets(vec![alice_certifies_bob.clone()])?;
|
||||||
db.merge(bob.clone())?;
|
db.merge(bob.clone())?;
|
||||||
check_log_entry(log_path, &bobs_fp);
|
check_log_entry(db, &bobs_fp);
|
||||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
||||||
@@ -1408,7 +1420,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
|||||||
// certification is now included.
|
// certification is now included.
|
||||||
let bob_attested = bob.clone().insert_packets(vec![attestation])?;
|
let bob_attested = bob.clone().insert_packets(vec![attestation])?;
|
||||||
db.merge(bob_attested.clone())?;
|
db.merge(bob_attested.clone())?;
|
||||||
check_log_entry(log_path, &bobs_fp);
|
check_log_entry(db, &bobs_fp);
|
||||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
||||||
@@ -1434,7 +1446,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
|||||||
// Make a random merge with Bob's unattested cert, demonstrating
|
// Make a random merge with Bob's unattested cert, demonstrating
|
||||||
// that the attestation still works.
|
// that the attestation still works.
|
||||||
db.merge(bob.clone())?;
|
db.merge(bob.clone())?;
|
||||||
check_log_entry(log_path, &bobs_fp);
|
check_log_entry(db, &bobs_fp);
|
||||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
||||||
@@ -1471,7 +1483,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
|||||||
);
|
);
|
||||||
|
|
||||||
db.merge(bob)?;
|
db.merge(bob)?;
|
||||||
check_log_entry(log_path, &bobs_fp);
|
check_log_entry(db, &bobs_fp);
|
||||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
||||||
@@ -1497,10 +1509,9 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Result<()> {
     Ok(())
 }
 
-fn check_log_entry(log_path: &Path, fpr: &Fingerprint) {
-    let log_data = fs::read_to_string(log_path).unwrap();
-    let last_entry = log_data.lines().last().unwrap().split(' ').last().unwrap();
-    assert_eq!(last_entry, fpr.to_string());
+fn check_log_entry<'a>(db: &impl Database<'a>, fpr: &Fingerprint) {
+    let last_entry = db.get_last_log_entry().expect("must have log entry");
+    assert_eq!(last_entry.to_string(), fpr.to_string());
 }
 
 fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
@@ -1514,7 +1525,7 @@ fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
     Cert::from_packets(packets).unwrap()
 }
 
-pub fn nonexportable_sigs(db: &mut impl Database, _log_path: &Path) -> Result<()> {
+pub fn nonexportable_sigs<'a>(db: &'a mut impl Database<'a>) -> Result<()> {
     let str_uid1 = "Test A <test_a@example.org>";
     let str_uid2 = "Test B <test_b@example.org>";
 
@@ -4,7 +4,13 @@ use std::result;
|
|||||||
use std::str::FromStr;
|
 use std::str::FromStr;

 use anyhow::Error;
+use hex::ToHex;
 use openpgp::packet::UserID;
+use r2d2_sqlite::rusqlite::types::FromSql;
+use r2d2_sqlite::rusqlite::types::FromSqlError;
+use r2d2_sqlite::rusqlite::types::FromSqlResult;
+use r2d2_sqlite::rusqlite::types::ToSql;
+use r2d2_sqlite::rusqlite::types::ValueRef;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use Result;

@@ -26,6 +32,22 @@ impl Email {
     }
 }

+impl FromSql for Email {
+    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
+        value
+            .as_str()
+            .and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
+    }
+}
+
+impl ToSql for Email {
+    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+        Ok(rusqlite::types::ToSqlOutput::Borrowed(
+            rusqlite::types::ValueRef::Text(self.0.as_bytes()),
+        ))
+    }
+}
+
 impl TryFrom<&UserID> for Email {
     type Error = Error;

@@ -77,9 +99,25 @@ impl FromStr for Email {
     }
 }

-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
 pub struct Fingerprint([u8; 20]);

+impl FromSql for Fingerprint {
+    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
+        value
+            .as_str()
+            .and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
+    }
+}
+
+impl ToSql for Fingerprint {
+    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+        Ok(rusqlite::types::ToSqlOutput::Owned(
+            rusqlite::types::Value::Text(self.to_string()),
+        ))
+    }
+}
+
 impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
     type Error = Error;

@@ -94,7 +132,6 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {

 impl fmt::Display for Fingerprint {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use hex::ToHex;
         self.0.write_hex_upper(f)
     }
 }

@@ -137,6 +174,22 @@ impl FromStr for Fingerprint {
 #[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
 pub struct KeyID([u8; 8]);

+impl FromSql for KeyID {
+    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
+        value
+            .as_str()
+            .and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
+    }
+}
+
+impl ToSql for KeyID {
+    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+        Ok(rusqlite::types::ToSqlOutput::Owned(
+            rusqlite::types::Value::Text(self.to_string()),
+        ))
+    }
+}
+
 impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
     type Error = Error;

@@ -169,7 +222,6 @@ impl From<Fingerprint> for KeyID {

 impl fmt::Display for KeyID {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use hex::ToHex;
         self.0.write_hex_upper(f)
     }
 }
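Note on the FromSql/ToSql implementations added above: they let Email, Fingerprint and KeyID be passed to rusqlite as query parameters and read back from result columns directly, using their string representations. A minimal sketch of what this enables at a call site (the `certs` table and its columns are made up for illustration and are not hagrid's actual schema):

    use rusqlite::{params, Connection};

    // Sketch only: hypothetical table `certs(fingerprint TEXT, email TEXT)`.
    // Fingerprint implements ToSql (stored as its uppercase hex form) and
    // Email implements FromSql (parsed back from its text form), so neither
    // needs manual to_string()/from_str() glue at the call site.
    fn lookup_email(conn: &Connection, fpr: &Fingerprint) -> rusqlite::Result<Option<Email>> {
        match conn.query_row(
            "SELECT email FROM certs WHERE fingerprint = ?1",
            params![fpr],
            |row| row.get(0),
        ) {
            Ok(email) => Ok(Some(email)),
            Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
            Err(e) => Err(e),
        }
    }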
36  default.nix  Normal file
@@ -0,0 +1,36 @@
+{ lib, rustPlatform, sqlite, openssl, gettext, pkg-config }:
+
+rustPlatform.buildRustPackage rec {
+  pname = "hagrid";
+  version = "1.0.0";
+
+  src = ./.;
+  cargoLock = {
+    lockFile = ./Cargo.lock;
+    outputHashes = {
+      "rocket_i18n-0.5.0" = "sha256-EbUE8Z3TQBnDnptl9qWK6JvsACCgP7EXTxcA7pouYbc=";
+    };
+  };
+
+  postInstall = ''
+    cp -r dist $out
+  '';
+
+  nativeBuildInputs = [
+    pkg-config
+    gettext
+  ];
+
+  buildInputs = [
+    sqlite
+    openssl
+  ];
+
+  meta = with lib; {
+    description = "A verifying keyserver";
+    homepage = "https://gitlab.com/keys.openpgp.org/hagrid";
+    license = with licenses; [ gpl3 ];
+    maintainers = with maintainers; [ valodim ];
+    platforms = platforms.all;
+  };
+}
@@ -1,10 +1,10 @@
-FROM rustlang/rust:nightly
+FROM rust:bullseye
 RUN apt update -qy
-RUN apt install -qy libclang-dev build-essential pkg-config clang libssl-dev gettext zsh
+RUN apt install -qy libclang-dev build-essential pkg-config clang libssl-dev libsqlite3-dev gettext zsh

 RUN useradd -u 1000 -d /home/user user && mkdir /home/user && chown user:user /home/user
 USER user

-RUN rustup install 1.70.0
+RUN rustup install 1.82.0

 WORKDIR /home/user/src
61  flake.lock  generated  Normal file
@@ -0,0 +1,61 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1739357830,
+        "narHash": "sha256-9xim3nJJUFbVbJCz48UP4fGRStVW5nv4VdbimbKxJ3I=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "0ff09db9d034a04acd4e8908820ba0b410d7a33a",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-24.11",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs",
+        "utils": "utils"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    },
+    "utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
16  flake.nix  Normal file
@@ -0,0 +1,16 @@
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
+    utils.url = "github:numtide/flake-utils";
+  };
+  outputs = { self, nixpkgs, utils }:
+    utils.lib.eachDefaultSystem (system: let
+      pkgs = nixpkgs.legacyPackages."${system}";
+    in rec {
+      packages.hagrid = pkgs.callPackage ./. { };
+      packages.default = packages.hagrid;
+    }) // {
+      overlays.hagrid = (final: prev: { hagrid = self.packages."${final.system}".hagrid; });
+      overlays.default = self.overlays.hagrid;
+    };
+}
@@ -36,12 +36,12 @@ location /vks {
     limit_req zone=search_fpr_keyid burst=1000 nodelay;

     error_page 404 /errors-static/404-by-fpr.htm;
-    default_type application/pgp-keys;
-    add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
+    # default_type application/pgp-keys;
+    # add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
     add_header 'Access-Control-Allow-Origin' '*' always;
     add_header 'Cache-Control' 'no-cache' always;
     etag off;
-    try_files /keys/links/by-fpr/$1/$2/$3 =404;
+    proxy_pass http://127.0.0.1:8080;
 }

 location ~ ^/vks/v1/by-keyid/(?:0x)?([^/][^/])([^/][^/])(.*)$ {
@@ -49,12 +49,12 @@ location /vks {
     error_page 429 /errors-static/429-rate-limit-vks-fpr.htm;

     error_page 404 /errors-static/404-by-keyid.htm;
-    default_type application/pgp-keys;
-    add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
+    # default_type application/pgp-keys;
+    # add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
     add_header 'Access-Control-Allow-Origin' '*' always;
     add_header 'Cache-Control' 'no-cache' always;
     etag off;
-    try_files /keys/links/by-keyid/$1/$2/$3 =404;
+    proxy_pass http://127.0.0.1:8080;
 }

 location /vks/v1/by-email/ {
@@ -110,12 +110,12 @@ location /.well-known/openpgpkey {
     error_page 429 /errors-static/429-rate-limit-vks-email.htm;

     error_page 404 /errors-static/404-wkd.htm;
-    default_type application/octet-stream;
-    add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
+    # default_type application/octet-stream;
+    # add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
     add_header 'Access-Control-Allow-Origin' '*' always;
     add_header 'Cache-Control' 'no-cache' always;
     etag off;
-    try_files /keys/links/wkd/$1/$2/$3/$4 =404;
+    proxy_pass http://127.0.0.1:8080;
 }

 location ~ "^/.well-known/openpgpkey/([^/]+)/policy$" {
@@ -6,7 +6,7 @@ authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
 [dependencies]
 hagrid-database = { path = "../database" }
 anyhow = "1"
-sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
+sequoia-openpgp = { version = "1.17.0", default-features = false, features = ["crypto-openssl"] }
 multipart = "0"
 log = "0"
 rand = "0.6"
@@ -1,4 +1,5 @@
 use std::cmp::min;
+use std::convert::TryInto;
 use std::fs::File;
 use std::io::Read;
 use std::path::{Path, PathBuf};
@@ -14,7 +15,7 @@ use openpgp::parse::{PacketParser, PacketParserResult, Parse};
 use openpgp::Packet;

 extern crate hagrid_database as database;
-use database::{Database, ImportResult, KeyDatabase};
+use database::{Database, EmailAddressStatus, ImportResult, KeyDatabase};

 use indicatif::{MultiProgress, ProgressBar, ProgressStyle};

@@ -26,7 +27,7 @@ use HagridConfig;
 const NUM_THREADS_MAX: usize = 3;

 #[allow(clippy::needless_collect)]
-pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>) -> Result<()> {
+pub fn do_import(config: &HagridConfig, input_files: Vec<PathBuf>) -> Result<()> {
     let num_threads = min(NUM_THREADS_MAX, input_files.len());
     let input_file_chunks = setup_chunks(input_files, num_threads);

@@ -39,7 +40,7 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
             let config = config.clone();
             let multi_progress = multi_progress.clone();
             thread::spawn(move || {
-                import_from_files(&config, dry_run, input_file_chunk, multi_progress).unwrap();
+                import_from_files(&config, input_file_chunk, multi_progress).unwrap();
             })
         })
         .collect();
@@ -116,16 +117,10 @@ impl<'a> ImportStats<'a> {

 fn import_from_files(
     config: &HagridConfig,
-    dry_run: bool,
     input_files: Vec<PathBuf>,
     multi_progress: Arc<MultiProgress>,
 ) -> Result<()> {
-    let db = KeyDatabase::new_internal(
-        config.keys_internal_dir.as_ref().unwrap(),
-        config.keys_external_dir.as_ref().unwrap(),
-        config.tmp_dir.as_ref().unwrap(),
-        dry_run,
-    )?;
+    let db = KeyDatabase::new_file(config.keys_internal_dir.as_ref().unwrap())?;

     for input_file in input_files {
         import_from_file(&db, &input_file, &multi_progress)?;
@@ -152,15 +147,33 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progre

     read_file_to_tpks(input_reader, &mut |acc| {
         let primary_key = acc[0].clone();
-        let result = import_key(db, acc);
-        if let Err(ref e) = result {
         let key_fpr = match primary_key {
-            Packet::PublicKey(key) => key.fingerprint().to_hex(),
-            Packet::SecretKey(key) => key.fingerprint().to_hex(),
-            _ => "Unknown".to_owned(),
+            Packet::PublicKey(key) => key.fingerprint(),
+            Packet::SecretKey(key) => key.fingerprint(),
+            _ => return (),
         };
-            let error = format!("{}:{:05}:{}: {}", filename, stats.count_total, key_fpr, e);
+        let result = import_key(db, acc);
+        if let Ok(ref result) = result {
+            let tpk_status = result.as_tpk_status();
+            if !tpk_status.is_revoked {
+                for (email, status) in &tpk_status.email_status {
+                    if status == &EmailAddressStatus::NotPublished {
+                        db.set_email_published(&key_fpr.clone().try_into().unwrap(), &email)
+                            .unwrap();
+                    }
+                }
+            }
+        }
+        if let Err(ref e) = result {
+            let error = format!(
+                "{}:{:05}:{}: {}",
+                filename,
+                stats.count_total,
+                key_fpr.to_hex(),
+                e
+            );
             progress_bar.println(error);
+            return ();
         }
         stats.update(result);
     })?;
@@ -198,46 +211,3 @@ fn read_file_to_tpks(
 fn import_key(db: &KeyDatabase, packets: Vec<Packet>) -> Result<ImportResult> {
     openpgp::Cert::from_packets(packets.into_iter()).and_then(|tpk| db.merge(tpk))
 }
-
-/*
-#[cfg(test)]
-mod import_tests {
-    use std::fs::File;
-    use tempfile::tempdir;
-    use openpgp::serialize::Serialize;
-    use super::*;
-
-    #[test]
-    fn import() {
-        let root = tempdir().unwrap();
-
-        let db = KeyDatabase::new_from_base(root.path().to_path_buf()).unwrap();
-
-        // Generate a key and import it.
-        let (tpk, _) = openpgp::tpk::TPKBuilder::autocrypt(
-            None, Some("foo@invalid.example.com".into()))
-            .generate().unwrap();
-        let import_me = root.path().join("import-me");
-        tpk.serialize(&mut File::create(&import_me).unwrap()).unwrap();
-
-        do_import(root.path().to_path_buf(), vec![import_me]).unwrap();
-
-        let check = |query: &str| {
-            let tpk_ = db.lookup(&query.parse().unwrap()).unwrap().unwrap();
-            assert_eq!(tpk.fingerprint(), tpk_.fingerprint());
-            assert_eq!(tpk.subkeys().map(|skb| skb.subkey().fingerprint())
-                           .collect::<Vec<_>>(),
-                       tpk_.subkeys().map(|skb| skb.subkey().fingerprint())
-                           .collect::<Vec<_>>());
-            assert_eq!(tpk_.userids().count(), 0);
-        };
-
-        check(&format!("{}", tpk.primary().fingerprint()));
-        check(&format!("{}", tpk.primary().fingerprint().to_keyid()));
-        check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
-                           .fingerprint()));
-        check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
-                           .fingerprint().to_keyid()));
-    }
-}
-*/
@@ -18,7 +18,6 @@ use anyhow::Result;
 use clap::{App, Arg, SubCommand};

 mod import;
-mod regenerate;

 #[derive(Deserialize)]
 pub struct HagridConfigs {
@@ -34,10 +33,10 @@ pub struct HagridConfigs {
 pub struct HagridConfig {
     _template_dir: Option<PathBuf>,
     keys_internal_dir: Option<PathBuf>,
-    keys_external_dir: Option<PathBuf>,
+    _keys_external_dir: Option<PathBuf>,
     _assets_dir: Option<PathBuf>,
     _token_dir: Option<PathBuf>,
-    tmp_dir: Option<PathBuf>,
+    _tmp_dir: Option<PathBuf>,
     _maintenance_file: Option<PathBuf>,
 }

@@ -62,16 +61,9 @@ fn main() -> Result<()> {
                 .default_value("prod")
                 .possible_values(&["dev", "stage", "prod"]),
         )
-        .subcommand(SubCommand::with_name("regenerate").about("Regenerate symlink directory"))
         .subcommand(
             SubCommand::with_name("import")
                 .about("Import keys into Hagrid")
-                .arg(
-                    Arg::with_name("dry run")
-                        .short("n")
-                        .long("dry-run")
-                        .help("don't actually keep imported keys"),
-                )
                 .arg(
                     Arg::with_name("keyring files")
                         .required(true)
@@ -91,16 +83,13 @@ fn main() -> Result<()> {
     };

     if let Some(matches) = matches.subcommand_matches("import") {
-        let dry_run = matches.occurrences_of("dry run") > 0;
         let keyrings: Vec<PathBuf> = matches
             .values_of_lossy("keyring files")
             .unwrap()
             .iter()
             .map(|arg| PathBuf::from_str(arg).unwrap())
             .collect();
-        import::do_import(&config, dry_run, keyrings)?;
-    } else if let Some(_matches) = matches.subcommand_matches("regenerate") {
-        regenerate::do_regenerate(&config)?;
+        import::do_import(&config, keyrings)?;
     } else {
         println!("{}", matches.usage());
     }
@@ -1,133 +0,0 @@
-use anyhow::Result;
-
-use std::path::Path;
-use std::time::Instant;
-
-use indicatif::{ProgressBar, ProgressStyle};
-use walkdir::WalkDir;
-
-use database::types::Fingerprint;
-use database::{Database, KeyDatabase, RegenerateResult};
-use HagridConfig;
-
-struct RegenerateStats<'a> {
-    progress: &'a ProgressBar,
-    prefix: String,
-    count_total: u64,
-    count_err: u64,
-    count_updated: u64,
-    count_unchanged: u64,
-    count_partial: u64,
-    start_time_partial: Instant,
-    kps_partial: u64,
-}
-
-impl<'a> RegenerateStats<'a> {
-    fn new(progress: &'a ProgressBar) -> Self {
-        Self {
-            progress,
-            prefix: "".to_owned(),
-            count_total: 0,
-            count_err: 0,
-            count_updated: 0,
-            count_unchanged: 0,
-            count_partial: 0,
-            start_time_partial: Instant::now(),
-            kps_partial: 0,
-        }
-    }
-
-    fn update(&mut self, result: Result<RegenerateResult>, fpr: Fingerprint) {
-        // If a new TPK starts, parse and import.
-        self.count_total += 1;
-        self.count_partial += 1;
-        if (self.count_total % 10) == 0 {
-            self.prefix = fpr.to_string()[0..4].to_owned();
-        }
-        match result {
-            Err(e) => {
-                self.progress.println(format!("{}: {}", fpr, e));
-                self.count_err += 1;
-            }
-            Ok(RegenerateResult::Updated) => self.count_updated += 1,
-            Ok(RegenerateResult::Unchanged) => self.count_unchanged += 1,
-        }
-        self.progress_update();
-    }
-
-    fn progress_update(&mut self) {
-        if (self.count_total % 10) != 0 {
-            return;
-        }
-        if self.count_partial >= 1000 {
-            let runtime = (self.start_time_partial.elapsed().as_millis() + 1) as u64;
-            self.kps_partial = (self.count_partial * 1000) / runtime;
-            self.start_time_partial = Instant::now();
-            self.count_partial = 0;
-        }
-        self.progress.set_message(&format!(
-            "prefix {} regenerated {:5} keys, {:5} Updated {:5} Unchanged {:5} Errors ({:3} keys/s)",
-            self.prefix, self.count_total, self.count_updated, self.count_unchanged, self.count_err, self.kps_partial));
-    }
-}
-
-pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
-    let db = KeyDatabase::new_internal(
-        config.keys_internal_dir.as_ref().unwrap(),
-        config.keys_external_dir.as_ref().unwrap(),
-        config.tmp_dir.as_ref().unwrap(),
-        false,
-    )?;
-
-    let published_dir = config
-        .keys_external_dir
-        .as_ref()
-        .unwrap()
-        .join("links")
-        .join("by-email");
-    let dirs: Vec<_> = WalkDir::new(published_dir)
-        .min_depth(1)
-        .max_depth(1)
-        .sort_by(|a, b| a.file_name().cmp(b.file_name()))
-        .into_iter()
-        .flatten()
-        .map(|entry| entry.into_path())
-        .collect();
-
-    let progress_bar = ProgressBar::new(dirs.len() as u64);
-    progress_bar.set_style(
-        ProgressStyle::default_bar()
-            .template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
-            .progress_chars("##-"),
-    );
-
-    let mut stats = RegenerateStats::new(&progress_bar);
-
-    for dir in dirs {
-        progress_bar.inc(1);
-        regenerate_dir_recursively(&db, &mut stats, &dir)?;
-    }
-    progress_bar.finish();
-
-    Ok(())
-}
-
-fn regenerate_dir_recursively(
-    db: &KeyDatabase,
-    stats: &mut RegenerateStats,
-    dir: &Path,
-) -> Result<()> {
-    for path in WalkDir::new(dir)
-        .follow_links(true)
-        .into_iter()
-        .flatten()
-        .filter(|e| e.file_type().is_file())
-        .map(|entry| entry.into_path())
-    {
-        let fpr = KeyDatabase::path_to_primary(&path).unwrap();
-        let result = db.regenerate_links(&fpr);
-        stats.update(result, fpr);
-    }
-
-    Ok(())
-}
@@ -1 +1 @@
-1.70.0
+1.82.0
@@ -17,6 +17,7 @@ pkgs.mkShell {
   ];

   buildInputs = with pkgs; [
+    sqlite
     openssl

     clang
@@ -50,7 +50,7 @@ fn main() {

 fn real_main() -> Result<()> {
     let opt = Opt::from_args();
-    let db = KeyDatabase::new_from_base(opt.base.canonicalize()?)?;
+    let db = KeyDatabase::new_file(opt.base.canonicalize()?)?;
     delete(&db, &opt.query.parse()?, opt.all_bindings, opt.all)
 }
12  src/mail.rs
@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};

 use crate::counters;
 use lettre::message::{header, Mailbox, MultiPart, SinglePart};
-use lettre::{FileTransport, SendmailTransport, Transport as LettreTransport};
+use lettre::{FileTransport, SendmailTransport, SmtpTransport, Transport as LettreTransport};
 use rocket_dyn_templates::handlebars::Handlebars;
 use serde::Serialize;
 use uuid::Uuid;
@@ -53,6 +53,7 @@ pub struct Service {
 }

 enum Transport {
+    LocalSmtp,
     Sendmail,
     Filemail(PathBuf),
 }
@@ -63,6 +64,11 @@ impl Service {
         Self::new(from, base_uri, template_dir, Transport::Sendmail)
     }

+    /// Sends mail via local smtp server.
+    pub fn localsmtp(from: &str, base_uri: &str, template_dir: &Path) -> Result<Self> {
+        Self::new(from, base_uri, template_dir, Transport::LocalSmtp)
+    }
+
     /// Sends mail by storing it in the given directory.
     pub fn filemail(from: &str, base_uri: &str, template_dir: &Path, path: &Path) -> Result<Self> {
         Self::new(
@@ -246,6 +252,10 @@ impl Service {
         )?;

         match self.transport {
+            Transport::LocalSmtp => {
+                let transport = SmtpTransport::unencrypted_localhost();
+                transport.send(&email)?;
+            }
             Transport::Sendmail => {
                 let transport = SendmailTransport::new();
                 transport.send(&email)?;
@@ -1,64 +1,50 @@
-use ring::aead::{open_in_place, seal_in_place, Algorithm, AES_256_GCM};
-use ring::aead::{OpeningKey, SealingKey};
-use ring::digest;
-use ring::hmac;
-use ring::rand::{SecureRandom, SystemRandom};
+use aes_gcm::{
+    aead::{Aead, OsRng},
+    AeadCore, Aes256Gcm, Key, KeyInit, Nonce,
+};
+use sha2::{Digest, Sha256};

-// Keep these in sync, and keep the key len synced with the `private` docs as
-// well as the `KEYS_INFO` const in secure::Key.
-static ALGO: &Algorithm = &AES_256_GCM;
 const NONCE_LEN: usize = 12;

 pub struct SealedState {
-    sealing_key: SealingKey,
-    opening_key: OpeningKey,
+    cipher: Aes256Gcm,
 }

 impl SealedState {
     pub fn new(secret: &str) -> Self {
-        let salt = hmac::SigningKey::new(&digest::SHA256, b"hagrid");
-        let mut key = vec![0; 32];
-        ring::hkdf::extract_and_expand(&salt, secret.as_bytes(), b"", &mut key);
+        let mut hash = Sha256::new();
+        hash.update(b"hagrid");
+        hash.update(secret);
+        let hashed_secret = hash.finalize();
+        let key = Key::<Aes256Gcm>::from_slice(&hashed_secret);
+        let cipher = Aes256Gcm::new(&key);

-        let sealing_key = SealingKey::new(ALGO, key.as_ref()).expect("sealing key creation");
-        let opening_key = OpeningKey::new(ALGO, key.as_ref()).expect("sealing key creation");
-
-        SealedState {
-            sealing_key,
-            opening_key,
-        }
+        SealedState { cipher }
     }

     pub fn unseal(&self, data: &[u8]) -> Result<String, &'static str> {
         if data.len() < NONCE_LEN {
             return Err("invalid sealed value: too short");
         }
-        let (nonce, sealed) = data.split_at(NONCE_LEN);
-        let mut sealed_copy = sealed.to_vec();
-        let unsealed = open_in_place(&self.opening_key, nonce, &[], 0, &mut sealed_copy)
+        let (sealed, nonce) = data.split_at(data.len() - NONCE_LEN);
+        let unsealed = self
+            .cipher
+            .decrypt(Nonce::from_slice(nonce), sealed)
             .map_err(|_| "invalid key/nonce/value: bad seal")?;

-        ::std::str::from_utf8(unsealed)
+        core::str::from_utf8(&unsealed)
             .map(|s| s.to_string())
             .map_err(|_| "bad unsealed utf8")
     }

     pub fn seal(&self, input: &str) -> Vec<u8> {
-        let mut data;
-        let output_len = {
-            let overhead = ALGO.tag_len();
-            data = vec![0; NONCE_LEN + input.len() + overhead];
-
-            let (nonce, in_out) = data.split_at_mut(NONCE_LEN);
-            SystemRandom::new()
-                .fill(nonce)
-                .expect("couldn't random fill nonce");
-            in_out[..input.len()].copy_from_slice(input.as_bytes());
-
-            seal_in_place(&self.sealing_key, nonce, &[], in_out, overhead).expect("in-place seal")
-        };
-
-        data[..(NONCE_LEN + output_len)].to_vec()
+        let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
+        let mut sealed = self
+            .cipher
+            .encrypt(&nonce, input.as_bytes())
+            .expect("sealing works");
+        sealed.extend(nonce);
+        sealed
     }
 }

@@ -69,8 +55,10 @@ mod tests {
     #[test]
     fn test_encrypt_decrypt() {
         let sv = SealedState::new("swag");

         let sealed = sv.seal("test");

+        // use a different instance to make sure no internal state remains
+        let sv = SealedState::new("swag");
         let unsealed = sv.unseal(sealed.as_slice()).unwrap();

         assert_eq!("test", unsealed);
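Two behavioral details of the SealedState rewrite above are easy to miss: the key is now SHA-256 over "hagrid" concatenated with the secret (previously an HKDF expansion), and the random nonce is appended after the ciphertext rather than prepended before it. Values sealed by the old code therefore no longer unseal, which is presumably also why the hard-coded token in the following test changes. A small sketch of the new buffer layout (illustrative only, no real ciphertext):

    // Layout produced by seal() above: [ AES-256-GCM ciphertext + tag ][ 12-byte nonce ]
    const NONCE_LEN: usize = 12;

    // Mirrors the split performed by unseal(): the last NONCE_LEN bytes are the nonce.
    fn split_sealed(data: &[u8]) -> Option<(&[u8], &[u8])> {
        if data.len() < NONCE_LEN {
            return None;
        }
        Some(data.split_at(data.len() - NONCE_LEN))
    }

    fn main() {
        // Placeholder bytes standing in for a sealed value; not a real token.
        let sealed = vec![0u8; 44];
        let (ciphertext, nonce) = split_sealed(&sealed).unwrap();
        assert_eq!(nonce.len(), NONCE_LEN);
        assert_eq!(ciphertext.len(), sealed.len() - NONCE_LEN);
    }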
@@ -113,7 +113,7 @@ mod tests {
         let payload = TestStruct1 {
             payload: "hello".to_owned(),
         };
-        let token = "rwM_S9gZaRQaf6DLvmWtZSipQhH_G5ronSIJv2FrMdwGBPSYYQ-1jaP58dTHU5WuC14vb8jxmz2Xf_b3pqzpCGTEJj9drm4t";
+        let token = "C6fCPAGv93nZqDQXodl-bsDgzkxqbjDtbeR6Be4v_UHJfL2UJxG2imzmUlK1PfLT4QzNIRWsdFDYWrx_aCgLZ4MgVQWYyazn";
         let mt = Service::init("secret", 60);

         let check_result = mt.check(token);
@@ -319,7 +319,7 @@ pub fn key_to_response_plain(
         return MyResponse::not_found_plain(describe_query_error(&i18n, &query));
     };

-    match db.by_fpr(&fp) {
+    match db.by_primary_fpr(&fp) {
         Some(armored) => MyResponse::key(armored, &fp),
         None => MyResponse::not_found_plain(describe_query_error(&i18n, &query)),
     }
@@ -516,11 +516,9 @@ fn configure_prometheus(config: &Figment) -> Option<PrometheusMetrics> {

 fn configure_db_service(config: &Figment) -> Result<KeyDatabase> {
     let keys_internal_dir: PathBuf = config.extract_inner("keys_internal_dir")?;
-    let keys_external_dir: PathBuf = config.extract_inner("keys_external_dir")?;
-    let tmp_dir: PathBuf = config.extract_inner("tmp_dir")?;

-    let fs_db = KeyDatabase::new(keys_internal_dir, keys_external_dir, tmp_dir)?;
-    Ok(fs_db)
+    let sqlite_db = KeyDatabase::new_file(keys_internal_dir)?;
+    Ok(sqlite_db)
 }

 fn configure_hagrid_state(config: &Figment) -> Result<HagridState> {
@@ -557,9 +555,12 @@ fn configure_mail_service(config: &Figment) -> Result<mail::Service> {
     let from: String = config.extract_inner("from")?;

     let filemail_into: Option<PathBuf> = config.extract_inner::<PathBuf>("filemail_into").ok();
+    let local_smtp: Option<bool> = config.extract_inner::<bool>("local_smtp").ok();

     if let Some(path) = filemail_into {
         mail::Service::filemail(&from, &base_uri, &email_template_dir, &path)
+    } else if local_smtp == Some(true) {
+        mail::Service::localsmtp(&from, &base_uri, &email_template_dir)
     } else {
         mail::Service::sendmail(&from, &base_uri, &email_template_dir)
     }
21  tester/Cargo.toml  Normal file
@@ -0,0 +1,21 @@
+[package]
+name = "tester"
+version = "0.1.0"
+authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
+
+[dependencies]
+anyhow = "1"
+sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
+log = "0"
+rand = "0.6"
+serde = { version = "1.0", features = ["derive"] }
+serde_derive = "1"
+serde_json = "1"
+time = "0.1"
+url = "1"
+hex = "0.3"
+base64 = "0.10"
+idna = "0.1"
+fs2 = "0.4"
+clap = "2"
+indicatif = "0.11"

37  tester/src/generate.rs  Normal file
@@ -0,0 +1,37 @@
+use std::{fs::File, io::Write, path::Path};
+
+use anyhow::Result;
+
+use indicatif::{ProgressBar, ProgressStyle};
+use openpgp::{cert::CertBuilder, serialize::Serialize};
+
+use crate::util;
+
+pub fn do_generate(count: u64, output_path: &Path, fprs_path: Option<&Path>) -> Result<()> {
+    let progress_bar = ProgressBar::new(count);
+    progress_bar.set_style(
+        ProgressStyle::default_bar()
+            .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
+            .progress_chars("##-"),
+    );
+    progress_bar.set_draw_delta(count / 100);
+
+    let mut output = File::create(output_path)?;
+    let mut output_fprs = if let Some(p) = fprs_path {
+        Some(File::create(p)?)
+    } else {
+        None
+    };
+    for i in 0..count {
+        let (cert, _) = CertBuilder::general_purpose(None, Some(util::gen_email(i))).generate()?;
+        cert.serialize(&mut output)?;
+        if let Some(ref mut output_fprs) = output_fprs {
+            writeln!(output_fprs, "{}", cert)?;
+        }
+
+        progress_bar.inc(1);
+    }
+    progress_bar.finish();
+
+    Ok(())
+}

49  tester/src/genreqs.rs  Normal file
@@ -0,0 +1,49 @@
+use std::io::Write;
+use std::{fs::File, io, io::BufRead, path::Path};
+
+use anyhow::Result;
+use rand::seq::SliceRandom;
+use rand::{thread_rng, Rng};
+
+use crate::util;
+
+pub fn do_genreqs(host: &str, fprs_path: &Path) -> Result<()> {
+    let file = File::open(fprs_path)?;
+    let fingerprints: Vec<String> = io::BufReader::new(file).lines().flatten().collect();
+
+    /* possible requests:
+     * /vks/v1/by-fingerprint/
+     * /vks/v1/by-keyid/
+     * /vks/v1/by-email/
+     */
+
+    let mut rng = thread_rng();
+    let mut stdout = io::LineWriter::new(io::stdout());
+    loop {
+        let result = match rng.gen_range(0, 3) {
+            0 => {
+                let email = util::gen_email(rng.gen_range(0, fingerprints.len() as u64));
+                stdout.write_fmt(format_args!("GET {}/vks/v1/by-email/{}\n", host, email))
+            }
+            1 => {
+                let random_fpr = fingerprints.choose(&mut rng).unwrap();
+                stdout.write_fmt(format_args!(
+                    "GET {}/vks/v1/by-keyid/{}\n",
+                    host,
+                    &random_fpr[24..40]
+                ))
+            }
+            _ => {
+                let random_fpr = fingerprints.choose(&mut rng).unwrap();
+                stdout.write_fmt(format_args!(
+                    "GET {}/vks/v1/by-fingerprint/{}\n",
+                    host, random_fpr
+                ))
+            }
+        };
+
+        if result.is_err() {
+            return Ok(());
+        }
+    }
+}

92  tester/src/main.rs  Normal file
@@ -0,0 +1,92 @@
+extern crate anyhow;
+extern crate clap;
+extern crate indicatif;
+extern crate rand;
+extern crate sequoia_openpgp as openpgp;
+extern crate serde_derive;
+
+use std::path::PathBuf;
+
+use anyhow::Result;
+
+use clap::{App, Arg, SubCommand};
+
+mod generate;
+mod genreqs;
+mod util;
+
+fn main() -> Result<()> {
+    let matches = App::new("Hagrid Tester")
+        .version("0.1")
+        .about("Control hagrid database externally")
+        .arg(
+            Arg::with_name("config")
+                .short("c")
+                .long("config")
+                .value_name("FILE")
+                .help("Sets a custom config file")
+                .takes_value(true),
+        )
+        .subcommand(
+            SubCommand::with_name("generate")
+                .about("Generate a test set of certificates")
+                .arg(
+                    Arg::with_name("cert count")
+                        .long("cert-count")
+                        .default_value("100000")
+                        .help("number of certifictes to generate"),
+                )
+                .arg(
+                    Arg::with_name("certs output file")
+                        .long("output-file")
+                        .default_value("keyring.pub.pgp")
+                        .help("path to file to store the certificates in"),
+                )
+                .arg(
+                    Arg::with_name("fingerprints output file")
+                        .long("fingerprints-file")
+                        .default_value("fingerprints.txt")
+                        .help("path to file to store fingerprints in"),
+                ),
+        )
+        .subcommand(
+            SubCommand::with_name("gen-reqs")
+                .about("generate requests")
+                .arg(
+                    Arg::with_name("fingerprints file")
+                        .long("fingerprints-file")
+                        .default_value("fingerprints.txt")
+                        .help("path to read fingerprints from"),
+                )
+                .arg(Arg::with_name("host").index(1).required(true)),
+        )
+        .get_matches();
+
+    if let Some(matches) = matches.subcommand_matches("generate") {
+        let count: u64 = matches.value_of("cert count").unwrap().parse().unwrap();
+        let output_certs: PathBuf = matches
+            .value_of("certs output file")
+            .unwrap()
+            .parse()
+            .unwrap();
+        let output_fprs: Option<PathBuf> = matches
+            .value_of("fingerprints output file")
+            .map(|s| s.parse().unwrap());
+        generate::do_generate(
+            count,
+            output_certs.as_path(),
+            output_fprs.as_ref().map(|f| f.as_path()),
+        )?;
+    } else if let Some(matches) = matches.subcommand_matches("gen-reqs") {
+        let host = matches.value_of("host").unwrap();
+        let fprs_file: PathBuf = matches
+            .value_of("fingerprints file")
+            .map(|s| s.parse().unwrap())
+            .unwrap();
+        genreqs::do_genreqs(host, fprs_file.as_path())?;
+    } else {
+        println!("{}", matches.usage());
+    }
+
+    Ok(())
+}

3  tester/src/util.rs  Normal file
@@ -0,0 +1,3 @@
+pub fn gen_email(i: u64) -> String {
+    format!("{:07}@hagrid.invalid", i)
+}
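The tester's pieces hang together through util::gen_email: certificate generation and by-email request generation derive addresses from the same index, so a generated request stream only asks for addresses that exist in the generated keyring (assuming the fingerprints file came from the generate step). A standalone illustration of the address format, not part of the commit:

    // Copy of tester/src/util.rs::gen_email, shown here only to illustrate the
    // deterministic address format: the index is zero-padded to seven digits
    // under the non-routable hagrid.invalid domain.
    fn gen_email(i: u64) -> String {
        format!("{:07}@hagrid.invalid", i)
    }

    fn main() {
        assert_eq!(gen_email(42), "0000042@hagrid.invalid");
        assert_eq!(gen_email(1234567), "1234567@hagrid.invalid");
    }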