mirror of
https://gitlab.com/keys.openpgp.org/hagrid.git
synced 2025-10-07 17:12:47 +02:00
Compare commits
179 Commits
v1.3.0
...
test-via-n
Author | SHA1 | Date | |
---|---|---|---|
|
65a6902c77 | ||
|
893442bc4e | ||
|
178dfb9dec | ||
|
2e9a14f58e | ||
|
2395244b8f | ||
|
8d21fde2c9 | ||
|
df19ececc3 | ||
|
9e0409bbac | ||
|
deefbfabe6 | ||
|
62a6248b29 | ||
|
3bb755d8d9 | ||
|
8e8cb34522 | ||
|
9ad13a30a0 | ||
|
63a4445c9f | ||
|
3ce6e8b495 | ||
|
bf67b3714e | ||
|
f54d6ff283 | ||
|
766e97107e | ||
|
7f6c4f88aa | ||
|
6e7fb88000 | ||
|
1796989bc3 | ||
|
e389e64c07 | ||
|
86b89ac7bc | ||
|
5720dbe454 | ||
|
20ebdbd0e2 | ||
|
090a6f222a | ||
|
74c25c9d9b | ||
|
07804b8833 | ||
|
8795469b52 | ||
|
29ac3534c1 | ||
|
b6ad3f3705 | ||
|
d9741fad8f | ||
|
76ec3eed82 | ||
|
0d868ce27e | ||
|
d32b48885e | ||
|
b11f7dc7b3 | ||
|
f8c4871b61 | ||
|
93aa79e979 | ||
|
0fe99ba962 | ||
|
0dceaa454f | ||
|
6060fcf0bc | ||
|
3285f19d09 | ||
|
82f48d3a4e | ||
|
5f819e8004 | ||
|
d7b5796abf | ||
|
5beafb2881 | ||
|
d8fcaa7d5e | ||
|
fb0d0c24c4 | ||
|
34d056ea55 | ||
|
eb4ffd59f4 | ||
|
5ed05975e7 | ||
|
9b6b495f56 | ||
|
9adeb4d544 | ||
|
015e698725 | ||
|
8b89ab112a | ||
|
7d3194dd25 | ||
|
5aa404fc32 | ||
|
5b28cedf37 | ||
|
12f0eef5be | ||
|
94bf37a6a3 | ||
|
ce8a6deed0 | ||
|
df221eaf2b | ||
|
7532ff4b22 | ||
|
ca7d0406cf | ||
|
784d866da0 | ||
|
6a4e20a41f | ||
|
e7b7b994ce | ||
|
4a6ba18b33 | ||
|
d9afbd151c | ||
|
d7de01d023 | ||
|
eb36332f8b | ||
|
e2594b019c | ||
|
81b333bd43 | ||
|
8fad06e11d | ||
|
9c4b51fa61 | ||
|
9f5d8b3706 | ||
|
24034536e8 | ||
|
c901336b12 | ||
|
1442bed329 | ||
|
5757ac2819 | ||
|
0027a25486 | ||
|
012be246d1 | ||
|
48ef918d51 | ||
|
333727a6dc | ||
|
a7186bb6df | ||
|
83b0515274 | ||
|
b296157c08 | ||
|
abeafbe3d4 | ||
|
d35136b0d6 | ||
|
9acb4b52db | ||
|
c0c8247c42 | ||
|
f8982939aa | ||
|
b66fb67302 | ||
|
84cfb5afaf | ||
|
e966c1fbb7 | ||
|
70b0eeb3e7 | ||
|
7156d92246 | ||
|
893f99c460 | ||
|
90d00637a0 | ||
|
3a3aba5db1 | ||
|
0a829824dc | ||
|
51a66d643d | ||
|
2059c69226 | ||
|
0bea4f0f2a | ||
|
4d57dc1eb2 | ||
|
0cfd412907 | ||
|
9094b09b27 | ||
|
c28e6af441 | ||
|
37e6b2cb09 | ||
|
fefebaaffe | ||
|
9bc3ccecac | ||
|
09072200d6 | ||
|
5399e6c2d3 | ||
|
77372abb7c | ||
|
0200c15266 | ||
|
e4aac748be | ||
|
8b6049cb45 | ||
|
45402ddd07 | ||
|
e85e414619 | ||
|
fcc9689ef3 | ||
|
896206d6ca | ||
|
f4699a4545 | ||
|
57a8e3a3a8 | ||
|
1f67668500 | ||
|
27b68dc826 | ||
|
535668c507 | ||
|
c77bf9d3db | ||
|
fec6763b75 | ||
|
80df057617 | ||
|
5731f8aa2b | ||
|
62b936864d | ||
|
4f86585ac3 | ||
|
8db33156c3 | ||
|
b8fdaeb3c6 | ||
|
7df4d76d5d | ||
|
e72c647505 | ||
|
d2ac58b3fa | ||
|
c541c19622 | ||
|
58959e112e | ||
|
e90a2e2888 | ||
|
ee82a078ea | ||
|
50f80ebade | ||
|
709e358800 | ||
|
da6267887e | ||
|
31f4ff704f | ||
|
871cae1e24 | ||
|
f024b0bffe | ||
|
a5294b07cb | ||
|
c6aa0b3fdb | ||
|
3bceb608e8 | ||
|
f6b1f3cc73 | ||
|
3293dd8f78 | ||
|
c7a032eb69 | ||
|
475bcbffb8 | ||
|
dafed3d492 | ||
|
a504b0ea12 | ||
|
df6bfb2d84 | ||
|
b5b5879474 | ||
|
5778aaed84 | ||
|
7beb5209af | ||
|
4787816581 | ||
|
359475f89f | ||
|
253d672d47 | ||
|
e0aeef7ddc | ||
|
44db398a1c | ||
|
8ea89d3e0e | ||
|
0d25da7138 | ||
|
e0f8352ac6 | ||
|
dca8afa1e6 | ||
|
ea44f52a16 | ||
|
b4d92f0ec1 | ||
|
26ef2f6e1c | ||
|
cfd9fd8eb3 | ||
|
13ddd4ff3a | ||
|
a9440c6d0a | ||
|
fe2337507a | ||
|
36dff563fc | ||
|
da5648488b | ||
|
7f304929ea |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -7,3 +7,5 @@
|
||||
target
|
||||
*.po~
|
||||
/dist/templates/localized
|
||||
|
||||
result
|
||||
|
@@ -1,13 +1,11 @@
|
||||
build, test and lint:
|
||||
image: "rust:1-bullseye"
|
||||
image: "nixos/nix:2.3.16"
|
||||
interruptible: true
|
||||
script:
|
||||
- apt update -qy
|
||||
- apt install -qy build-essential pkg-config clang libclang-dev nettle-dev gettext zsh
|
||||
- rustup component add clippy
|
||||
- rustup component add rustfmt
|
||||
- ./make-translated-templates
|
||||
- cargo build
|
||||
- cargo clippy --tests --no-deps --workspace
|
||||
- cargo fmt --all -- --check
|
||||
- cargo test --all
|
||||
- nix-shell --command true
|
||||
- nix-shell --command ./make-translated-templates
|
||||
- nix-shell --command cargo build
|
||||
- nix-shell --command cargo clippy --tests --no-deps --workspace
|
||||
- nix-shell --command cargo fmt --all -- --check
|
||||
- nix-shell --command cargo test --all
|
||||
- nix-shell --command cargo test --workspace
|
||||
|
1973
Cargo.lock
generated
1973
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
114
Cargo.toml
114
Cargo.toml
@@ -1,64 +1,104 @@
|
||||
[package]
|
||||
name = "hagrid"
|
||||
version = "1.3.0"
|
||||
version = "2.1.0"
|
||||
authors = ["Vincent Breitmoser <look@my.amazin.horse>", "Kai Michaelis <kai@sequoia-pgp.org>", "Justus Winter <justus@sequoia-pgp.org>"]
|
||||
build = "build.rs"
|
||||
default-run = "hagrid"
|
||||
edition = "2018"
|
||||
edition = "2024"
|
||||
rust-version = "1.86"
|
||||
resolver = "3"
|
||||
|
||||
[workspace]
|
||||
members = [
|
||||
"database",
|
||||
"hagridctl",
|
||||
"tester",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
hagrid-database = { path = "database" }
|
||||
chrono = "0.4"
|
||||
[workspace.dependencies]
|
||||
anyhow = "1"
|
||||
rocket = { version = "0.5", features = [ "json" ] }
|
||||
rocket_dyn_templates = { version = "0.1", features = ["handlebars"] }
|
||||
rocket_codegen = "0.5"
|
||||
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
|
||||
multipart = "0"
|
||||
serde = "1"
|
||||
serde_derive = "1"
|
||||
serde_json = "1"
|
||||
time = "0.1"
|
||||
tempfile = "3"
|
||||
structopt = "0.2"
|
||||
url = "1"
|
||||
num_cpus = "1"
|
||||
ring = "0.13"
|
||||
hagrid-database = { path = "database" }
|
||||
aes-gcm = "0.10"
|
||||
base64 = "0.10"
|
||||
uuid = { version = "0.7", features = [ "v4" ] }
|
||||
rocket_prometheus = "0.10"
|
||||
lazy_static = "1"
|
||||
chrono = "0.4"
|
||||
clap = ">= 4.5.37"
|
||||
fs2 = "0.4"
|
||||
gettext = "0.4"
|
||||
gettext-macros = "0.6"
|
||||
gettext-utils = "0.1"
|
||||
gettext = "0.4"
|
||||
glob = "0.3"
|
||||
hex = "0.3"
|
||||
hyperx = "1.4"
|
||||
# this is a slightly annoying update, so keeping this back for now
|
||||
lettre = { version = "=0.10.0-rc.5", default-features = false, features = ["builder", "file-transport", "sendmail-transport"] }
|
||||
idna = "0.1"
|
||||
indicatif = "0.11"
|
||||
lettre = { version = "=0.10.0-rc.5", default-features = false }
|
||||
log = ">= 0.4.27"
|
||||
multipart = "~0.18"
|
||||
num_cpus = "1"
|
||||
pathdiff = "0.1"
|
||||
r2d2 = "0.8"
|
||||
r2d2_sqlite = "0.24"
|
||||
rand = "0.6"
|
||||
regex = "1"
|
||||
rocket = ">= 0.5.1"
|
||||
rocket_codegen = ">= 0.5.1"
|
||||
rocket_dyn_templates = ">= 0.2.0"
|
||||
rocket_i18n = { git = "https://github.com/Valodim/rocket_i18n", branch = "go-async", default-features = false }
|
||||
rocket_prometheus = ">= 0.10.1"
|
||||
rusqlite = "0.31"
|
||||
self_cell = "1"
|
||||
serde = "1.0"
|
||||
serde_derive = "1"
|
||||
serde_json = "1"
|
||||
sha2 = "0.10"
|
||||
tempfile = "3"
|
||||
time = "0.1"
|
||||
toml = "0.5"
|
||||
url = "1"
|
||||
uuid = "0.7"
|
||||
vergen = "3"
|
||||
walkdir = "2"
|
||||
zbase32 = "0.1"
|
||||
sequoia-openpgp = { version = "=1.17.0", default-features = false }
|
||||
rstest = ">= 0.26.1"
|
||||
|
||||
[dependencies.rocket_i18n]
|
||||
# git = "https://github.com/Plume-org/rocket_i18n"
|
||||
git = "https://github.com/Valodim/rocket_i18n"
|
||||
branch = "go-async"
|
||||
default-features = false
|
||||
features = ["rocket"]
|
||||
[dependencies]
|
||||
hagrid-database = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
rocket = { workspace = true, features = ["json"] }
|
||||
rocket_dyn_templates = { workspace = true, features = ["handlebars"] }
|
||||
rocket_codegen = { workspace = true }
|
||||
sequoia-openpgp = { workspace = true, features = ["crypto-openssl"] }
|
||||
multipart = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_derive = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
url = { workspace = true }
|
||||
num_cpus = { workspace = true }
|
||||
aes-gcm = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
uuid = { workspace = true, features = ["v4"] }
|
||||
rocket_prometheus = { workspace = true }
|
||||
gettext-macros = { workspace = true }
|
||||
gettext-utils = { workspace = true }
|
||||
gettext = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
hyperx = { workspace = true }
|
||||
# this is a slightly annoying update, so keeping this back for now
|
||||
lettre = { workspace = true, features = ["builder", "file-transport", "sendmail-transport", "smtp-transport"] }
|
||||
rocket_i18n= { workspace = true, features = ["rocket"] }
|
||||
|
||||
[build-dependencies]
|
||||
vergen = "3"
|
||||
vergen = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
regex = "1"
|
||||
regex = { workspace = true }
|
||||
rstest = { workspace = true }
|
||||
|
||||
[[bin]]
|
||||
name = "hagrid"
|
||||
path = "src/main.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "hagrid-delete"
|
||||
path = "src/delete.rs"
|
||||
|
41
README.md
41
README.md
@@ -10,6 +10,11 @@ Please note that Hagrid is built and maintained only for the service at
|
||||
keys.openpgp.org. It is not maintained or officially supported as
|
||||
deployable software.
|
||||
|
||||
Compatibility note: Hagrid v2.0 uses an sqlite certificate store instead of the
|
||||
previous file based database. This means that it also no longer supports serving
|
||||
certificates directly via reverse proxy. You can us hagridctl to dump and import
|
||||
an old database.
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
@@ -37,10 +42,10 @@ Additionally, some external dependencies are required.
|
||||
Get them (on Debian or Ubuntu) with
|
||||
|
||||
```bash
|
||||
sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext
|
||||
sudo apt install gnutls-bin libssl-dev gcc llvm-dev libclang-dev build-essential pkg-config gettext libsqlite3-dev
|
||||
```
|
||||
|
||||
After Rust and the other dependencies are installed, copy the config file, then simply compile and run:
|
||||
After Rust and the other dependencies are installed, copy the config file (or run `just init`), then simply compile and run:
|
||||
|
||||
```bash
|
||||
cd hagrid
|
||||
@@ -55,19 +60,31 @@ will be statically built, and can be copied anywhere. You will also need to
|
||||
adjust `Rocket.toml` accordingly. Hagrid uses `sendmail` for mailing, so you
|
||||
also need a working local mailer setup.
|
||||
|
||||
Reverse Proxy
|
||||
-------------
|
||||
# Development Dependencies
|
||||
|
||||
Hagrid is designed to defer lookups to reverse proxy server like Nginx.
|
||||
Lookups via `/vks/v1/by-finingerprint`, `/vks/v1/by-keyid`, and
|
||||
`/vks/v1/by-email` can be handled by a robust and performant HTTP server.
|
||||
A sample configuration for nginx is part of the repository (`nginx.conf`,
|
||||
`hagrid-routes.conf`).
|
||||
Note that we make use of
|
||||
[ngx_http_lua_module](https://github.com/openresty/lua-nginx-module) to
|
||||
perform some request rewrites.
|
||||
List of dependencies which are required or could be helpful for contribution
|
||||
to the project.
|
||||
|
||||
| Category | Type | Name | Version | Verified Version | Notes |
|
||||
|:------------------------:|:-----------:|:------------------------------------------:|:----------:|:----------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Shell | Obligatory | [Zsh](https://zsh.sourceforge.io/) | \>= 5.9 | 5.9 | Required for [translated templates generation](./make-translated-templates) script. |
|
||||
| VCS/SCM | Obligatory | [Git](https://git-scm.com/) | \>= 2.47.3 | 2.47.3 | Obviously, if going to get this repository you'll have the `git` CLI as dependency. But it also used in `just upgrade-rust` recipe to automate Rust upgrades. |
|
||||
| Shell | Preferrable | [Bash](https://www.gnu.org/software/bash/) | \>= 5.2.15 | 5.2.15 | Required for scripts embedded into [`justfile`](./justfile). If you don't want to use [`just`](https://just.systems/) you probably don't need this shell. |
|
||||
| Command Runner | Preferrable | [`just`](https://just.systems/) | \>= 1.42.4 | 1.40.0 | All commands from [`justfile`](./justfile) could be run without [`just`](https://just.systems/), but it makes development more convenient. |
|
||||
| SQlite Prompt | Preferrable | [`sqlite3`](https://sqlite.org/cli.html) | \>= 3.40.1 | 3.40.1 | Used by [`just db`](./justfile) recipe to open interactive prompt to SQlite database of the project. |
|
||||
| Command Line HTTP client | Preferrable | [`curl`](https://curl.se/) | \>= 8.14.1 | 8.14.1 | Used by `just _rust-stable-version` recipe to determine current stable version of Rust. Indirectly, used by `just upgrade-rust` depends on `curl` through `_rust-stable-version` recipe. |
|
||||
| Text stream editor | Preferrable | [`sed`](https://www.gnu.org/software/sed/) | \>= 4.9 | 4.9 | Similar to `curl`, Used by `just _rust-stable-version` recipe to determine current stable version of Rust. Indirectly, used by `just upgrade-rust` depends on `curl` through `_rust-stable-version` recipe. |
|
||||
| TOML Query | Preferrable | [tomlq](https://crates.io/crates/tomlq) | \>= 0.2.2 | 0.2.2 | Similar to `curl`, Used by `just _rust-stable-version` recipe to determine current stable version of Rust. Indirectly, used by `just upgrade-rust` depends on `curl` through `_rust-stable-version` recipe. |
|
||||
|
||||
Community
|
||||
---------
|
||||
|
||||
We're in `#hagrid` on OFTC IRC, also reachable via Matrix as `#hagrid:stratum0.org`.
|
||||
|
||||
# Contribution
|
||||
## Housekeeping
|
||||
### Rust version upgrade
|
||||
|
||||
Take a look at `just upgrade-rust` recipe.
|
||||
It bumps used version of Rust to the current stable version
|
||||
(as [declared by manifest](https://static.rust-lang.org/dist/channel-rust-stable.toml)).
|
||||
|
4
build.rs
4
build.rs
@@ -1,6 +1,4 @@
|
||||
extern crate vergen;
|
||||
|
||||
use vergen::{generate_cargo_keys, ConstantsFlags};
|
||||
use vergen::{ConstantsFlags, generate_cargo_keys};
|
||||
|
||||
fn main() {
|
||||
// Generate the 'cargo:' key output
|
||||
|
@@ -1,5 +1,3 @@
|
||||
# Disabled until we use at least Rust 1.49.0, which is the first
|
||||
# version that supports the msrv field.
|
||||
msrv = "1.58.1"
|
||||
msrv = "1.86"
|
||||
|
||||
too-many-arguments-threshold = 10
|
||||
|
@@ -1,70 +0,0 @@
|
||||
#!/usr/bin/env zsh
|
||||
|
||||
set -e
|
||||
|
||||
[[ $# == 4 || $# == 5 ]] || { echo "Usage: $0 keys-internal-dir keys-external-dir encryption-key backup-dir [date]" >&2; exit 1; }
|
||||
|
||||
local keys_internal_dir=$1
|
||||
local keys_external_dir=$2
|
||||
local encryption_key=$3
|
||||
local backup_dir=$4
|
||||
# backupdate in format YYYY-MM-DD
|
||||
local backupdate=$5
|
||||
|
||||
[[ -d $keys_internal_dir ]] || { echo "Missing dir $keys_internal_dir" >&2; exit 1; }
|
||||
[[ -d $keys_internal_dir/log ]] || { echo "Missing dir $keys_internal_dir/log" >&2; exit 1; }
|
||||
[[ -d $keys_external_dir ]] || { echo "Missing dir $keys_external_dir" >&2; exit 1; }
|
||||
[[ -d $keys_external_dir/pub ]] || { echo "Missing dir $keys_external_dir/pub" >&2; exit 1; }
|
||||
[[ -f $encryption_key ]] || { echo "Missing file $encryption_key" >&2; exit 1; }
|
||||
[[ -d $backup_dir ]] || { echo "Missing dir $backup_dir" >&2; exit 1; }
|
||||
|
||||
if [[ -z $backupdate ]]; then
|
||||
# for EPOCHSECONDS
|
||||
zmodload zsh/datetime
|
||||
backupdate="$(date --date=@$(( EPOCHSECONDS - 24*60*60 )) +'%Y-%m-%d')"
|
||||
fi
|
||||
|
||||
local log_file="$keys_internal_dir/log/$backupdate"
|
||||
[[ -f $log_file ]] || { echo "Missing dir $log_file" >&2; exit 1; }
|
||||
|
||||
local tempdir=$(mktemp -d)
|
||||
trap "rm -rf ${(q)tempdir}" EXIT
|
||||
|
||||
local keylist_file=$tempdir/keylist
|
||||
|
||||
integer count=0
|
||||
cat $log_file | cut -d' ' -f2 | sort -u | while read -r fp; do
|
||||
key_file=${fp[1,2]}/${fp[3,4]}/${fp[5,$]}
|
||||
[[ -f $keys_external_dir/pub/$key_file ]] || { echo "Missing file $key_file" >&2; exit 1; }
|
||||
echo -E - $key_file
|
||||
count+=1
|
||||
done > $keylist_file
|
||||
|
||||
local backup_file_unencrypted=$tempdir/$backupdate.tar.gz
|
||||
local backup_file_encrypted=$tempdir/$backupdate.tar.gz.pgp
|
||||
|
||||
tar \
|
||||
--create \
|
||||
--gzip \
|
||||
--file $backup_file_unencrypted \
|
||||
--verbatim-files-from \
|
||||
--directory $keys_external_dir/pub \
|
||||
--files-from $keylist_file
|
||||
|
||||
GNUPGHOME=$tempdir gpg \
|
||||
--quiet \
|
||||
--no-keyring \
|
||||
--compress-level 0 \
|
||||
--recipient-file $encryption_key \
|
||||
--output $backup_file_encrypted \
|
||||
--encrypt $backup_file_unencrypted
|
||||
|
||||
backup_file=$backup_dir/$backupdate.tar.gz.pgp
|
||||
mv $backup_file_encrypted $backup_file
|
||||
|
||||
sha256sum="$(cd $backup_dir; sha256sum $backupdate.tar.gz.pgp)"
|
||||
echo $sha256sum >> $backup_dir/SHA256SUM
|
||||
|
||||
echo "finished backup for $backupdate, total keys $count"
|
||||
ls -l $backup_file
|
||||
echo $sha256sum
|
@@ -2,26 +2,31 @@
|
||||
name = "hagrid-database"
|
||||
version = "0.1.0"
|
||||
authors = ["Kai Michaelis <kai@sequoia-pgp.org>"]
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
|
||||
log = "0"
|
||||
rand = "0.6"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_derive = "1"
|
||||
serde_json = "1"
|
||||
time = "0.1"
|
||||
tempfile = "3"
|
||||
url = "1"
|
||||
hex = "0.3"
|
||||
base64 = "0.10"
|
||||
pathdiff = "0.1"
|
||||
idna = "0.1"
|
||||
fs2 = "0.4"
|
||||
walkdir = "2"
|
||||
chrono = "0.4"
|
||||
zbase32 = "0.1"
|
||||
anyhow = { workspace = true }
|
||||
sequoia-openpgp = { workspace = true, features = ["crypto-openssl"] }
|
||||
log = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_derive = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
url = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
pathdiff = { workspace = true }
|
||||
idna = { workspace = true }
|
||||
fs2 = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
zbase32 = { workspace = true }
|
||||
r2d2 = { workspace = true }
|
||||
r2d2_sqlite = { workspace = true }
|
||||
rusqlite = { workspace = true, features = ["trace"] }
|
||||
self_cell = { workspace = true }
|
||||
|
||||
[lib]
|
||||
name = "hagrid_database"
|
||||
|
1018
database/src/fs.rs
1018
database/src/fs.rs
File diff suppressed because it is too large
Load Diff
@@ -3,31 +3,14 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::str::FromStr;
|
||||
|
||||
use openpgp::serialize::SerializeInto;
|
||||
use sequoia_openpgp::{
|
||||
Cert, packet::UserID, parse::Parse, serialize::SerializeInto, types::KeyFlags,
|
||||
};
|
||||
|
||||
use chrono::prelude::Utc;
|
||||
|
||||
#[macro_use]
|
||||
extern crate anyhow;
|
||||
use anyhow::Result;
|
||||
extern crate fs2;
|
||||
extern crate idna;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate chrono;
|
||||
extern crate hex;
|
||||
extern crate pathdiff;
|
||||
extern crate rand;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
extern crate tempfile;
|
||||
extern crate time;
|
||||
extern crate url;
|
||||
extern crate walkdir;
|
||||
extern crate zbase32;
|
||||
|
||||
extern crate sequoia_openpgp as openpgp;
|
||||
use openpgp::{packet::UserID, parse::Parse, types::KeyFlags, Cert};
|
||||
use anyhow::anyhow;
|
||||
use log::{error, info};
|
||||
|
||||
pub mod types;
|
||||
use types::{Email, Fingerprint, KeyID};
|
||||
@@ -35,14 +18,14 @@ use types::{Email, Fingerprint, KeyID};
|
||||
pub mod sync;
|
||||
pub mod wkd;
|
||||
|
||||
mod fs;
|
||||
pub use self::fs::Filesystem as KeyDatabase;
|
||||
mod sqlite;
|
||||
pub use crate::sqlite::Sqlite;
|
||||
|
||||
mod stateful_tokens;
|
||||
pub use stateful_tokens::StatefulTokens;
|
||||
|
||||
mod openpgp_utils;
|
||||
use openpgp_utils::{is_status_revoked, tpk_clean, tpk_filter_alive_emails, tpk_to_string, POLICY};
|
||||
use openpgp_utils::{POLICY, is_status_revoked, tpk_clean, tpk_filter_alive_emails, tpk_to_string};
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
@@ -66,7 +49,7 @@ impl Query {
|
||||
impl FromStr for Query {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(term: &str) -> Result<Self> {
|
||||
fn from_str(term: &str) -> anyhow::Result<Self> {
|
||||
use self::Query::*;
|
||||
|
||||
let looks_like_short_key_id =
|
||||
@@ -106,6 +89,14 @@ impl ImportResult {
|
||||
ImportResult::Unchanged(status) => status,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_tpk_status(&self) -> &TpkStatus {
|
||||
match self {
|
||||
ImportResult::New(status) => status,
|
||||
ImportResult::Updated(status) => status,
|
||||
ImportResult::Unchanged(status) => status,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
@@ -120,62 +111,73 @@ pub enum RegenerateResult {
|
||||
Unchanged,
|
||||
}
|
||||
|
||||
pub trait Database: Sync + Send {
|
||||
type MutexGuard;
|
||||
pub trait DatabaseTransaction<'a> {
|
||||
type TempCert;
|
||||
|
||||
fn commit(self) -> anyhow::Result<()>;
|
||||
|
||||
fn link_email(&self, email: &Email, fpr: &Fingerprint) -> anyhow::Result<()>;
|
||||
fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> anyhow::Result<()>;
|
||||
|
||||
fn link_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> anyhow::Result<()>;
|
||||
fn unlink_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> anyhow::Result<()>;
|
||||
|
||||
fn write_to_temp(&self, content: &[u8]) -> anyhow::Result<Self::TempCert>;
|
||||
fn move_tmp_to_full(&self, content: Self::TempCert, fpr: &Fingerprint) -> anyhow::Result<()>;
|
||||
fn move_tmp_to_published(
|
||||
&self,
|
||||
content: Self::TempCert,
|
||||
fpr: &Fingerprint,
|
||||
) -> anyhow::Result<()>;
|
||||
fn move_tmp_to_published_wkd(
|
||||
&self,
|
||||
content: Option<Self::TempCert>,
|
||||
fpr: &Fingerprint,
|
||||
) -> anyhow::Result<()>;
|
||||
fn write_to_quarantine(&self, fpr: &Fingerprint, content: &[u8]) -> anyhow::Result<()>;
|
||||
}
|
||||
|
||||
pub trait Database<'a>: Sync + Send {
|
||||
type Transaction: DatabaseTransaction<'a>;
|
||||
|
||||
/// Lock the DB for a complex update.
|
||||
///
|
||||
/// All basic write operations are atomic so we don't need to lock
|
||||
/// read operations to ensure that we return something sane.
|
||||
fn lock(&self) -> Result<Self::MutexGuard>;
|
||||
fn transaction(&'a self) -> anyhow::Result<Self::Transaction>;
|
||||
|
||||
/// Queries the database using Fingerprint, KeyID, or
|
||||
/// email-address, returning the primary fingerprint.
|
||||
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint>;
|
||||
|
||||
fn link_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
|
||||
fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> Result<()>;
|
||||
|
||||
fn link_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
|
||||
fn unlink_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
|
||||
|
||||
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
|
||||
fn by_kid(&self, kid: &KeyID) -> Option<String>;
|
||||
fn by_email(&self, email: &Email) -> Option<String>;
|
||||
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
|
||||
fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
|
||||
|
||||
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
|
||||
fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
|
||||
|
||||
fn get_last_log_entry(&self) -> anyhow::Result<Fingerprint>;
|
||||
|
||||
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> anyhow::Result<()>;
|
||||
|
||||
fn check_link_fpr(
|
||||
&self,
|
||||
fpr: &Fingerprint,
|
||||
target: &Fingerprint,
|
||||
) -> Result<Option<Fingerprint>>;
|
||||
|
||||
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
|
||||
fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
|
||||
|
||||
fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert>;
|
||||
fn move_tmp_to_full(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
|
||||
fn move_tmp_to_published(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
|
||||
fn move_tmp_to_published_wkd(
|
||||
&self,
|
||||
content: Option<Self::TempCert>,
|
||||
fpr: &Fingerprint,
|
||||
) -> Result<()>;
|
||||
fn write_to_quarantine(&self, fpr: &Fingerprint, content: &[u8]) -> Result<()>;
|
||||
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()>;
|
||||
|
||||
fn check_consistency(&self) -> Result<()>;
|
||||
) -> anyhow::Result<Option<Fingerprint>>;
|
||||
fn check_consistency(&self) -> anyhow::Result<()>;
|
||||
|
||||
/// Queries the database using Fingerprint, KeyID, or
|
||||
/// email-address.
|
||||
fn lookup(&self, term: &Query) -> Result<Option<Cert>> {
|
||||
fn lookup(&self, term: &Query) -> anyhow::Result<Option<Cert>> {
|
||||
use self::Query::*;
|
||||
let armored = match term {
|
||||
ByFingerprint(ref fp) => self.by_fpr(fp),
|
||||
ByKeyID(ref keyid) => self.by_kid(keyid),
|
||||
ByEmail(ref email) => self.by_email(email),
|
||||
ByFingerprint(fp) => self.by_fpr(fp),
|
||||
ByKeyID(keyid) => self.by_kid(keyid),
|
||||
ByEmail(email) => self.by_email(email),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
@@ -197,10 +199,10 @@ pub trait Database: Sync + Send {
|
||||
/// - abort if any problems come up!
|
||||
/// 5. Move full and published temporary Cert to their location
|
||||
/// 6. Update all symlinks
|
||||
fn merge(&self, new_tpk: Cert) -> Result<ImportResult> {
|
||||
fn merge(&'a self, new_tpk: Cert) -> anyhow::Result<ImportResult> {
|
||||
let fpr_primary = Fingerprint::try_from(new_tpk.primary_key().fingerprint())?;
|
||||
|
||||
let _lock = self.lock()?;
|
||||
let tx = self.transaction()?;
|
||||
|
||||
let known_uids: Vec<UserID> = new_tpk
|
||||
.userids()
|
||||
@@ -223,7 +225,12 @@ pub trait Database: Sync + Send {
|
||||
|
||||
let is_ok = is_revoked
|
||||
|| full_tpk_new.keys().subkeys().next().is_some()
|
||||
|| full_tpk_new.userids().next().is_some();
|
||||
|| full_tpk_new.userids().next().is_some()
|
||||
|| full_tpk_new
|
||||
.primary_key()
|
||||
.self_signatures()
|
||||
.next()
|
||||
.is_some();
|
||||
if !is_ok {
|
||||
// self.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
|
||||
return Err(anyhow!("Not a well-formed key!"));
|
||||
@@ -245,14 +252,13 @@ pub trait Database: Sync + Send {
|
||||
|
||||
let mut email_status: Vec<_> = full_tpk_new
|
||||
.userids()
|
||||
.map(|binding| {
|
||||
.filter_map(|binding| {
|
||||
if let Ok(email) = Email::try_from(binding.userid()) {
|
||||
Some((binding, email))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.flatten()
|
||||
.filter(|(binding, _)| binding.self_signatures().next().is_some())
|
||||
.filter(|(binding, email)| {
|
||||
known_uids.contains(binding.userid()) || published_emails.contains(email)
|
||||
@@ -294,8 +300,7 @@ pub trait Database: Sync + Send {
|
||||
.userids()
|
||||
.filter(|binding| !is_status_revoked(binding.revocation_status(&POLICY, None)))
|
||||
.map(|binding| binding.userid())
|
||||
.map(|uid| Email::try_from(uid).ok())
|
||||
.flatten()
|
||||
.flat_map(Email::try_from)
|
||||
.any(|unrevoked_email| &unrevoked_email == *email);
|
||||
!has_unrevoked_userid
|
||||
})
|
||||
@@ -308,24 +313,24 @@ pub trait Database: Sync + Send {
|
||||
.map(|fpr| self.check_link_fpr(fpr, &fpr_primary))
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>>>();
|
||||
.collect::<anyhow::Result<Vec<_>>>();
|
||||
|
||||
if fpr_checks.is_err() {
|
||||
self.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
|
||||
tx.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
|
||||
}
|
||||
let fpr_checks = fpr_checks?;
|
||||
|
||||
let fpr_not_linked = fpr_checks.into_iter().flatten();
|
||||
|
||||
let full_tpk_tmp = self.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
|
||||
let full_tpk_tmp = tx.write_to_temp(&tpk_to_string(&full_tpk_new)?)?;
|
||||
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
|
||||
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
|
||||
// these are very unlikely to fail. but if it happens,
|
||||
// database consistency might be compromised!
|
||||
self.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
|
||||
self.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
|
||||
self.regenerate_wkd(&fpr_primary, &published_tpk_clean)?;
|
||||
tx.move_tmp_to_full(full_tpk_tmp, &fpr_primary)?;
|
||||
tx.move_tmp_to_published(published_tpk_tmp, &fpr_primary)?;
|
||||
self.regenerate_wkd(&tx, &fpr_primary, &published_tpk_clean)?;
|
||||
|
||||
let published_tpk_changed = published_tpk_old
|
||||
.map(|tpk| tpk != published_tpk_clean)
|
||||
@@ -335,13 +340,13 @@ pub trait Database: Sync + Send {
|
||||
}
|
||||
|
||||
for fpr in fpr_not_linked {
|
||||
if let Err(e) = self.link_fpr(&fpr, &fpr_primary) {
|
||||
if let Err(e) = tx.link_fpr(&fpr, &fpr_primary) {
|
||||
info!("Error ensuring symlink! {} {} {:?}", &fpr, &fpr_primary, e);
|
||||
}
|
||||
}
|
||||
|
||||
for revoked_email in newly_revoked_emails {
|
||||
if let Err(e) = self.unlink_email(revoked_email, &fpr_primary) {
|
||||
if let Err(e) = tx.unlink_email(revoked_email, &fpr_primary) {
|
||||
info!(
|
||||
"Error ensuring symlink! {} {} {:?}",
|
||||
&fpr_primary, &revoked_email, e
|
||||
@@ -349,6 +354,8 @@ pub trait Database: Sync + Send {
|
||||
}
|
||||
}
|
||||
|
||||
tx.commit()?;
|
||||
|
||||
if is_update {
|
||||
Ok(ImportResult::Updated(TpkStatus {
|
||||
is_revoked,
|
||||
@@ -379,7 +386,7 @@ pub trait Database: Sync + Send {
|
||||
&self,
|
||||
fpr_primary: &Fingerprint,
|
||||
known_addresses: &[Email],
|
||||
) -> Result<TpkStatus> {
|
||||
) -> anyhow::Result<TpkStatus> {
|
||||
let tpk_full = self
|
||||
.by_fpr_full(fpr_primary)
|
||||
.ok_or_else(|| anyhow!("Key not in database!"))
|
||||
@@ -448,10 +455,14 @@ pub trait Database: Sync + Send {
|
||||
/// - abort if any problems come up!
|
||||
/// 5. Move full and published temporary Cert to their location
|
||||
/// 6. Update all symlinks
|
||||
fn set_email_published(&self, fpr_primary: &Fingerprint, email_new: &Email) -> Result<()> {
|
||||
let _lock = self.lock()?;
|
||||
fn set_email_published(
|
||||
&'a self,
|
||||
fpr_primary: &Fingerprint,
|
||||
email_new: &Email,
|
||||
) -> anyhow::Result<()> {
|
||||
let tx = self.transaction()?;
|
||||
|
||||
self.nolock_unlink_email_if_other(fpr_primary, email_new)?;
|
||||
self.unlink_email_if_other(&tx, fpr_primary, email_new)?;
|
||||
|
||||
let full_tpk = self
|
||||
.by_fpr_full(fpr_primary)
|
||||
@@ -469,8 +480,7 @@ pub trait Database: Sync + Send {
|
||||
.unwrap_or_default();
|
||||
let published_emails_old: Vec<Email> = published_uids_old
|
||||
.iter()
|
||||
.map(|uid| Email::try_from(uid).ok())
|
||||
.flatten()
|
||||
.flat_map(Email::try_from)
|
||||
.collect();
|
||||
|
||||
// println!("publishing: {:?}", &uid_new);
|
||||
@@ -486,41 +496,43 @@ pub trait Database: Sync + Send {
|
||||
|
||||
if !published_tpk_new
|
||||
.userids()
|
||||
.map(|binding| Email::try_from(binding.userid()))
|
||||
.flatten()
|
||||
.flat_map(|binding| Email::try_from(binding.userid()))
|
||||
.any(|email| email == *email_new)
|
||||
{
|
||||
return Err(anyhow!("Requested UserID not found!"));
|
||||
}
|
||||
|
||||
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
|
||||
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
|
||||
self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
|
||||
self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
|
||||
tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
|
||||
self.regenerate_wkd(&tx, fpr_primary, &published_tpk_clean)?;
|
||||
|
||||
self.update_write_log(fpr_primary);
|
||||
|
||||
if let Err(e) = self.link_email(email_new, fpr_primary) {
|
||||
if let Err(e) = tx.link_email(email_new, fpr_primary) {
|
||||
info!(
|
||||
"Error ensuring email symlink! {} -> {} {:?}",
|
||||
&email_new, &fpr_primary, e
|
||||
);
|
||||
}
|
||||
|
||||
tx.commit()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nolock_unlink_email_if_other(
|
||||
fn unlink_email_if_other(
|
||||
&self,
|
||||
tx: &Self::Transaction,
|
||||
fpr_primary: &Fingerprint,
|
||||
unlink_email: &Email,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let current_link_fpr =
|
||||
self.lookup_primary_fingerprint(&Query::ByEmail(unlink_email.clone()));
|
||||
if let Some(current_fpr) = current_link_fpr {
|
||||
if current_fpr != *fpr_primary {
|
||||
self.nolock_set_email_unpublished_filter(¤t_fpr, |uid| {
|
||||
self.set_email_unpublished_filter(tx, ¤t_fpr, |uid| {
|
||||
Email::try_from(uid)
|
||||
.map(|email| email != *unlink_email)
|
||||
.unwrap_or(false)
|
||||
@@ -545,18 +557,10 @@ pub trait Database: Sync + Send {
|
||||
/// 6. Update all symlinks
|
||||
fn set_email_unpublished_filter(
|
||||
&self,
|
||||
tx: &Self::Transaction,
|
||||
fpr_primary: &Fingerprint,
|
||||
email_remove: impl Fn(&UserID) -> bool,
|
||||
) -> Result<()> {
|
||||
let _lock = self.lock()?;
|
||||
self.nolock_set_email_unpublished_filter(fpr_primary, email_remove)
|
||||
}
|
||||
|
||||
fn nolock_set_email_unpublished_filter(
|
||||
&self,
|
||||
fpr_primary: &Fingerprint,
|
||||
email_remove: impl Fn(&UserID) -> bool,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let published_tpk_old = self
|
||||
.by_fpr(fpr_primary)
|
||||
.ok_or_else(|| anyhow!("Key not in database!"))
|
||||
@@ -564,16 +568,14 @@ pub trait Database: Sync + Send {
|
||||
|
||||
let published_emails_old: Vec<Email> = published_tpk_old
|
||||
.userids()
|
||||
.map(|binding| Email::try_from(binding.userid()))
|
||||
.flatten()
|
||||
.flat_map(|binding| Email::try_from(binding.userid()))
|
||||
.collect();
|
||||
|
||||
let published_tpk_new = published_tpk_old.retain_userids(|uid| email_remove(uid.userid()));
|
||||
|
||||
let published_emails_new: Vec<Email> = published_tpk_new
|
||||
.userids()
|
||||
.map(|binding| Email::try_from(binding.userid()))
|
||||
.flatten()
|
||||
.flat_map(|binding| Email::try_from(binding.userid()))
|
||||
.collect();
|
||||
|
||||
let unpublished_emails = published_emails_old
|
||||
@@ -581,15 +583,15 @@ pub trait Database: Sync + Send {
|
||||
.filter(|email| !published_emails_new.contains(email));
|
||||
|
||||
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
|
||||
let published_tpk_tmp = self.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
let published_tpk_tmp = tx.write_to_temp(&tpk_to_string(&published_tpk_clean)?)?;
|
||||
|
||||
self.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
|
||||
self.regenerate_wkd(fpr_primary, &published_tpk_clean)?;
|
||||
tx.move_tmp_to_published(published_tpk_tmp, fpr_primary)?;
|
||||
self.regenerate_wkd(tx, fpr_primary, &published_tpk_clean)?;
|
||||
|
||||
self.update_write_log(fpr_primary);
|
||||
|
||||
for unpublished_email in unpublished_emails {
|
||||
if let Err(e) = self.unlink_email(unpublished_email, fpr_primary) {
|
||||
if let Err(e) = tx.unlink_email(unpublished_email, fpr_primary) {
|
||||
info!(
|
||||
"Error deleting email symlink! {} -> {} {:?}",
|
||||
&unpublished_email, &fpr_primary, e
|
||||
@@ -600,19 +602,31 @@ pub trait Database: Sync + Send {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_email_unpublished(&self, fpr_primary: &Fingerprint, email_remove: &Email) -> Result<()> {
|
||||
self.set_email_unpublished_filter(fpr_primary, |uid| {
|
||||
fn set_email_unpublished(
|
||||
&'a self,
|
||||
fpr_primary: &Fingerprint,
|
||||
email_remove: &Email,
|
||||
) -> anyhow::Result<()> {
|
||||
let tx = self.transaction().unwrap();
|
||||
self.set_email_unpublished_filter(&tx, fpr_primary, |uid| {
|
||||
Email::try_from(uid)
|
||||
.map(|email| email != *email_remove)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
})?;
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_email_unpublished_all(&self, fpr_primary: &Fingerprint) -> Result<()> {
|
||||
self.set_email_unpublished_filter(fpr_primary, |_| false)
|
||||
fn set_email_unpublished_all(&'a self, fpr_primary: &Fingerprint) -> anyhow::Result<()> {
|
||||
let tx = self.transaction().unwrap();
|
||||
self.set_email_unpublished_filter(&tx, fpr_primary, |_| false)?;
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn regenerate_links(&self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
|
||||
fn regenerate_links(&'a self, fpr_primary: &Fingerprint) -> anyhow::Result<RegenerateResult> {
|
||||
let tx = self.transaction().unwrap();
|
||||
|
||||
let tpk = self
|
||||
.by_primary_fpr(fpr_primary)
|
||||
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
|
||||
@@ -620,11 +634,10 @@ pub trait Database: Sync + Send {
|
||||
|
||||
let published_emails: Vec<Email> = tpk
|
||||
.userids()
|
||||
.map(|binding| Email::try_from(binding.userid()))
|
||||
.flatten()
|
||||
.flat_map(|binding| Email::try_from(binding.userid()))
|
||||
.collect();
|
||||
|
||||
self.regenerate_wkd(fpr_primary, &tpk)?;
|
||||
self.regenerate_wkd(&tx, fpr_primary, &tpk)?;
|
||||
|
||||
let fingerprints = tpk_get_linkable_fprs(&tpk);
|
||||
|
||||
@@ -633,7 +646,7 @@ pub trait Database: Sync + Send {
|
||||
.map(|fpr| self.check_link_fpr(&fpr, fpr_primary))
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
.collect::<anyhow::Result<Vec<_>>>()?;
|
||||
|
||||
let fpr_not_linked = fpr_checks.into_iter().flatten();
|
||||
|
||||
@@ -642,14 +655,16 @@ pub trait Database: Sync + Send {
|
||||
|
||||
for fpr in fpr_not_linked {
|
||||
keys_linked += 1;
|
||||
self.link_fpr(&fpr, fpr_primary)?;
|
||||
tx.link_fpr(&fpr, fpr_primary)?;
|
||||
}
|
||||
|
||||
for email in published_emails {
|
||||
emails_linked += 1;
|
||||
self.link_email(&email, fpr_primary)?;
|
||||
tx.link_email(&email, fpr_primary)?;
|
||||
}
|
||||
|
||||
tx.commit()?;
|
||||
|
||||
if keys_linked != 0 || emails_linked != 0 {
|
||||
Ok(RegenerateResult::Updated)
|
||||
} else {
|
||||
@@ -657,13 +672,18 @@ pub trait Database: Sync + Send {
|
||||
}
|
||||
}
|
||||
|
||||
fn regenerate_wkd(&self, fpr_primary: &Fingerprint, published_tpk: &Cert) -> Result<()> {
|
||||
fn regenerate_wkd(
|
||||
&self,
|
||||
tx: &Self::Transaction,
|
||||
fpr_primary: &Fingerprint,
|
||||
published_tpk: &Cert,
|
||||
) -> anyhow::Result<()> {
|
||||
let published_wkd_tpk_tmp = if published_tpk.userids().next().is_some() {
|
||||
Some(self.write_to_temp(&published_tpk.export_to_vec()?)?)
|
||||
Some(tx.write_to_temp(&published_tpk.export_to_vec()?)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
self.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
|
||||
tx.move_tmp_to_published_wkd(published_wkd_tpk_tmp, fpr_primary)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -671,8 +691,7 @@ pub trait Database: Sync + Send {
|
||||
|
||||
fn tpk_get_emails(cert: &Cert) -> Vec<Email> {
|
||||
cert.userids()
|
||||
.map(|binding| Email::try_from(binding.userid()))
|
||||
.flatten()
|
||||
.flat_map(|binding| Email::try_from(binding.userid()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -680,7 +699,6 @@ pub fn tpk_get_linkable_fprs(tpk: &Cert) -> Vec<Fingerprint> {
|
||||
let signing_capable = &KeyFlags::empty().set_signing().set_certification();
|
||||
let fpr_primary = &Fingerprint::try_from(tpk.fingerprint()).unwrap();
|
||||
tpk.keys()
|
||||
.into_iter()
|
||||
.flat_map(|bundle| {
|
||||
Fingerprint::try_from(bundle.key().fingerprint()).map(|fpr| {
|
||||
(
|
||||
|
@@ -1,12 +1,11 @@
|
||||
use openpgp::Result;
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use openpgp::{
|
||||
cert::prelude::*, policy::StandardPolicy, serialize::SerializeInto as _,
|
||||
types::RevocationStatus, Cert,
|
||||
use sequoia_openpgp::{
|
||||
Cert, Result, cert::prelude::*, policy::StandardPolicy, serialize::SerializeInto as _,
|
||||
types::RevocationStatus,
|
||||
};
|
||||
|
||||
use Email;
|
||||
use crate::Email;
|
||||
|
||||
pub const POLICY: StandardPolicy = StandardPolicy::new();
|
||||
|
||||
@@ -19,7 +18,7 @@ pub fn is_status_revoked(status: RevocationStatus) -> bool {
|
||||
}
|
||||
|
||||
pub fn tpk_to_string(tpk: &Cert) -> Result<Vec<u8>> {
|
||||
tpk.armored().export_to_vec()
|
||||
tpk.armored().to_vec()
|
||||
}
|
||||
|
||||
pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
|
||||
@@ -85,7 +84,8 @@ pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
|
||||
/// Filters the Cert, keeping only UserIDs that aren't revoked, and whose emails match the given list
|
||||
pub fn tpk_filter_alive_emails(tpk: &Cert, emails: &[Email]) -> Cert {
|
||||
tpk.clone().retain_userids(|uid| {
|
||||
if is_status_revoked(uid.revocation_status(&POLICY, None)) {
|
||||
let is_exportable = uid.self_signatures().any(|s| s.exportable().is_ok());
|
||||
if !is_exportable || is_status_revoked(uid.revocation_status(&POLICY, None)) {
|
||||
false
|
||||
} else if let Ok(email) = Email::try_from(uid.userid()) {
|
||||
emails.contains(&email)
|
||||
|
782
database/src/sqlite.rs
Normal file
782
database/src/sqlite.rs
Normal file
@@ -0,0 +1,782 @@
|
||||
use self_cell::self_cell;
|
||||
|
||||
use std::convert::TryFrom;
|
||||
use std::fs::create_dir_all;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::types::{Email, Fingerprint, KeyID};
|
||||
use crate::{Database, Query};
|
||||
use anyhow::{anyhow, format_err};
|
||||
use sequoia_openpgp::{Cert, policy::StandardPolicy};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
use r2d2_sqlite::SqliteConnectionManager;
|
||||
use r2d2_sqlite::rusqlite::OptionalExtension;
|
||||
use r2d2_sqlite::rusqlite::ToSql;
|
||||
use r2d2_sqlite::rusqlite::Transaction;
|
||||
use r2d2_sqlite::rusqlite::params;
|
||||
|
||||
use crate::{DatabaseTransaction, wkd};
|
||||
|
||||
pub const POLICY: StandardPolicy = StandardPolicy::new();
|
||||
|
||||
const DEFAULT_DB_FILE_NAME: &str = "keys.sqlite";
|
||||
const DEFAULT_LOG_DIR_NAME: &str = "log";
|
||||
|
||||
pub struct Sqlite {
|
||||
pool: r2d2::Pool<SqliteConnectionManager>,
|
||||
}
|
||||
|
||||
impl Sqlite {
|
||||
pub fn new_file(db_file: impl AsRef<Path>, log_dir: impl AsRef<Path>) -> anyhow::Result<Self> {
|
||||
create_dir_all(log_dir)?;
|
||||
|
||||
Self::new_internal(SqliteConnectionManager::file(db_file))
|
||||
}
|
||||
|
||||
pub fn log_dir_path(base_dir: impl AsRef<Path>) -> PathBuf {
|
||||
base_dir.as_ref().join(DEFAULT_LOG_DIR_NAME)
|
||||
}
|
||||
|
||||
pub fn log_dir_path_from_db_file_path(
|
||||
db_file_path: impl AsRef<Path>,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
db_file_path
|
||||
.as_ref()
|
||||
.parent()
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Can't get log dir path from invalid db file path: {:?}",
|
||||
db_file_path.as_ref()
|
||||
)
|
||||
})
|
||||
.map(|parent_dir_path| parent_dir_path.join(DEFAULT_LOG_DIR_NAME))
|
||||
}
|
||||
|
||||
pub fn db_file_path(base_dir: impl AsRef<Path>) -> PathBuf {
|
||||
base_dir.as_ref().join(DEFAULT_DB_FILE_NAME)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn build_pool(
|
||||
manager: SqliteConnectionManager,
|
||||
) -> anyhow::Result<r2d2::Pool<SqliteConnectionManager>> {
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct LogConnectionCustomizer;
|
||||
impl<E> r2d2::CustomizeConnection<rusqlite::Connection, E> for LogConnectionCustomizer {
|
||||
fn on_acquire(&self, conn: &mut rusqlite::Connection) -> Result<(), E> {
|
||||
println!("Acquiring sqlite pool connection: {:?}", conn);
|
||||
conn.trace(Some(|query| {
|
||||
println!("{}", query);
|
||||
}));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn on_release(&self, conn: rusqlite::Connection) {
|
||||
println!("Releasing pool connection: {:?}", conn);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(r2d2::Pool::builder()
|
||||
.connection_customizer(Box::new(LogConnectionCustomizer {}))
|
||||
.build(manager)?)
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
fn build_pool(
|
||||
manager: SqliteConnectionManager,
|
||||
) -> anyhow::Result<r2d2::Pool<SqliteConnectionManager>> {
|
||||
Ok(r2d2::Pool::builder().build(manager)?)
|
||||
}
|
||||
|
||||
fn new_internal(manager: SqliteConnectionManager) -> anyhow::Result<Self> {
|
||||
let pool = Self::build_pool(manager)?;
|
||||
let conn = pool.get()?;
|
||||
conn.pragma_update(None, "journal_mode", "wal")?;
|
||||
conn.pragma_update(None, "synchronous", "normal")?;
|
||||
conn.pragma_update(None, "user_version", "1")?;
|
||||
conn.execute_batch(
|
||||
"
|
||||
CREATE TABLE IF NOT EXISTS certs (
|
||||
primary_fingerprint TEXT NOT NULL PRIMARY KEY,
|
||||
full TEXT NOT NULL,
|
||||
published TEXT,
|
||||
published_not_armored BLOB,
|
||||
updated_at TIMESTAMP NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS cert_identifiers (
|
||||
fingerprint TEXT NOT NULL PRIMARY KEY,
|
||||
keyid TEXT NOT NULL,
|
||||
primary_fingerprint TEXT NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS emails (
|
||||
email TEXT NOT NULL PRIMARY KEY,
|
||||
domain TEXT NOT NULL,
|
||||
wkd_hash TEXT NOT NULL,
|
||||
primary_fingerprint TEXT NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL
|
||||
);
|
||||
",
|
||||
)?;
|
||||
|
||||
Ok(Self { pool })
|
||||
}
|
||||
}
|
||||
|
||||
self_cell! {
|
||||
pub struct SqliteTransaction {
|
||||
owner: r2d2::PooledConnection<SqliteConnectionManager>,
|
||||
#[covariant]
|
||||
dependent: Transaction,
|
||||
}
|
||||
}
|
||||
|
||||
impl SqliteTransaction {
|
||||
fn start(pool: &r2d2::Pool<SqliteConnectionManager>) -> anyhow::Result<Self> {
|
||||
let conn = pool.get()?;
|
||||
Ok(Self::new(conn, |c| {
|
||||
Transaction::new_unchecked(c, rusqlite::TransactionBehavior::Deferred).unwrap()
|
||||
}))
|
||||
}
|
||||
|
||||
fn tx(&self) -> &Transaction<'_> {
|
||||
self.borrow_dependent()
|
||||
}
|
||||
}
|
||||
|
||||
fn query_simple<T: rusqlite::types::FromSql>(
|
||||
conn: &r2d2::PooledConnection<SqliteConnectionManager>,
|
||||
query: &str,
|
||||
params: &[&dyn ToSql],
|
||||
) -> Option<T> {
|
||||
conn.prepare_cached(query)
|
||||
.expect("query must be valid")
|
||||
.query_row(params, |row| row.get(0))
|
||||
.optional()
|
||||
.expect("query exection must not fail")
|
||||
}
|
||||
|
||||
impl DatabaseTransaction<'_> for SqliteTransaction {
|
||||
type TempCert = Vec<u8>;
|
||||
|
||||
fn commit(self) -> anyhow::Result<()> {
|
||||
// we can't use tx().commit(), but we can cheat :)
|
||||
self.tx().execute_batch("COMMIT")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_to_temp(&self, content: &[u8]) -> anyhow::Result<Self::TempCert> {
|
||||
Ok(content.to_vec())
|
||||
}
|
||||
|
||||
fn move_tmp_to_full(&self, file: Self::TempCert, fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards")
|
||||
.as_millis() as u64;
|
||||
let file = String::from_utf8(file)?;
|
||||
self.tx().execute(
|
||||
"
|
||||
INSERT INTO certs (primary_fingerprint, full, created_at, updated_at)
|
||||
VALUES (?1, ?2, ?3, ?3)
|
||||
ON CONFLICT(primary_fingerprint) DO UPDATE SET full=excluded.full, updated_at = excluded.updated_at
|
||||
",
|
||||
params![fpr, file, now],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn move_tmp_to_published(&self, file: Self::TempCert, fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards")
|
||||
.as_millis() as u64;
|
||||
let file = String::from_utf8(file)?;
|
||||
self.tx().execute(
|
||||
"UPDATE certs SET published = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
|
||||
params![fpr, file, now],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn move_tmp_to_published_wkd(
|
||||
&self,
|
||||
file: Option<Self::TempCert>,
|
||||
fpr: &Fingerprint,
|
||||
) -> anyhow::Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards")
|
||||
.as_millis() as u64;
|
||||
self.tx().execute(
|
||||
"UPDATE certs SET published_not_armored = ?2, updated_at = ?3 WHERE primary_fingerprint = ?1",
|
||||
params![fpr, file, now],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_to_quarantine(&self, _fpr: &Fingerprint, _content: &[u8]) -> anyhow::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn link_email(&self, email: &Email, fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards")
|
||||
.as_millis() as u64;
|
||||
let (domain, wkd_hash) = wkd::encode_wkd(email.as_str()).expect("email must be vaild");
|
||||
self.tx().execute(
|
||||
"
|
||||
INSERT INTO emails (email, wkd_hash, domain, primary_fingerprint, created_at)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||
ON CONFLICT(email) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint
|
||||
",
|
||||
params![email, domain, wkd_hash, fpr, now],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn unlink_email(&self, email: &Email, fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
self.tx()
|
||||
.execute(
|
||||
"DELETE FROM emails WHERE email = ?1 AND primary_fingerprint = ?2",
|
||||
params![email, fpr],
|
||||
)
|
||||
.unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn link_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards")
|
||||
.as_millis() as u64;
|
||||
self.tx().execute(
|
||||
"
|
||||
INSERT INTO cert_identifiers (fingerprint, keyid, primary_fingerprint, created_at)
|
||||
VALUES (?1, ?2, ?3, ?4)
|
||||
ON CONFLICT(fingerprint) DO UPDATE SET primary_fingerprint = excluded.primary_fingerprint;
|
||||
",
|
||||
params![
|
||||
from_fpr,
|
||||
KeyID::from(from_fpr),
|
||||
primary_fpr,
|
||||
now,
|
||||
],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn unlink_fpr(&self, from_fpr: &Fingerprint, primary_fpr: &Fingerprint) -> anyhow::Result<()> {
|
||||
self.tx().execute(
|
||||
"DELETE FROM cert_identifiers WHERE primary_fingerprint = ?1 AND fingerprint = ?2 AND keyid = ?3",
|
||||
params![primary_fpr, from_fpr, KeyID::from(from_fpr)],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Database<'a> for Sqlite {
|
||||
type Transaction = SqliteTransaction;
|
||||
|
||||
fn transaction(&'a self) -> anyhow::Result<Self::Transaction> {
|
||||
SqliteTransaction::start(&self.pool)
|
||||
}
|
||||
|
||||
fn write_log_append(&self, _filename: &str, _fpr_primary: &Fingerprint) -> anyhow::Result<()> {
|
||||
// this is done implicitly via created_at in sqlite, no need to do anything here
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn lookup_primary_fingerprint(&self, term: &Query) -> Option<Fingerprint> {
|
||||
use super::Query::*;
|
||||
|
||||
let conn = self.pool.get().unwrap();
|
||||
match term {
|
||||
ByFingerprint(fp) => query_simple(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
|
||||
params![fp],
|
||||
),
|
||||
ByKeyID(keyid) => query_simple(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
|
||||
params![keyid],
|
||||
),
|
||||
ByEmail(email) => query_simple(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
|
||||
params![email],
|
||||
),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
// Lookup straight from certs table, no link resolution
|
||||
fn by_fpr_full(&self, primary_fpr: &Fingerprint) -> Option<String> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT full FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![primary_fpr],
|
||||
)
|
||||
}
|
||||
|
||||
// XXX: rename! to by_primary_fpr_published
|
||||
// Lookup the published cert straight from certs table, no link resolution
|
||||
fn by_primary_fpr(&self, primary_fpr: &Fingerprint) -> Option<String> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![primary_fpr],
|
||||
)
|
||||
}
|
||||
|
||||
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple::<Fingerprint>(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM cert_identifiers WHERE fingerprint = ?1",
|
||||
params![fpr],
|
||||
)
|
||||
.and_then(|primary_fpr| {
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![&primary_fpr],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn by_email(&self, email: &Email) -> Option<String> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple::<Fingerprint>(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
|
||||
params![email],
|
||||
)
|
||||
.and_then(|primary_fpr| {
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![&primary_fpr],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple::<Fingerprint>(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM emails WHERE email = ?1",
|
||||
params![email],
|
||||
)
|
||||
.and_then(|primary_fpr| {
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![&primary_fpr],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn by_kid(&self, kid: &KeyID) -> Option<String> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple::<Fingerprint>(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM cert_identifiers WHERE keyid = ?1",
|
||||
params![kid],
|
||||
)
|
||||
.and_then(|primary_fpr| {
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![primary_fpr],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn by_domain_and_hash_wkd(&self, domain: &str, wkd_hash: &str) -> Option<Vec<u8>> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
query_simple::<Fingerprint>(
|
||||
&conn,
|
||||
"SELECT primary_fingerprint FROM emails WHERE domain = ?1 AND wkd_hash = ?2",
|
||||
params![domain, wkd_hash],
|
||||
)
|
||||
.and_then(|primary_fpr| {
|
||||
query_simple(
|
||||
&conn,
|
||||
"SELECT published_not_armored FROM certs WHERE primary_fingerprint = ?1",
|
||||
params![primary_fpr],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn check_link_fpr(
|
||||
&self,
|
||||
fpr: &Fingerprint,
|
||||
_fpr_target: &Fingerprint,
|
||||
) -> anyhow::Result<Option<Fingerprint>> {
|
||||
// a desync here cannot happen structurally, so always return true here
|
||||
Ok(Some(fpr.clone()))
|
||||
}
|
||||
|
||||
/// Checks the database for consistency.
|
||||
///
|
||||
/// Note that this operation may take a long time, and is
|
||||
/// generally only useful for testing.
|
||||
fn check_consistency(&self) -> anyhow::Result<()> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
let mut stmt = conn.prepare("SELECT primary_fingerprint, published FROM certs")?;
|
||||
let mut rows = stmt.query([])?;
|
||||
while let Some(row) = rows.next()? {
|
||||
let primary_fpr: Fingerprint = row.get(0)?;
|
||||
let published: String = row.get(1)?;
|
||||
let cert = Cert::from_str(&published).unwrap();
|
||||
|
||||
let mut cert_emails: Vec<Email> = cert
|
||||
.userids()
|
||||
.filter_map(|uid| uid.userid().email2().unwrap())
|
||||
.flat_map(Email::from_str)
|
||||
.collect();
|
||||
let mut db_emails: Vec<Email> = conn
|
||||
.prepare("SELECT email FROM emails WHERE primary_fingerprint = ?1")?
|
||||
.query_map([&primary_fpr], |row| row.get::<_, String>(0))
|
||||
.unwrap()
|
||||
.flat_map(|email| Email::from_str(&email.unwrap()))
|
||||
.collect();
|
||||
cert_emails.sort();
|
||||
cert_emails.dedup();
|
||||
db_emails.sort();
|
||||
if cert_emails != db_emails {
|
||||
return Err(format_err!(
|
||||
"{:?} does not have correct emails indexed, cert ${:?} db {:?}",
|
||||
&primary_fpr,
|
||||
cert_emails,
|
||||
db_emails,
|
||||
));
|
||||
}
|
||||
|
||||
let policy = &POLICY;
|
||||
let mut cert_fprs: Vec<Fingerprint> = cert
|
||||
.keys()
|
||||
.with_policy(policy, None)
|
||||
.for_certification()
|
||||
.for_signing()
|
||||
.map(|amalgamation| amalgamation.key().fingerprint())
|
||||
.flat_map(Fingerprint::try_from)
|
||||
.collect();
|
||||
let mut db_fprs: Vec<Fingerprint> = conn
|
||||
.prepare("SELECT fingerprint FROM cert_identifiers WHERE primary_fingerprint = ?1")?
|
||||
.query_map([&primary_fpr], |row| row.get::<_, Fingerprint>(0))
|
||||
.unwrap()
|
||||
.flatten()
|
||||
.collect();
|
||||
cert_fprs.sort();
|
||||
db_fprs.sort();
|
||||
if cert_fprs != db_fprs {
|
||||
return Err(format_err!(
|
||||
"{:?} does not have correct fingerprints indexed, cert ${:?} db {:?}",
|
||||
&primary_fpr,
|
||||
cert_fprs,
|
||||
db_fprs,
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_last_log_entry(&self) -> anyhow::Result<Fingerprint> {
|
||||
let conn = self.pool.get().unwrap();
|
||||
Ok(conn.query_row(
|
||||
"SELECT primary_fingerprint FROM certs ORDER BY updated_at DESC LIMIT 1",
|
||||
[],
|
||||
|row| row.get::<_, Fingerprint>(0),
|
||||
)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::test;
|
||||
use sequoia_openpgp::cert::CertBuilder;
|
||||
use tempfile::TempDir;
|
||||
|
||||
const DATA_1: &str = "data, content doesn't matter";
|
||||
const DATA_2: &str = "other data, content doesn't matter";
|
||||
const FINGERPRINT_1: &str = "D4AB192964F76A7F8F8A9B357BD18320DEADFA11";
|
||||
|
||||
fn open_db() -> (TempDir, Sqlite) {
|
||||
let tmpdir = TempDir::new().unwrap();
|
||||
let tempdir_path = tmpdir.path();
|
||||
|
||||
let db = Sqlite::new_file(
|
||||
Sqlite::db_file_path(tempdir_path),
|
||||
Sqlite::log_dir_path(tempdir_path),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
(tmpdir, db)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new() {
|
||||
let (_tmp_dir, db) = open_db();
|
||||
let k1 = CertBuilder::new()
|
||||
.add_userid("a@invalid.example.org")
|
||||
.generate()
|
||||
.unwrap()
|
||||
.0;
|
||||
let k2 = CertBuilder::new()
|
||||
.add_userid("b@invalid.example.org")
|
||||
.generate()
|
||||
.unwrap()
|
||||
.0;
|
||||
let k3 = CertBuilder::new()
|
||||
.add_userid("c@invalid.example.org")
|
||||
.generate()
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
assert!(
|
||||
!db.merge(k1)
|
||||
.unwrap()
|
||||
.into_tpk_status()
|
||||
.email_status
|
||||
.is_empty()
|
||||
);
|
||||
assert!(
|
||||
!db.merge(k2.clone())
|
||||
.unwrap()
|
||||
.into_tpk_status()
|
||||
.email_status
|
||||
.is_empty()
|
||||
);
|
||||
assert!(!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0);
|
||||
assert!(
|
||||
!db.merge(k3.clone())
|
||||
.unwrap()
|
||||
.into_tpk_status()
|
||||
.email_status
|
||||
.is_empty()
|
||||
);
|
||||
assert!(
|
||||
!db.merge(k3.clone())
|
||||
.unwrap()
|
||||
.into_tpk_status()
|
||||
.email_status
|
||||
.len()
|
||||
> 0
|
||||
);
|
||||
assert!(!db.merge(k3).unwrap().into_tpk_status().email_status.len() > 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn xx_by_fpr_full() -> anyhow::Result<()> {
|
||||
let (_tmp_dir, db) = open_db();
|
||||
let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;
|
||||
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
|
||||
lock.link_fpr(&fpr1, &fpr1)?;
|
||||
lock.commit().unwrap();
|
||||
|
||||
assert_eq!(db.by_fpr_full(&fpr1).expect("must find key"), DATA_1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn xx_by_kid() -> anyhow::Result<()> {
|
||||
let (_tmp_dir, db) = open_db();
|
||||
let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;
|
||||
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
|
||||
lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
|
||||
lock.link_fpr(&fpr1, &fpr1)?;
|
||||
lock.commit().unwrap();
|
||||
|
||||
assert_eq!(db.by_kid(&fpr1.into()).expect("must find key"), DATA_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn xx_by_primary_fpr() -> anyhow::Result<()> {
|
||||
let (_tmp_dir, db) = open_db();
|
||||
let fpr1 = Fingerprint::from_str(FINGERPRINT_1)?;
|
||||
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.move_tmp_to_full(lock.write_to_temp(DATA_1.as_bytes())?, &fpr1)?;
|
||||
lock.move_tmp_to_published(lock.write_to_temp(DATA_2.as_bytes())?, &fpr1)?;
|
||||
lock.commit().unwrap();
|
||||
|
||||
assert_eq!(db.by_primary_fpr(&fpr1).expect("must find key"), DATA_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uid_verification() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_uid_verification(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uid_deletion() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_uid_deletion(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn subkey_lookup() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_subkey_lookup(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kid_lookup() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_kid_lookup(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn upload_revoked_tpk() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_upload_revoked_tpk(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uid_revocation() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_uid_revocation(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn regenerate() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_regenerate(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn key_reupload() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_reupload(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uid_replacement() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_uid_replacement(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uid_unlinking() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_unlink_uid(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn same_email_1() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_same_email_1(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn same_email_2() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_same_email_2(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn same_email_3() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_same_email_3(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn same_email_4() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_same_email_4(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_selfsig() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_no_selfsig(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allow_dks() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_allow_dks(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allow_revoked() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_allow_revoked(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bad_uids() {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::test_bad_uids(&mut db);
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reverse_fingerprint_to_path() {
|
||||
let tmpdir = TempDir::new().unwrap();
|
||||
let tmpdir_path = tmpdir.path();
|
||||
let db = Sqlite::new_file(
|
||||
Sqlite::db_file_path(tmpdir_path),
|
||||
Sqlite::log_dir_path(tmpdir_path),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let _fp: Fingerprint = "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();
|
||||
|
||||
// XXX: fixme
|
||||
//assert_eq!(Sqlite::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
|
||||
// Some(fp.clone()));
|
||||
db.check_consistency().expect("inconsistent database");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn attested_key_signatures() -> anyhow::Result<()> {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::attested_key_signatures(&mut db)?;
|
||||
db.check_consistency()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonexportable_sigs() -> anyhow::Result<()> {
|
||||
let (_tmp_dir, mut db) = open_db();
|
||||
test::nonexportable_sigs(&mut db)?;
|
||||
db.check_consistency()?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
@@ -1,17 +1,16 @@
|
||||
use std::fs::{create_dir_all, remove_file, File};
|
||||
use std::fs::{File, create_dir_all, remove_file};
|
||||
use std::io::{Read, Write};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use log::info;
|
||||
use std::str;
|
||||
|
||||
use Result;
|
||||
|
||||
pub struct StatefulTokens {
|
||||
token_dir: PathBuf,
|
||||
}
|
||||
|
||||
impl StatefulTokens {
|
||||
pub fn new(token_dir: impl Into<PathBuf>) -> Result<Self> {
|
||||
pub fn new(token_dir: impl Into<PathBuf>) -> anyhow::Result<Self> {
|
||||
let token_dir = token_dir.into();
|
||||
create_dir_all(&token_dir)?;
|
||||
|
||||
@@ -21,9 +20,9 @@ impl StatefulTokens {
|
||||
Ok(StatefulTokens { token_dir })
|
||||
}
|
||||
|
||||
pub fn new_token(&self, token_type: &str, payload: &[u8]) -> Result<String> {
|
||||
pub fn new_token(&self, token_type: &str, payload: &[u8]) -> anyhow::Result<String> {
|
||||
use rand::distributions::Alphanumeric;
|
||||
use rand::{thread_rng, Rng};
|
||||
use rand::{Rng, thread_rng};
|
||||
|
||||
let mut rng = thread_rng();
|
||||
// samples from [a-zA-Z0-9]
|
||||
@@ -38,7 +37,7 @@ impl StatefulTokens {
|
||||
Ok(name)
|
||||
}
|
||||
|
||||
pub fn pop_token(&self, token_type: &str, token: &str) -> Result<String> {
|
||||
pub fn pop_token(&self, token_type: &str, token: &str) -> anyhow::Result<String> {
|
||||
let path = self.token_dir.join(token_type).join(token);
|
||||
let buf = {
|
||||
let mut fd = File::open(&path)?;
|
||||
|
@@ -4,8 +4,6 @@ use std::path::Path;
|
||||
|
||||
use fs2::FileExt;
|
||||
|
||||
use Result;
|
||||
|
||||
/// A minimalistic flock-based mutex.
|
||||
///
|
||||
/// This just barely implements enough what we need from a mutex.
|
||||
@@ -14,7 +12,7 @@ pub struct FlockMutexGuard {
|
||||
}
|
||||
|
||||
impl FlockMutexGuard {
|
||||
pub fn lock(path: impl AsRef<Path>) -> Result<Self> {
|
||||
pub fn lock(path: impl AsRef<Path>) -> anyhow::Result<Self> {
|
||||
let file = File::open(path)?;
|
||||
while let Err(e) = file.lock_exclusive() {
|
||||
// According to flock(2), possible errors returned are:
|
||||
|
@@ -14,40 +14,39 @@
|
||||
// confirm again
|
||||
// fetch by uid & fpr
|
||||
|
||||
use anyhow::Result;
|
||||
use std::convert::{TryFrom, TryInto};
|
||||
use std::str::FromStr;
|
||||
|
||||
use openpgp::cert::{CertBuilder, UserIDRevocationBuilder};
|
||||
use openpgp::types::{KeyFlags, ReasonForRevocation, SignatureType};
|
||||
use openpgp::{
|
||||
packet::{signature::*, UserID},
|
||||
parse::Parse,
|
||||
types::RevocationStatus,
|
||||
use crate::types::{Email, Fingerprint, KeyID};
|
||||
use crate::{Database, Query};
|
||||
use sequoia_openpgp::packet::Key;
|
||||
use sequoia_openpgp::packet::key::{Key4, PrimaryRole, SecretParts};
|
||||
use sequoia_openpgp::{
|
||||
Cert, Packet,
|
||||
cert::{CertBuilder, UserIDRevocationBuilder},
|
||||
packet::{UserID, signature::*},
|
||||
parse::Parse,
|
||||
types::{KeyFlags, ReasonForRevocation, RevocationStatus, SignatureType},
|
||||
};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use types::{Email, Fingerprint, KeyID};
|
||||
use Database;
|
||||
use Query;
|
||||
|
||||
use openpgp_utils::POLICY;
|
||||
use crate::openpgp_utils::POLICY;
|
||||
|
||||
use EmailAddressStatus;
|
||||
use TpkStatus;
|
||||
use crate::EmailAddressStatus;
|
||||
use crate::TpkStatus;
|
||||
|
||||
fn check_mail_none(db: &impl Database, email: &Email) {
|
||||
use crate::DatabaseTransaction;
|
||||
|
||||
fn check_mail_none<'a>(db: &impl Database<'a>, email: &Email) {
|
||||
assert!(db.by_email(email).is_none());
|
||||
assert!(db.by_email_wkd(email).is_none());
|
||||
}
|
||||
|
||||
fn check_mail_some(db: &impl Database, email: &Email) {
|
||||
fn check_mail_some<'a>(db: &impl Database<'a>, email: &Email) {
|
||||
assert!(db.by_email(email).is_some());
|
||||
assert!(db.by_email_wkd(email).is_some());
|
||||
}
|
||||
|
||||
pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_uid_verification<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let str_uid2 = "Test B <test_b@example.com>";
|
||||
let tpk = CertBuilder::new()
|
||||
@@ -64,7 +63,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
@@ -107,7 +106,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
let uid = key.userids().next().unwrap().userid().clone();
|
||||
|
||||
assert!((uid == uid1) ^ (uid == uid2));
|
||||
let email = Email::from_str(&String::from_utf8(uid.value().to_vec()).unwrap()).unwrap();
|
||||
let email = Email::from_str(core::str::from_utf8(uid.value()).unwrap()).unwrap();
|
||||
assert_eq!(db.by_email(&email).unwrap(), raw);
|
||||
|
||||
if email1 == email {
|
||||
@@ -135,7 +134,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
let uid = key.userids().next().unwrap().userid().clone();
|
||||
|
||||
assert!((uid == uid1) ^ (uid == uid2));
|
||||
let email = Email::from_str(&String::from_utf8(uid.value().to_vec()).unwrap()).unwrap();
|
||||
let email = Email::from_str(core::str::from_utf8(uid.value()).unwrap()).unwrap();
|
||||
assert_eq!(db.by_email(&email).unwrap(), raw);
|
||||
|
||||
if email1 == email {
|
||||
@@ -169,7 +168,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
}
|
||||
|
||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -275,7 +274,7 @@ pub fn test_uid_verification(db: &mut impl Database, log_path: &Path) {
|
||||
}*/
|
||||
}
|
||||
|
||||
pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_regenerate<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let tpk = CertBuilder::new()
|
||||
.add_userid(str_uid1)
|
||||
@@ -303,7 +302,7 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
|
||||
db.regenerate_links(&fpr).unwrap();
|
||||
check_mail_none(db, &email1);
|
||||
@@ -313,23 +312,35 @@ pub fn test_regenerate(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
db.set_email_published(&fpr, &email1).unwrap();
|
||||
|
||||
db.unlink_email(&email1, &fpr).unwrap();
|
||||
{
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.unlink_email(&email1, &fpr).unwrap();
|
||||
lock.commit().unwrap();
|
||||
}
|
||||
assert!(db.check_consistency().is_err());
|
||||
db.regenerate_links(&fpr).unwrap();
|
||||
assert!(db.check_consistency().is_ok());
|
||||
db.check_consistency().expect("consistency must return Ok");
|
||||
|
||||
db.unlink_fpr(&fpr, &fpr).unwrap();
|
||||
{
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.unlink_fpr(&fpr, &fpr).unwrap();
|
||||
lock.commit().unwrap();
|
||||
}
|
||||
assert!(db.check_consistency().is_err());
|
||||
db.regenerate_links(&fpr).unwrap();
|
||||
assert!(db.check_consistency().is_ok());
|
||||
db.check_consistency().expect("consistency must return Ok");
|
||||
|
||||
db.unlink_fpr(&fpr_sign, &fpr).unwrap();
|
||||
{
|
||||
let lock = db.transaction().unwrap();
|
||||
lock.unlink_fpr(&fpr_sign, &fpr).unwrap();
|
||||
lock.commit().unwrap();
|
||||
}
|
||||
assert!(db.check_consistency().is_err());
|
||||
db.regenerate_links(&fpr).unwrap();
|
||||
assert!(db.check_consistency().is_ok());
|
||||
db.check_consistency().expect("consistency must return Ok");
|
||||
}
|
||||
|
||||
pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_reupload<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let str_uid2 = "Test B <test_b@example.com>";
|
||||
let tpk = CertBuilder::new()
|
||||
@@ -344,7 +355,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
|
||||
// verify 1st uid
|
||||
db.set_email_published(&fpr, &email1).unwrap();
|
||||
@@ -367,7 +378,7 @@ pub fn test_reupload(db: &mut impl Database, log_path: &Path) {
|
||||
assert!(db.by_email(&email2).is_none() ^ db.by_email(&email1).is_none());
|
||||
}
|
||||
|
||||
pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_uid_replacement<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let tpk1 = CertBuilder::new()
|
||||
.add_userid(str_uid1)
|
||||
@@ -390,9 +401,9 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload both keys
|
||||
db.merge(tpk1).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr1);
|
||||
check_log_entry(db, &fpr1);
|
||||
db.merge(tpk2).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr2);
|
||||
check_log_entry(db, &fpr2);
|
||||
|
||||
// verify 1st uid
|
||||
db.set_email_published(&fpr1, &email1).unwrap();
|
||||
@@ -445,7 +456,7 @@ pub fn test_uid_replacement(db: &mut impl Database, log_path: &Path) {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_uid_deletion<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let str_uid2 = "Test B <test_b@example.com>";
|
||||
let tpk = CertBuilder::new()
|
||||
@@ -463,7 +474,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key and verify uids
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -509,7 +520,7 @@ pub fn test_uid_deletion(db: &mut impl Database, log_path: &Path) {
|
||||
assert_eq!(tpk.keys().subkeys().count(), n_subkeys);
|
||||
}
|
||||
|
||||
pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
|
||||
pub fn test_subkey_lookup<'a>(db: &'a mut impl Database<'a>) {
|
||||
let tpk = CertBuilder::new()
|
||||
.add_userid("Testy <test@example.com>")
|
||||
.add_signing_subkey()
|
||||
@@ -549,7 +560,7 @@ pub fn test_subkey_lookup(db: &mut impl Database, _log_path: &Path) {
|
||||
assert_eq!(raw1, raw2);
|
||||
}
|
||||
|
||||
pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
|
||||
pub fn test_kid_lookup<'a>(db: &'a mut impl Database<'a>) {
|
||||
let tpk = CertBuilder::new()
|
||||
.add_userid("Testy <test@example.com>")
|
||||
.add_signing_subkey()
|
||||
@@ -588,7 +599,7 @@ pub fn test_kid_lookup(db: &mut impl Database, _log_path: &Path) {
|
||||
assert_eq!(raw1, raw2);
|
||||
}
|
||||
|
||||
pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_upload_revoked_tpk<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
let str_uid2 = "Test B <test_b@example.com>";
|
||||
let (mut tpk, revocation) = CertBuilder::new()
|
||||
@@ -616,7 +627,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: true,
|
||||
@@ -633,7 +644,7 @@ pub fn test_upload_revoked_tpk(db: &mut impl Database, log_path: &Path) {
|
||||
check_mail_none(db, &email2);
|
||||
}
|
||||
|
||||
pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_uid_revocation<'a>(db: &'a mut impl Database<'a>) {
|
||||
use std::{thread, time};
|
||||
|
||||
let str_uid1 = "Test A <test_a@example.com>";
|
||||
@@ -651,7 +662,7 @@ pub fn test_uid_revocation(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -804,7 +815,7 @@ pub fn test_uid_revocation_fake(db: &mut D) {
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_unlink_uid<'a>(db: &'a mut impl Database<'a>) {
|
||||
let uid = "Test A <test_a@example.com>";
|
||||
let email = Email::from_str(uid).unwrap();
|
||||
|
||||
@@ -853,7 +864,7 @@ pub fn test_unlink_uid(db: &mut impl Database, log_path: &Path) {
|
||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||
let tpk_evil = tpk_evil.insert_packets(sig).unwrap();
|
||||
let tpk_status = db.merge(tpk_evil).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr_evil);
|
||||
check_log_entry(db, &fpr_evil);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -883,7 +894,7 @@ pub fn get_userids(armored: &str) -> Vec<UserID> {
|
||||
|
||||
// If multiple keys have the same email address, make sure things work
|
||||
// as expected.
|
||||
pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_same_email_1<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "A <test@example.com>";
|
||||
let tpk1 = CertBuilder::new()
|
||||
.add_userid(str_uid1)
|
||||
@@ -906,7 +917,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload keys.
|
||||
let tpk_status1 = db.merge(tpk1).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr1);
|
||||
check_log_entry(db, &fpr1);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -916,7 +927,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
||||
tpk_status1
|
||||
);
|
||||
let tpk_status2 = db.merge(tpk2.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr2);
|
||||
check_log_entry(db, &fpr2);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -984,7 +995,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||
let tpk2 = tpk2.insert_packets(sig).unwrap();
|
||||
let tpk_status2 = db.merge(tpk2).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr2);
|
||||
check_log_entry(db, &fpr2);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1003,7 +1014,7 @@ pub fn test_same_email_1(db: &mut impl Database, log_path: &Path) {
|
||||
// sure things still work. We do this twice (see above), to
|
||||
// make sure the order isn't relevant when revoking one user id
|
||||
// but leaving the other.
|
||||
pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_same_email_2<'a>(db: &'a mut impl Database<'a>) {
|
||||
use std::{thread, time};
|
||||
|
||||
let str_uid1 = "A <test@example.com>";
|
||||
@@ -1021,7 +1032,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
|
||||
// verify uid1
|
||||
assert_eq!(
|
||||
@@ -1074,7 +1085,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||
let tpk = tpk.insert_packets(sig).unwrap();
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1092,7 +1103,7 @@ pub fn test_same_email_2(db: &mut impl Database, log_path: &Path) {
|
||||
// sure things still work. We do this twice (see above), to
|
||||
// make sure the order isn't relevant when revoking one user id
|
||||
// but leaving the other.
|
||||
pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_same_email_3<'a>(db: &'a mut impl Database<'a>) {
|
||||
use std::{thread, time};
|
||||
|
||||
let str_uid1 = "A <test@example.com>";
|
||||
@@ -1110,7 +1121,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(tpk.clone()).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
|
||||
// verify uid1
|
||||
assert_eq!(
|
||||
@@ -1163,7 +1174,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
||||
assert_eq!(sig.typ(), SignatureType::CertificationRevocation);
|
||||
let tpk = tpk.insert_packets(sig).unwrap();
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1186,7 +1197,7 @@ pub fn test_same_email_3(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// If a key has a verified email address, make sure newly uploaded user
|
||||
// ids with the same email are published as well.
|
||||
pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_same_email_4<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "A <test@example.com>";
|
||||
let str_uid2 = "B <test@example.com>";
|
||||
let tpk = CertBuilder::new()
|
||||
@@ -1205,7 +1216,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
||||
|
||||
// upload key
|
||||
let tpk_status = db.merge(cert_uid_1).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
db.set_email_published(&fpr, &tpk_status.email_status[0].0)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
@@ -1214,7 +1225,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
||||
);
|
||||
|
||||
let tpk_status = db.merge(cert_uid_2).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1231,7 +1242,7 @@ pub fn test_same_email_4(db: &mut impl Database, log_path: &Path) {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_bad_uids<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "foo@bar.example <foo@bar.example>";
|
||||
let str_uid2 = "A <test@example.com>";
|
||||
let str_uid3 = "lalalalaaaaa";
|
||||
@@ -1247,7 +1258,7 @@ pub fn test_bad_uids(db: &mut impl Database, log_path: &Path) {
|
||||
let email2 = Email::from_str(str_uid2).unwrap();
|
||||
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1293,7 +1304,7 @@ fn cert_without_signature_at(cert: Cert, mut index: i32) -> Cert {
|
||||
Cert::from_packets(packets).unwrap()
|
||||
}
|
||||
|
||||
pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
||||
pub fn test_unsigned_uids<'a>(db: &'a mut impl Database<'a>) {
|
||||
let str_uid1 = "test1@example.com";
|
||||
let str_uid2 = "test2@example.com";
|
||||
let tpk = CertBuilder::new()
|
||||
@@ -1308,7 +1319,7 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
||||
let tpk = cert_without_signature_at(tpk, 1);
|
||||
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
@@ -1319,17 +1330,44 @@ pub fn test_unsigned_uids(db: &mut impl Database, log_path: &Path) {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
|
||||
let (mut tpk, revocation) = CertBuilder::new().generate().unwrap();
|
||||
let fpr = Fingerprint::try_from(tpk.fingerprint()).unwrap();
|
||||
pub fn test_no_selfsig<'a>(db: &'a mut impl Database<'a>) {
|
||||
let key: Key<SecretParts, PrimaryRole> =
|
||||
Key4::generate_ecc(true, sequoia_openpgp::types::Curve::Ed25519)
|
||||
.unwrap()
|
||||
.into();
|
||||
// manually create a naked key without any signatures
|
||||
let tpk = Cert::try_from(vec![Packet::SecretKey(key.clone())]).unwrap();
|
||||
|
||||
// don't allow upload of naked key
|
||||
assert!(db.merge(tpk.clone()).is_err());
|
||||
}
|
||||
|
||||
// with revocation, it's ok
|
||||
tpk = tpk.insert_packets(revocation).unwrap();
|
||||
let tpk_status = db.merge(tpk).unwrap().into_tpk_status();
|
||||
check_log_entry(log_path, &fpr);
|
||||
pub fn test_allow_dks<'a>(db: &'a mut impl Database<'a>) {
|
||||
// generate a new User ID-less key, Sequoia adds a Direct Key Signature to it
|
||||
let (cert, _) = CertBuilder::new().generate().unwrap();
|
||||
|
||||
let fpr = Fingerprint::try_from(cert.fingerprint()).unwrap();
|
||||
let tpk_status = db.merge(cert).unwrap().into_tpk_status();
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: false,
|
||||
email_status: vec!(),
|
||||
unparsed_uids: 0
|
||||
},
|
||||
tpk_status
|
||||
);
|
||||
}
|
||||
|
||||
pub fn test_allow_revoked<'a>(db: &'a mut impl Database<'a>) {
|
||||
// generate a new User ID-less key, Sequoia adds a Direct Key Signature to it
|
||||
let (cert, revocation) = CertBuilder::new().generate().unwrap();
|
||||
// revoke the certificate
|
||||
let cert = cert.insert_packets(revocation).unwrap();
|
||||
|
||||
let fpr = Fingerprint::try_from(cert.fingerprint()).unwrap();
|
||||
let tpk_status = db.merge(cert).unwrap().into_tpk_status();
|
||||
check_log_entry(db, &fpr);
|
||||
assert_eq!(
|
||||
TpkStatus {
|
||||
is_revoked: true,
|
||||
@@ -1341,8 +1379,8 @@ pub fn test_no_selfsig(db: &mut impl Database, log_path: &Path) {
|
||||
}
|
||||
|
||||
/// Makes sure that attested key signatures are correctly handled.
|
||||
pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Result<()> {
|
||||
use openpgp::types::*;
|
||||
pub fn attested_key_signatures<'a>(db: &'a mut impl Database<'a>) -> anyhow::Result<()> {
|
||||
use sequoia_openpgp::types::*;
|
||||
use std::time::{Duration, SystemTime};
|
||||
let t0 = SystemTime::now() - Duration::new(5 * 60, 0);
|
||||
let t1 = SystemTime::now() - Duration::new(4 * 60, 0);
|
||||
@@ -1390,7 +1428,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
|
||||
// Now for the test. First, import Bob's cert as is.
|
||||
db.merge(bob.clone())?;
|
||||
check_log_entry(log_path, &bobs_fp);
|
||||
check_log_entry(db, &bobs_fp);
|
||||
|
||||
// Confirm the email so that we can inspect the userid component.
|
||||
db.set_email_published(&bobs_fp, &Email::from_str("bob@bar.com")?)?;
|
||||
@@ -1399,7 +1437,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
// certification is stripped.
|
||||
let bob = bob.insert_packets(vec![alice_certifies_bob.clone()])?;
|
||||
db.merge(bob.clone())?;
|
||||
check_log_entry(log_path, &bobs_fp);
|
||||
check_log_entry(db, &bobs_fp);
|
||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
||||
@@ -1408,7 +1446,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
// certification is now included.
|
||||
let bob_attested = bob.clone().insert_packets(vec![attestation])?;
|
||||
db.merge(bob_attested.clone())?;
|
||||
check_log_entry(log_path, &bobs_fp);
|
||||
check_log_entry(db, &bobs_fp);
|
||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
||||
@@ -1434,7 +1472,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
// Make a random merge with Bob's unattested cert, demonstrating
|
||||
// that the attestation still works.
|
||||
db.merge(bob.clone())?;
|
||||
check_log_entry(log_path, &bobs_fp);
|
||||
check_log_entry(db, &bobs_fp);
|
||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 1);
|
||||
@@ -1471,7 +1509,7 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
);
|
||||
|
||||
db.merge(bob)?;
|
||||
check_log_entry(log_path, &bobs_fp);
|
||||
check_log_entry(db, &bobs_fp);
|
||||
let bob_ = Cert::from_bytes(&db.by_fpr(&bobs_fp).unwrap())?;
|
||||
assert_eq!(bob_.bad_signatures().count(), 0);
|
||||
assert_eq!(bob_.userids().next().unwrap().certifications().count(), 0);
|
||||
@@ -1497,10 +1535,9 @@ pub fn attested_key_signatures(db: &mut impl Database, log_path: &Path) -> Resul
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_log_entry(log_path: &Path, fpr: &Fingerprint) {
|
||||
let log_data = fs::read_to_string(log_path).unwrap();
|
||||
let last_entry = log_data.lines().last().unwrap().split(' ').last().unwrap();
|
||||
assert_eq!(last_entry, fpr.to_string());
|
||||
fn check_log_entry<'a>(db: &impl Database<'a>, fpr: &Fingerprint) {
|
||||
let last_entry = db.get_last_log_entry().expect("must have log entry");
|
||||
assert_eq!(last_entry.to_string(), fpr.to_string());
|
||||
}
|
||||
|
||||
fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
|
||||
@@ -1508,13 +1545,13 @@ fn cert_without_uid(cert: Cert, removed_uid: &UserID) -> Cert {
|
||||
.into_packet_pile()
|
||||
.into_children()
|
||||
.filter(|pkt| match pkt {
|
||||
Packet::UserID(ref uid) => uid != removed_uid,
|
||||
Packet::UserID(uid) => uid != removed_uid,
|
||||
_ => true,
|
||||
});
|
||||
Cert::from_packets(packets).unwrap()
|
||||
}
|
||||
|
||||
pub fn nonexportable_sigs(db: &mut impl Database, _log_path: &Path) -> Result<()> {
|
||||
pub fn nonexportable_sigs<'a>(db: &'a mut impl Database<'a>) -> anyhow::Result<()> {
|
||||
let str_uid1 = "Test A <test_a@example.org>";
|
||||
let str_uid2 = "Test B <test_b@example.org>";
|
||||
|
||||
|
@@ -1,12 +1,16 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::fmt;
|
||||
use std::result;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Error;
|
||||
use openpgp::packet::UserID;
|
||||
use anyhow::anyhow;
|
||||
use hex::ToHex;
|
||||
use r2d2_sqlite::rusqlite::types::FromSql;
|
||||
use r2d2_sqlite::rusqlite::types::FromSqlError;
|
||||
use r2d2_sqlite::rusqlite::types::FromSqlResult;
|
||||
use r2d2_sqlite::rusqlite::types::ToSql;
|
||||
use r2d2_sqlite::rusqlite::types::ValueRef;
|
||||
use sequoia_openpgp::packet::UserID;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use Result;
|
||||
|
||||
/// Holds a normalized email address.
|
||||
///
|
||||
@@ -26,10 +30,26 @@ impl Email {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&UserID> for Email {
|
||||
type Error = Error;
|
||||
impl FromSql for Email {
|
||||
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||
value
|
||||
.as_str()
|
||||
.and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
|
||||
}
|
||||
}
|
||||
|
||||
fn try_from(uid: &UserID) -> Result<Self> {
|
||||
impl ToSql for Email {
|
||||
fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
|
||||
Ok(rusqlite::types::ToSqlOutput::Borrowed(
|
||||
rusqlite::types::ValueRef::Text(self.0.as_bytes()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&UserID> for Email {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(uid: &UserID) -> anyhow::Result<Self> {
|
||||
if let Some(address) = uid.email2()? {
|
||||
let mut iter = address.split('@');
|
||||
let localpart = iter.next().expect("Invalid email address");
|
||||
@@ -70,20 +90,36 @@ impl fmt::Display for Email {
|
||||
}
|
||||
|
||||
impl FromStr for Email {
|
||||
type Err = Error;
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Email> {
|
||||
fn from_str(s: &str) -> anyhow::Result<Email> {
|
||||
Email::try_from(&UserID::from(s))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Fingerprint([u8; 20]);
|
||||
|
||||
impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
|
||||
type Error = Error;
|
||||
impl FromSql for Fingerprint {
|
||||
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||
value
|
||||
.as_str()
|
||||
.and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
|
||||
}
|
||||
}
|
||||
|
||||
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> Result<Self> {
|
||||
impl ToSql for Fingerprint {
|
||||
fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
|
||||
Ok(rusqlite::types::ToSqlOutput::Owned(
|
||||
rusqlite::types::Value::Text(self.to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> anyhow::Result<Self> {
|
||||
match fpr {
|
||||
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a)),
|
||||
sequoia_openpgp::Fingerprint::Invalid(_) => Err(anyhow!("invalid fingerprint")),
|
||||
@@ -94,13 +130,12 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
|
||||
|
||||
impl fmt::Display for Fingerprint {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
use hex::ToHex;
|
||||
self.0.write_hex_upper(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Fingerprint {
|
||||
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
@@ -109,7 +144,7 @@ impl Serialize for Fingerprint {
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Fingerprint {
|
||||
fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error>
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
@@ -121,9 +156,9 @@ impl<'de> Deserialize<'de> for Fingerprint {
|
||||
}
|
||||
|
||||
impl FromStr for Fingerprint {
|
||||
type Err = Error;
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Fingerprint> {
|
||||
fn from_str(s: &str) -> anyhow::Result<Fingerprint> {
|
||||
match sequoia_openpgp::Fingerprint::from_hex(s)? {
|
||||
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a)),
|
||||
sequoia_openpgp::Fingerprint::Invalid(_) => {
|
||||
@@ -137,10 +172,26 @@ impl FromStr for Fingerprint {
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct KeyID([u8; 8]);
|
||||
|
||||
impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
|
||||
type Error = Error;
|
||||
impl FromSql for KeyID {
|
||||
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||
value
|
||||
.as_str()
|
||||
.and_then(|s| Self::from_str(s).map_err(|_| FromSqlError::InvalidType))
|
||||
}
|
||||
}
|
||||
|
||||
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> Result<Self> {
|
||||
impl ToSql for KeyID {
|
||||
fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
|
||||
Ok(rusqlite::types::ToSqlOutput::Owned(
|
||||
rusqlite::types::Value::Text(self.to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> anyhow::Result<Self> {
|
||||
match fpr {
|
||||
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a).into()),
|
||||
sequoia_openpgp::Fingerprint::Invalid(_) => Err(anyhow!("invalid fingerprint")),
|
||||
@@ -169,15 +220,14 @@ impl From<Fingerprint> for KeyID {
|
||||
|
||||
impl fmt::Display for KeyID {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
use hex::ToHex;
|
||||
self.0.write_hex_upper(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for KeyID {
|
||||
type Err = Error;
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<KeyID> {
|
||||
fn from_str(s: &str) -> anyhow::Result<KeyID> {
|
||||
match sequoia_openpgp::KeyID::from_hex(s)? {
|
||||
sequoia_openpgp::KeyID::V4(a) => Ok(KeyID(a)),
|
||||
sequoia_openpgp::KeyID::Invalid(_) => {
|
||||
|
@@ -1,11 +1,11 @@
|
||||
use super::Result;
|
||||
use crate::openpgp::types::HashAlgorithm;
|
||||
use anyhow::anyhow;
|
||||
use sequoia_openpgp::types::HashAlgorithm;
|
||||
use zbase32;
|
||||
|
||||
// cannibalized from
|
||||
// https://gitlab.com/sequoia-pgp/sequoia/blob/master/net/src/wkd.rs
|
||||
|
||||
pub fn encode_wkd(address: impl AsRef<str>) -> Result<(String, String)> {
|
||||
pub fn encode_wkd(address: impl AsRef<str>) -> anyhow::Result<(String, String)> {
|
||||
let (local_part, domain) = split_address(address)?;
|
||||
|
||||
let local_part_encoded = encode_local_part(local_part);
|
||||
@@ -13,7 +13,7 @@ pub fn encode_wkd(address: impl AsRef<str>) -> Result<(String, String)> {
|
||||
Ok((local_part_encoded, domain))
|
||||
}
|
||||
|
||||
fn split_address(email_address: impl AsRef<str>) -> Result<(String, String)> {
|
||||
fn split_address(email_address: impl AsRef<str>) -> anyhow::Result<(String, String)> {
|
||||
let email_address = email_address.as_ref();
|
||||
let v: Vec<&str> = email_address.split('@').collect();
|
||||
if v.len() != 2 {
|
||||
|
38
default.nix
Normal file
38
default.nix
Normal file
@@ -0,0 +1,38 @@
|
||||
{ lib, rustPlatform, sqlite, openssl, gettext, pkg-config, commitShaShort ? "" }:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "hagrid";
|
||||
version = "2.1.0";
|
||||
|
||||
src = ./.;
|
||||
cargoLock = {
|
||||
lockFile = ./Cargo.lock;
|
||||
outputHashes = {
|
||||
"rocket_i18n-0.5.0" = "sha256-EbUE8Z3TQBnDnptl9qWK6JvsACCgP7EXTxcA7pouYbc=";
|
||||
};
|
||||
};
|
||||
|
||||
postInstall = ''
|
||||
cp -r dist $out
|
||||
'';
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkg-config
|
||||
gettext
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
sqlite
|
||||
openssl
|
||||
];
|
||||
|
||||
COMMIT_SHA_SHORT = commitShaShort;
|
||||
|
||||
meta = with lib; {
|
||||
description = "A verifying keyserver";
|
||||
homepage = "https://gitlab.com/keys.openpgp.org/hagrid";
|
||||
license = with licenses; [ gpl3 ];
|
||||
maintainers = with maintainers; [ valodim ];
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
BIN
dist/assets/img/koo.ico
vendored
Normal file
BIN
dist/assets/img/koo.ico
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 261 KiB |
23
dist/assets/img/koo.svg
vendored
Normal file
23
dist/assets/img/koo.svg
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
<svg width="161" height="163" viewBox="0 0 161 163" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="79.6786" cy="81.6786" r="15.6786" fill="#4FA5FC" opacity="0.2"/>
|
||||
<circle cx="16" cy="17" r="16" fill="#4FA5FC" opacity="0.2"/>
|
||||
<circle cx="145" cy="146" r="16" fill="#4FA5FC" opacity="0.2"/>
|
||||
<circle cx="144.5" cy="16.5" r="15.5" fill="#4FA5FC" opacity="0.2"/>
|
||||
<circle cx="16" cy="81" r="16" fill="#4FA5FC" opacity="0.2"/>
|
||||
<circle cx="16" cy="146" r="16" fill="#4FA5FC" opacity="0.2"/>
|
||||
<rect width="32" height="162" rx="16" fill="url(#paint0_linear_2403_93)"/>
|
||||
<circle cx="79.6786" cy="146.679" r="14.1108" stroke="#4FA5FC" stroke-width="3.13573" opacity="0.2"/>
|
||||
<circle cx="79.6786" cy="17.6786" r="14.1108" stroke="#4FA5FC" stroke-width="3.13573" opacity="0.2"/>
|
||||
<circle cx="144.679" cy="81.6786" r="14.1108" stroke="#4FA5FC" stroke-width="3.13573" opacity="0.2"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M64.0032 81.6796C63.9199 77.4791 65.481 73.2521 68.6863 70.0468L133.047 5.68629C139.295 -0.5621 149.426 -0.562094 155.674 5.68629C161.923 11.9347 161.923 22.0653 155.674 28.3137L102.308 81.6802L155.674 135.047C161.923 141.295 161.923 151.426 155.674 157.674C149.426 163.923 139.295 163.923 133.047 157.674L68.6863 93.3137C65.4806 90.108 63.9196 85.8805 64.0032 81.6796Z" fill="url(#paint1_linear_2403_93)"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_2403_93" x1="16" y1="0" x2="16" y2="162" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#4FA5FC" stop-opacity="0"/>
|
||||
<stop offset="1" stop-color="#4FA5FC"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear_2403_93" x1="116.873" y1="162.373" x2="116.873" y2="1.37259" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#4FA5FC" stop-opacity="0"/>
|
||||
<stop offset="1" stop-color="#4FA5FC"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
After Width: | Height: | Size: 1.8 KiB |
7
dist/assets/site.css
vendored
7
dist/assets/site.css
vendored
@@ -91,7 +91,6 @@ span.brand {
|
||||
|
||||
h1 {
|
||||
padding-bottom: 0.75em;
|
||||
padding-bottom: 0.75em;
|
||||
}
|
||||
|
||||
.ui p {
|
||||
@@ -111,6 +110,12 @@ a.brand {
|
||||
color: #050505;
|
||||
}
|
||||
|
||||
a.brand img {
|
||||
height: 1.2em;
|
||||
margin-right: 20px;
|
||||
vertical-align: text-top;
|
||||
}
|
||||
|
||||
.usage > h2 a, .usage > h2 a:visited {
|
||||
color: #050505;
|
||||
}
|
||||
|
3
dist/templates/layout.html.hbs
vendored
3
dist/templates/layout.html.hbs
vendored
@@ -4,11 +4,12 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<link rel="stylesheet" href="/assets/site.css?v=19" type="text/css"/>
|
||||
<link rel="alternate" href="/atom.xml" type="application/atom+xml" title="keys.openpgp.org newsfeed" />
|
||||
<link rel="icon" href="/assets/img/koo.ico" />
|
||||
<title>keys.openpgp.org</title>
|
||||
</head>
|
||||
<body lang="{{lang}}">
|
||||
<div class="card">
|
||||
<h1><a class="brand" href="/">keys.openpgp.org</a></h1>
|
||||
<h1><a class="brand" href="/"><img src="/assets/img/koo.svg" alt="">keys.openpgp.org</a></h1>
|
||||
{{> @partial-block }}
|
||||
<div class="spacer"></div>
|
||||
</div>
|
||||
|
@@ -1,10 +0,0 @@
|
||||
FROM rustlang/rust:nightly
|
||||
RUN apt update -qy
|
||||
RUN apt install -qy libclang-dev build-essential pkg-config clang libssl-dev gettext zsh
|
||||
|
||||
RUN useradd -u 1000 -d /home/user user && mkdir /home/user && chown user:user /home/user
|
||||
USER user
|
||||
|
||||
RUN rustup install 1.70.0
|
||||
|
||||
WORKDIR /home/user/src
|
@@ -1,11 +0,0 @@
|
||||
# Instructions
|
||||
|
||||
This docker image can be used to build hagrid for a Debian environment.
|
||||
|
||||
```sh
|
||||
# in the main source directory
|
||||
docker build -t hagrid-builder:1.0 docker-build/
|
||||
# bind in volumes to use cache from hosts
|
||||
docker run --rm -i -t --user $UID --volume $PWD:/home/user/src --volume $HOME/.cargo/registry:/usr/local/cargo/registry --volume $HOME/.cargo/git:/usr/local/cargo/git hagrid-builder:1.0 cargo build --release --frozen
|
||||
# release artifact will be in target directory
|
||||
```
|
61
flake.lock
generated
Normal file
61
flake.lock
generated
Normal file
@@ -0,0 +1,61 @@
|
||||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1750005367,
|
||||
"narHash": "sha256-h/aac1dGLhS3qpaD2aZt25NdKY7b+JT0ZIP2WuGsJMU=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "6c64dabd3aa85e0c02ef1cdcb6e1213de64baee3",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-25.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"utils": "utils"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
25
flake.nix
Normal file
25
flake.nix
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
|
||||
utils.url = "github:numtide/flake-utils";
|
||||
};
|
||||
outputs = { self, nixpkgs, utils }:
|
||||
utils.lib.eachDefaultSystem (system: let
|
||||
pkgs = nixpkgs.legacyPackages."${system}";
|
||||
commitShaShort = if self ? rev then (pkgs.lib.substring 0 10 self.rev) else self.dirtyShortRev;
|
||||
in rec {
|
||||
packages.hagrid = pkgs.callPackage ./. { inherit commitShaShort; };
|
||||
packages.hagridctl = pkgs.callPackage ./hagridctl.nix { };
|
||||
packages.wkdDomainChecker = pkgs.callPackage ./wkd-domain-checker/. { };
|
||||
|
||||
packages.default = packages.hagrid;
|
||||
}) // {
|
||||
overlays.hagrid = (final: prev: {
|
||||
hagrid = self.packages."${final.system}".hagrid;
|
||||
hagridctl = self.packages."${final.system}".hagridctl;
|
||||
});
|
||||
overlays.wkdDomainChecker = (final: prev: { wkdDomainChecker = self.packages."${final.system}".wkdDomainChecker; });
|
||||
|
||||
overlays.default = self.overlays.hagrid;
|
||||
};
|
||||
}
|
@@ -12,23 +12,8 @@ limit_req_status 429;
|
||||
# See https://gitlab.com/sequoia-pgp/hagrid/issues/94
|
||||
error_page 502 =500 /502;
|
||||
location /502 {
|
||||
return 500;
|
||||
}
|
||||
|
||||
# for x-accel-redirect forwards
|
||||
location /keys {
|
||||
internal;
|
||||
add_header 'Access-Control-Allow-Origin' '*' always;
|
||||
add_header 'Cache-Control' 'no-cache' always;
|
||||
etag off;
|
||||
}
|
||||
|
||||
location /vks/v1/upload {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /vks/v1/request-verify {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
return 500;
|
||||
}
|
||||
|
||||
location /vks {
|
||||
@@ -36,12 +21,12 @@ location /vks {
|
||||
limit_req zone=search_fpr_keyid burst=1000 nodelay;
|
||||
|
||||
error_page 404 /errors-static/404-by-fpr.htm;
|
||||
default_type application/pgp-keys;
|
||||
add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
|
||||
# default_type application/pgp-keys;
|
||||
# add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
|
||||
add_header 'Access-Control-Allow-Origin' '*' always;
|
||||
add_header 'Cache-Control' 'no-cache' always;
|
||||
etag off;
|
||||
try_files /keys/links/by-fpr/$1/$2/$3 =404;
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location ~ ^/vks/v1/by-keyid/(?:0x)?([^/][^/])([^/][^/])(.*)$ {
|
||||
@@ -49,12 +34,12 @@ location /vks {
|
||||
error_page 429 /errors-static/429-rate-limit-vks-fpr.htm;
|
||||
|
||||
error_page 404 /errors-static/404-by-keyid.htm;
|
||||
default_type application/pgp-keys;
|
||||
add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
|
||||
# default_type application/pgp-keys;
|
||||
# add_header Content-Disposition 'attachment; filename="$1$2$3.asc"';
|
||||
add_header 'Access-Control-Allow-Origin' '*' always;
|
||||
add_header 'Cache-Control' 'no-cache' always;
|
||||
etag off;
|
||||
try_files /keys/links/by-keyid/$1/$2/$3 =404;
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /vks/v1/by-email/ {
|
||||
@@ -110,12 +95,12 @@ location /.well-known/openpgpkey {
|
||||
error_page 429 /errors-static/429-rate-limit-vks-email.htm;
|
||||
|
||||
error_page 404 /errors-static/404-wkd.htm;
|
||||
default_type application/octet-stream;
|
||||
add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
|
||||
# default_type application/octet-stream;
|
||||
# add_header Content-Disposition 'attachment; filename="$2$3$4.asc"';
|
||||
add_header 'Access-Control-Allow-Origin' '*' always;
|
||||
add_header 'Cache-Control' 'no-cache' always;
|
||||
etag off;
|
||||
try_files /keys/links/wkd/$1/$2/$3/$4 =404;
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location ~ "^/.well-known/openpgpkey/([^/]+)/policy$" {
|
||||
@@ -227,26 +212,6 @@ location /search {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /pks {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /manage {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /verify {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /upload {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
location /debug {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
# explicitly cache the home directory
|
||||
location = / {
|
||||
proxy_cache static_cache;
|
||||
@@ -264,3 +229,8 @@ location /about {
|
||||
proxy_cache static_cache;
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
||||
# all other locations are handled by hagrid
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
}
|
||||
|
36
hagridctl.nix
Normal file
36
hagridctl.nix
Normal file
@@ -0,0 +1,36 @@
|
||||
{ lib, rustPlatform, sqlite, openssl, gettext, pkg-config, commitShaShort ? "" }:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "hagridctl";
|
||||
version = "0.1.0";
|
||||
|
||||
src = ./.;
|
||||
cargoLock = {
|
||||
lockFile = ./Cargo.lock;
|
||||
outputHashes = {
|
||||
"rocket_i18n-0.5.0" = "sha256-EbUE8Z3TQBnDnptl9qWK6JvsACCgP7EXTxcA7pouYbc=";
|
||||
};
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkg-config
|
||||
gettext
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
sqlite
|
||||
openssl
|
||||
];
|
||||
|
||||
buildAndTestSubdir = "hagridctl";
|
||||
|
||||
COMMIT_SHA_SHORT = commitShaShort;
|
||||
|
||||
meta = with lib; {
|
||||
description = "A verifying keyserver";
|
||||
homepage = "https://gitlab.com/keys.openpgp.org/hagrid";
|
||||
license = with licenses; [ gpl3 ];
|
||||
maintainers = with maintainers; [ valodim ];
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
@@ -2,26 +2,28 @@
|
||||
name = "hagridctl"
|
||||
version = "0.1.0"
|
||||
authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
|
||||
edition = "2024"
|
||||
description = "Control hagrid database externally"
|
||||
|
||||
[dependencies]
|
||||
hagrid-database = { path = "../database" }
|
||||
anyhow = "1"
|
||||
sequoia-openpgp = { version = "1", default-features = false, features = ["crypto-openssl"] }
|
||||
multipart = "0"
|
||||
log = "0"
|
||||
rand = "0.6"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_derive = "1"
|
||||
serde_json = "1"
|
||||
time = "0.1"
|
||||
tempfile = "3"
|
||||
url = "1"
|
||||
hex = "0.3"
|
||||
base64 = "0.10"
|
||||
pathdiff = "0.1"
|
||||
idna = "0.1"
|
||||
fs2 = "0.4"
|
||||
walkdir = "2"
|
||||
clap = "2"
|
||||
toml = "0.5"
|
||||
indicatif = "0.11"
|
||||
hagrid-database = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
sequoia-openpgp = { workspace = true, features = ["crypto-openssl"] }
|
||||
multipart = { workspace = true }
|
||||
log = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_derive = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
url = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
pathdiff = { workspace = true }
|
||||
idna = { workspace = true }
|
||||
fs2 = { workspace = true }
|
||||
walkdir = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive", "unicode", "env"] }
|
||||
toml = { workspace = true }
|
||||
indicatif = { workspace = true }
|
||||
|
@@ -1,41 +0,0 @@
|
||||
[global]
|
||||
address = "0.0.0.0"
|
||||
port = 8080
|
||||
|
||||
[development]
|
||||
base-URI = "http://localhost:8080"
|
||||
from = "noreply@localhost"
|
||||
x-accel-redirect = false
|
||||
token_secret = "hagrid"
|
||||
token_validity = 3600
|
||||
template_dir = "dist/templates"
|
||||
assets_dir = "dist/assets"
|
||||
keys_internal_dir = "state/keys-internal"
|
||||
keys_external_dir = "state/keys-external"
|
||||
token_dir = "state/tokens"
|
||||
tmp_dir = "state/tmp"
|
||||
maintenance_file = "state/maintenance"
|
||||
|
||||
[staging]
|
||||
base-URI = "https://keys.openpgp.org"
|
||||
from = "noreply@keys.openpgp.org"
|
||||
x-accel-redirect = true
|
||||
template_dir = "templates"
|
||||
keys_internal_dir = "keys"
|
||||
keys_external_dir = "public/keys"
|
||||
assets_dir = "public/assets"
|
||||
token_dir = "tokens"
|
||||
tmp_dir = "tmp"
|
||||
maintenance_file = "maintenance"
|
||||
|
||||
[production]
|
||||
base-URI = "https://keys.openpgp.org"
|
||||
from = "noreply@keys.openpgp.org"
|
||||
x-accel-redirect = true
|
||||
template_dir = "templates"
|
||||
keys_internal_dir = "keys"
|
||||
keys_external_dir = "public/keys"
|
||||
assets_dir = "public/assets"
|
||||
token_dir = "tokens"
|
||||
tmp_dir = "tmp"
|
||||
maintenance_file = "maintenance"
|
73
hagridctl/src/cli.rs
Normal file
73
hagridctl/src/cli.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use crate::{delete, import};
|
||||
use clap::{Parser, Subcommand};
|
||||
use hagrid_database::Query;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(version, about, long_about = None, help_expected = true)]
|
||||
pub(crate) struct Cli {
|
||||
#[arg(long, required = false, env = "HAGRID_DB_FILE_PATH")]
|
||||
/// Set a path to the Sqlite database file
|
||||
db_file_path: PathBuf,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: Command,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Command {
|
||||
/// Delete (address, key)-binding(s), and/or a key(s).
|
||||
Delete {
|
||||
#[arg(long)]
|
||||
/// Also, delete all bindings
|
||||
all_bindings: bool,
|
||||
|
||||
#[arg(long)]
|
||||
/// Also, delete all bindings and the key
|
||||
all: bool,
|
||||
|
||||
/// E-Mail address, Fingerprint, or KeyID of the TPK to delete.
|
||||
/// If a Fingerprint or KeyID is given, --all is implied.
|
||||
query: Query,
|
||||
},
|
||||
/// Import keys into Hagrid
|
||||
Import {
|
||||
#[arg(required = true)]
|
||||
/// List of keyring files to import
|
||||
keyring_files: Vec<PathBuf>,
|
||||
},
|
||||
}
|
||||
|
||||
pub(crate) fn dispatch_cmd(cli: &Cli) -> anyhow::Result<()> {
|
||||
let db_file_path = cli.db_file_path.canonicalize()?;
|
||||
|
||||
match &cli.command {
|
||||
Command::Delete {
|
||||
query,
|
||||
all_bindings,
|
||||
all,
|
||||
} => delete::run(db_file_path, query, *all_bindings, *all),
|
||||
Command::Import { keyring_files } => import::run(db_file_path, keyring_files.to_owned()),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn print_errors(e: anyhow::Error) {
|
||||
eprint!("{}", e);
|
||||
let mut cause = e.source();
|
||||
while let Some(c) = cause {
|
||||
eprint!(":\n {}", c);
|
||||
cause = c.source();
|
||||
}
|
||||
eprintln!();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use clap::CommandFactory;
|
||||
|
||||
#[test]
|
||||
fn test_cli() {
|
||||
Cli::command().debug_assert()
|
||||
}
|
||||
}
|
@@ -1,60 +1,19 @@
|
||||
//! Deletes (address, key)-binding(s), and/or a key(s).
|
||||
|
||||
use hagrid_database::types::Fingerprint;
|
||||
use hagrid_database::{Database, Query, Sqlite};
|
||||
use std::convert::TryInto;
|
||||
use std::path::PathBuf;
|
||||
use std::path::Path;
|
||||
|
||||
extern crate anyhow;
|
||||
use anyhow::Result;
|
||||
|
||||
extern crate structopt;
|
||||
use structopt::StructOpt;
|
||||
|
||||
extern crate hagrid_database as database;
|
||||
use crate::database::{Database, KeyDatabase, Query};
|
||||
|
||||
#[derive(Debug, StructOpt)]
|
||||
#[structopt(
|
||||
name = "hagrid-delete",
|
||||
about = "Deletes (address, key)-binding(s), and/or a key(s)."
|
||||
)]
|
||||
pub struct Opt {
|
||||
/// Base directory.
|
||||
#[structopt(parse(from_os_str))]
|
||||
base: PathBuf,
|
||||
|
||||
/// E-Mail address, Fingerprint, or KeyID of the TPK to delete.
|
||||
/// If a Fingerprint or KeyID is given, --all is implied.
|
||||
query: String,
|
||||
|
||||
/// Also delete all bindings.
|
||||
#[structopt(long = "all-bindings")]
|
||||
pub(crate) fn run(
|
||||
db_file_path: impl AsRef<Path>,
|
||||
query: &Query,
|
||||
all_bindings: bool,
|
||||
mut all: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
let db = &Sqlite::new_file(
|
||||
&db_file_path,
|
||||
Sqlite::log_dir_path_from_db_file_path(&db_file_path)?,
|
||||
)?;
|
||||
|
||||
/// Also delete all bindings and the key.
|
||||
#[structopt(long = "all")]
|
||||
all: bool,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
if let Err(e) = real_main() {
|
||||
eprint!("{}", e);
|
||||
let mut cause = e.source();
|
||||
while let Some(c) = cause {
|
||||
eprint!(":\n {}", c);
|
||||
cause = c.source();
|
||||
}
|
||||
eprintln!();
|
||||
::std::process::exit(2);
|
||||
}
|
||||
}
|
||||
|
||||
fn real_main() -> Result<()> {
|
||||
let opt = Opt::from_args();
|
||||
let db = KeyDatabase::new_from_base(opt.base.canonicalize()?)?;
|
||||
delete(&db, &opt.query.parse()?, opt.all_bindings, opt.all)
|
||||
}
|
||||
|
||||
fn delete(db: &KeyDatabase, query: &Query, all_bindings: bool, mut all: bool) -> Result<()> {
|
||||
match query {
|
||||
Query::ByFingerprint(_) | Query::ByKeyID(_) => {
|
||||
eprintln!(
|
||||
@@ -70,13 +29,13 @@ fn delete(db: &KeyDatabase, query: &Query, all_bindings: bool, mut all: bool) ->
|
||||
.lookup(query)?
|
||||
.ok_or_else(|| anyhow::format_err!("No TPK matching {:?}", query))?;
|
||||
|
||||
let fp: database::types::Fingerprint = tpk.fingerprint().try_into()?;
|
||||
let fp: Fingerprint = tpk.fingerprint().try_into()?;
|
||||
let mut results = Vec::new();
|
||||
|
||||
// First, delete the bindings.
|
||||
if all_bindings || all {
|
||||
results.push(("all bindings".into(), db.set_email_unpublished_all(&fp)));
|
||||
} else if let Query::ByEmail(ref email) = query {
|
||||
} else if let Query::ByEmail(email) = query {
|
||||
results.push((email.to_string(), db.set_email_unpublished(&fp, email)));
|
||||
} else {
|
||||
unreachable!()
|
@@ -1,32 +1,25 @@
|
||||
use std::cmp::min;
|
||||
use std::convert::TryInto;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
|
||||
use anyhow::Result;
|
||||
use sequoia_openpgp::Packet;
|
||||
use sequoia_openpgp::parse::{PacketParser, PacketParserResult, Parse};
|
||||
|
||||
extern crate tempfile;
|
||||
|
||||
extern crate sequoia_openpgp as openpgp;
|
||||
use openpgp::parse::{PacketParser, PacketParserResult, Parse};
|
||||
use openpgp::Packet;
|
||||
|
||||
extern crate hagrid_database as database;
|
||||
use database::{Database, ImportResult, KeyDatabase};
|
||||
use hagrid_database::{Database, EmailAddressStatus, ImportResult, Sqlite};
|
||||
|
||||
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
|
||||
|
||||
use HagridConfig;
|
||||
|
||||
// parsing TPKs takes time, so we benefit from some parallelism. however, the
|
||||
// database is locked during the entire merge operation, so we get diminishing
|
||||
// returns after the first few threads.
|
||||
const NUM_THREADS_MAX: usize = 3;
|
||||
|
||||
#[allow(clippy::needless_collect)]
|
||||
pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>) -> Result<()> {
|
||||
pub fn run(db_file_path: impl AsRef<Path>, input_files: Vec<PathBuf>) -> anyhow::Result<()> {
|
||||
let num_threads = min(NUM_THREADS_MAX, input_files.len());
|
||||
let input_file_chunks = setup_chunks(input_files, num_threads);
|
||||
|
||||
@@ -36,10 +29,10 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
|
||||
let threads: Vec<_> = input_file_chunks
|
||||
.into_iter()
|
||||
.map(|input_file_chunk| {
|
||||
let config = config.clone();
|
||||
let db_file_path = db_file_path.as_ref().to_owned();
|
||||
let multi_progress = multi_progress.clone();
|
||||
thread::spawn(move || {
|
||||
import_from_files(&config, dry_run, input_file_chunk, multi_progress).unwrap();
|
||||
import_from_files(db_file_path, input_file_chunk, multi_progress).unwrap();
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
@@ -54,7 +47,7 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
|
||||
}
|
||||
|
||||
fn setup_chunks(mut input_files: Vec<PathBuf>, num_threads: usize) -> Vec<Vec<PathBuf>> {
|
||||
let chunk_size = (input_files.len() + (num_threads - 1)) / num_threads;
|
||||
let chunk_size = input_files.len().div_ceil(num_threads);
|
||||
(0..num_threads)
|
||||
.map(|_| {
|
||||
let len = input_files.len();
|
||||
@@ -86,7 +79,7 @@ impl<'a> ImportStats<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, result: Result<ImportResult>) {
|
||||
fn update(&mut self, result: anyhow::Result<ImportResult>) {
|
||||
// If a new TPK starts, parse and import.
|
||||
self.count_total += 1;
|
||||
match result {
|
||||
@@ -99,7 +92,7 @@ impl<'a> ImportStats<'a> {
|
||||
}
|
||||
|
||||
fn progress_update(&self) {
|
||||
if (self.count_total % 10) != 0 {
|
||||
if !self.count_total.is_multiple_of(10) {
|
||||
return;
|
||||
}
|
||||
self.progress.set_message(&format!(
|
||||
@@ -115,16 +108,13 @@ impl<'a> ImportStats<'a> {
|
||||
}
|
||||
|
||||
fn import_from_files(
|
||||
config: &HagridConfig,
|
||||
dry_run: bool,
|
||||
db_file_path: impl AsRef<Path>,
|
||||
input_files: Vec<PathBuf>,
|
||||
multi_progress: Arc<MultiProgress>,
|
||||
) -> Result<()> {
|
||||
let db = KeyDatabase::new_internal(
|
||||
config.keys_internal_dir.as_ref().unwrap(),
|
||||
config.keys_external_dir.as_ref().unwrap(),
|
||||
config.tmp_dir.as_ref().unwrap(),
|
||||
dry_run,
|
||||
) -> anyhow::Result<()> {
|
||||
let db = Sqlite::new_file(
|
||||
&db_file_path,
|
||||
Sqlite::log_dir_path_from_db_file_path(&db_file_path)?,
|
||||
)?;
|
||||
|
||||
for input_file in input_files {
|
||||
@@ -134,7 +124,11 @@ fn import_from_files(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgress) -> Result<()> {
|
||||
fn import_from_file(
|
||||
db: &Sqlite,
|
||||
input: &Path,
|
||||
multi_progress: &MultiProgress,
|
||||
) -> anyhow::Result<()> {
|
||||
let input_file = File::open(input)?;
|
||||
|
||||
let bytes_total = input_file.metadata()?.len();
|
||||
@@ -152,15 +146,33 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
|
||||
|
||||
read_file_to_tpks(input_reader, &mut |acc| {
|
||||
let primary_key = acc[0].clone();
|
||||
let key_fpr = match primary_key {
|
||||
Packet::PublicKey(key) => key.fingerprint(),
|
||||
Packet::SecretKey(key) => key.fingerprint(),
|
||||
_ => return,
|
||||
};
|
||||
let result = import_key(db, acc);
|
||||
if let Ok(ref result) = result {
|
||||
let tpk_status = result.as_tpk_status();
|
||||
if !tpk_status.is_revoked {
|
||||
for (email, status) in &tpk_status.email_status {
|
||||
if status == &EmailAddressStatus::NotPublished {
|
||||
db.set_email_published(&key_fpr.clone().try_into().unwrap(), email)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Err(ref e) = result {
|
||||
let key_fpr = match primary_key {
|
||||
Packet::PublicKey(key) => key.fingerprint().to_hex(),
|
||||
Packet::SecretKey(key) => key.fingerprint().to_hex(),
|
||||
_ => "Unknown".to_owned(),
|
||||
};
|
||||
let error = format!("{}:{:05}:{}: {}", filename, stats.count_total, key_fpr, e);
|
||||
let error = format!(
|
||||
"{}:{:05}:{}: {}",
|
||||
filename,
|
||||
stats.count_total,
|
||||
key_fpr.to_hex(),
|
||||
e
|
||||
);
|
||||
progress_bar.println(error);
|
||||
return;
|
||||
}
|
||||
stats.update(result);
|
||||
})?;
|
||||
@@ -172,7 +184,7 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
|
||||
fn read_file_to_tpks(
|
||||
reader: impl Read + Send + Sync,
|
||||
callback: &mut impl FnMut(Vec<Packet>),
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let mut ppr = PacketParser::from_reader(reader)?;
|
||||
let mut acc = Vec::new();
|
||||
|
||||
@@ -195,49 +207,6 @@ fn read_file_to_tpks(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn import_key(db: &KeyDatabase, packets: Vec<Packet>) -> Result<ImportResult> {
|
||||
openpgp::Cert::from_packets(packets.into_iter()).and_then(|tpk| db.merge(tpk))
|
||||
fn import_key(db: &Sqlite, packets: Vec<Packet>) -> anyhow::Result<ImportResult> {
|
||||
sequoia_openpgp::Cert::from_packets(packets.into_iter()).and_then(|tpk| db.merge(tpk))
|
||||
}
|
||||
|
||||
/*
|
||||
#[cfg(test)]
|
||||
mod import_tests {
|
||||
use std::fs::File;
|
||||
use tempfile::tempdir;
|
||||
use openpgp::serialize::Serialize;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn import() {
|
||||
let root = tempdir().unwrap();
|
||||
|
||||
let db = KeyDatabase::new_from_base(root.path().to_path_buf()).unwrap();
|
||||
|
||||
// Generate a key and import it.
|
||||
let (tpk, _) = openpgp::tpk::TPKBuilder::autocrypt(
|
||||
None, Some("foo@invalid.example.com".into()))
|
||||
.generate().unwrap();
|
||||
let import_me = root.path().join("import-me");
|
||||
tpk.serialize(&mut File::create(&import_me).unwrap()).unwrap();
|
||||
|
||||
do_import(root.path().to_path_buf(), vec![import_me]).unwrap();
|
||||
|
||||
let check = |query: &str| {
|
||||
let tpk_ = db.lookup(&query.parse().unwrap()).unwrap().unwrap();
|
||||
assert_eq!(tpk.fingerprint(), tpk_.fingerprint());
|
||||
assert_eq!(tpk.subkeys().map(|skb| skb.subkey().fingerprint())
|
||||
.collect::<Vec<_>>(),
|
||||
tpk_.subkeys().map(|skb| skb.subkey().fingerprint())
|
||||
.collect::<Vec<_>>());
|
||||
assert_eq!(tpk_.userids().count(), 0);
|
||||
};
|
||||
|
||||
check(&format!("{}", tpk.primary().fingerprint()));
|
||||
check(&format!("{}", tpk.primary().fingerprint().to_keyid()));
|
||||
check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
|
||||
.fingerprint()));
|
||||
check(&format!("{}", tpk.subkeys().nth(0).unwrap().subkey()
|
||||
.fingerprint().to_keyid()));
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
@@ -1,109 +1,13 @@
|
||||
extern crate anyhow;
|
||||
extern crate clap;
|
||||
extern crate hagrid_database as database;
|
||||
extern crate sequoia_openpgp as openpgp;
|
||||
extern crate tempfile;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
extern crate indicatif;
|
||||
extern crate toml;
|
||||
extern crate walkdir;
|
||||
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use clap::{App, Arg, SubCommand};
|
||||
use clap::Parser;
|
||||
|
||||
mod cli;
|
||||
mod delete;
|
||||
mod import;
|
||||
mod regenerate;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct HagridConfigs {
|
||||
debug: HagridConfig,
|
||||
staging: HagridConfig,
|
||||
release: HagridConfig,
|
||||
}
|
||||
const ERROR_EXIT_CODE: i32 = 2;
|
||||
|
||||
// this is not an exact match - Rocket config has more complicated semantics
|
||||
// than a plain toml file.
|
||||
// see also https://github.com/SergioBenitez/Rocket/issues/228
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct HagridConfig {
|
||||
_template_dir: Option<PathBuf>,
|
||||
keys_internal_dir: Option<PathBuf>,
|
||||
keys_external_dir: Option<PathBuf>,
|
||||
_assets_dir: Option<PathBuf>,
|
||||
_token_dir: Option<PathBuf>,
|
||||
tmp_dir: Option<PathBuf>,
|
||||
_maintenance_file: Option<PathBuf>,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let matches = App::new("Hagrid Control")
|
||||
.version("0.1")
|
||||
.about("Control hagrid database externally")
|
||||
.arg(
|
||||
Arg::with_name("config")
|
||||
.short("c")
|
||||
.long("config")
|
||||
.value_name("FILE")
|
||||
.help("Sets a custom config file")
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("env")
|
||||
.short("e")
|
||||
.long("env")
|
||||
.value_name("ENVIRONMENT")
|
||||
.takes_value(true)
|
||||
.default_value("prod")
|
||||
.possible_values(&["dev", "stage", "prod"]),
|
||||
)
|
||||
.subcommand(SubCommand::with_name("regenerate").about("Regenerate symlink directory"))
|
||||
.subcommand(
|
||||
SubCommand::with_name("import")
|
||||
.about("Import keys into Hagrid")
|
||||
.arg(
|
||||
Arg::with_name("dry run")
|
||||
.short("n")
|
||||
.long("dry-run")
|
||||
.help("don't actually keep imported keys"),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("keyring files")
|
||||
.required(true)
|
||||
.multiple(true),
|
||||
),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
let config_file = matches.value_of("config").unwrap_or("Rocket.toml");
|
||||
let config_data = fs::read_to_string(config_file).unwrap();
|
||||
let configs: HagridConfigs = toml::from_str(&config_data).unwrap();
|
||||
let config = match matches.value_of("env").unwrap() {
|
||||
"dev" => configs.debug,
|
||||
"stage" => configs.staging,
|
||||
"prod" => configs.release,
|
||||
_ => configs.debug,
|
||||
fn main() {
|
||||
let Ok(_) = cli::dispatch_cmd(&cli::Cli::parse()).map_err(cli::print_errors) else {
|
||||
std::process::exit(ERROR_EXIT_CODE);
|
||||
};
|
||||
|
||||
if let Some(matches) = matches.subcommand_matches("import") {
|
||||
let dry_run = matches.occurrences_of("dry run") > 0;
|
||||
let keyrings: Vec<PathBuf> = matches
|
||||
.values_of_lossy("keyring files")
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|arg| PathBuf::from_str(arg).unwrap())
|
||||
.collect();
|
||||
import::do_import(&config, dry_run, keyrings)?;
|
||||
} else if let Some(_matches) = matches.subcommand_matches("regenerate") {
|
||||
regenerate::do_regenerate(&config)?;
|
||||
} else {
|
||||
println!("{}", matches.usage());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@@ -1,133 +0,0 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use std::path::Path;
|
||||
use std::time::Instant;
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use database::types::Fingerprint;
|
||||
use database::{Database, KeyDatabase, RegenerateResult};
|
||||
use HagridConfig;
|
||||
|
||||
struct RegenerateStats<'a> {
|
||||
progress: &'a ProgressBar,
|
||||
prefix: String,
|
||||
count_total: u64,
|
||||
count_err: u64,
|
||||
count_updated: u64,
|
||||
count_unchanged: u64,
|
||||
count_partial: u64,
|
||||
start_time_partial: Instant,
|
||||
kps_partial: u64,
|
||||
}
|
||||
|
||||
impl<'a> RegenerateStats<'a> {
|
||||
fn new(progress: &'a ProgressBar) -> Self {
|
||||
Self {
|
||||
progress,
|
||||
prefix: "".to_owned(),
|
||||
count_total: 0,
|
||||
count_err: 0,
|
||||
count_updated: 0,
|
||||
count_unchanged: 0,
|
||||
count_partial: 0,
|
||||
start_time_partial: Instant::now(),
|
||||
kps_partial: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, result: Result<RegenerateResult>, fpr: Fingerprint) {
|
||||
// If a new TPK starts, parse and import.
|
||||
self.count_total += 1;
|
||||
self.count_partial += 1;
|
||||
if (self.count_total % 10) == 0 {
|
||||
self.prefix = fpr.to_string()[0..4].to_owned();
|
||||
}
|
||||
match result {
|
||||
Err(e) => {
|
||||
self.progress.println(format!("{}: {}", fpr, e));
|
||||
self.count_err += 1;
|
||||
}
|
||||
Ok(RegenerateResult::Updated) => self.count_updated += 1,
|
||||
Ok(RegenerateResult::Unchanged) => self.count_unchanged += 1,
|
||||
}
|
||||
self.progress_update();
|
||||
}
|
||||
|
||||
fn progress_update(&mut self) {
|
||||
if (self.count_total % 10) != 0 {
|
||||
return;
|
||||
}
|
||||
if self.count_partial >= 1000 {
|
||||
let runtime = (self.start_time_partial.elapsed().as_millis() + 1) as u64;
|
||||
self.kps_partial = (self.count_partial * 1000) / runtime;
|
||||
self.start_time_partial = Instant::now();
|
||||
self.count_partial = 0;
|
||||
}
|
||||
self.progress.set_message(&format!(
|
||||
"prefix {} regenerated {:5} keys, {:5} Updated {:5} Unchanged {:5} Errors ({:3} keys/s)",
|
||||
self.prefix, self.count_total, self.count_updated, self.count_unchanged, self.count_err, self.kps_partial));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
|
||||
let db = KeyDatabase::new_internal(
|
||||
config.keys_internal_dir.as_ref().unwrap(),
|
||||
config.keys_external_dir.as_ref().unwrap(),
|
||||
config.tmp_dir.as_ref().unwrap(),
|
||||
false,
|
||||
)?;
|
||||
|
||||
let published_dir = config
|
||||
.keys_external_dir
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.join("links")
|
||||
.join("by-email");
|
||||
let dirs: Vec<_> = WalkDir::new(published_dir)
|
||||
.min_depth(1)
|
||||
.max_depth(1)
|
||||
.sort_by(|a, b| a.file_name().cmp(b.file_name()))
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(|entry| entry.into_path())
|
||||
.collect();
|
||||
|
||||
let progress_bar = ProgressBar::new(dirs.len() as u64);
|
||||
progress_bar.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
|
||||
.progress_chars("##-"),
|
||||
);
|
||||
|
||||
let mut stats = RegenerateStats::new(&progress_bar);
|
||||
|
||||
for dir in dirs {
|
||||
progress_bar.inc(1);
|
||||
regenerate_dir_recursively(&db, &mut stats, &dir)?;
|
||||
}
|
||||
progress_bar.finish();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn regenerate_dir_recursively(
|
||||
db: &KeyDatabase,
|
||||
stats: &mut RegenerateStats,
|
||||
dir: &Path,
|
||||
) -> Result<()> {
|
||||
for path in WalkDir::new(dir)
|
||||
.follow_links(true)
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter(|e| e.file_type().is_file())
|
||||
.map(|entry| entry.into_path())
|
||||
{
|
||||
let fpr = KeyDatabase::path_to_primary(&path).unwrap();
|
||||
let result = db.regenerate_links(&fpr);
|
||||
stats.update(result, fpr);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
383
justfile
Normal file
383
justfile
Normal file
@@ -0,0 +1,383 @@
|
||||
[private]
|
||||
default:
|
||||
@just --list
|
||||
|
||||
# ----------------Settings----------------
|
||||
|
||||
set fallback := true
|
||||
|
||||
# ----------------Variables----------------
|
||||
|
||||
SQLITE_DB_FILE_PATH := 'state/keys-internal/keys.sqlite'
|
||||
|
||||
# Regular expresion for sed cmd to match variants of Rust versions
|
||||
# Matched Rust versions: 1, 1.90, 1.90.0
|
||||
|
||||
SED_RUST_VERSION_REGEX := '[0-9](\.[0-9]+){1,2}'
|
||||
RUST_MANIFEST_STABLE_TOML_URL := 'https://static.rust-lang.org/dist/channel-rust-stable.toml'
|
||||
|
||||
# Hierarchycal path in the file to current stable version
|
||||
|
||||
RUST_MANIFEST_STABLE_VERSION_QUERY := 'pkg.rust.version'
|
||||
|
||||
# Regex in sed(-r) format
|
||||
# Parsed string example: "1.90.0 (1159e78c4 2025-09-14)"
|
||||
|
||||
RUST_MANIFEST_STABLE_VERSION_PARSE_REGEX := '^"([0-9]+).([0-9]+).([0-9]+) \(([0-9A-Fa-f]+) (([0-9]+)-([0-9]+)-([0-9]+))\)"$'
|
||||
|
||||
# GOTCH: See documentation to `rust-stable-version` recipe for explanation on what \1, \2, etc. means.
|
||||
|
||||
DEFAULT_RUST_STABLE_VERSION_FORMAT := '\1.\2'
|
||||
GITLAB_CI_FILE_NAME := '.gitlab-ci.yml'
|
||||
GITLAB_CI_FILE_PATH := absolute_path(GITLAB_CI_FILE_NAME)
|
||||
CARGO_FILE_NAME := 'Cargo.toml'
|
||||
CARGO_FILE_PATH := absolute_path(CARGO_FILE_NAME)
|
||||
CARGO_RUST_VERSION_QUERY := 'package.rust-version'
|
||||
CLIPPY_FILE_NAME := 'clippy.toml'
|
||||
CLIPPY_FILE_PATH := absolute_path(CLIPPY_FILE_NAME)
|
||||
CLIPPY_RUST_VERSION_QUERY := 'msrv'
|
||||
TOOLCHAIN_FILE_NAME := 'rust-toolchain.toml'
|
||||
TOOLCHAIN_FILE_PATH := absolute_path(TOOLCHAIN_FILE_NAME)
|
||||
TOOLCHAIN_RUST_VERSION_QUERY := 'toolchain.channel'
|
||||
GIT_BRANCH_NAME_PREFIX := 'upgrade_rust_to_'
|
||||
|
||||
# ----------------Recipes----------------
|
||||
|
||||
# Perform initial setup of developer's system.
|
||||
[group('setup')]
|
||||
init: init-rocket-config
|
||||
|
||||
# Copy Rocket's template configuration from Rocket.toml.dist to Rocket.toml. Rocket is Rust web framework. See https://rocket.rs/guide/v0.5/configuration/#configuration
|
||||
[group('setup')]
|
||||
init-rocket-config:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
[ ! -f Rocket.toml ] \
|
||||
&& cp Rocket.toml.dist Rocket.toml \
|
||||
&& echo "Rocket.toml.dist copied to Rocket.toml" \
|
||||
|| echo "Rocket.toml exists already!"
|
||||
|
||||
# Format justfile
|
||||
[group('fmt')]
|
||||
[group('format')]
|
||||
[group('just')]
|
||||
just-fmt:
|
||||
just --unstable --fmt
|
||||
|
||||
# Format Rust code in all packages (aka path based dependencies)
|
||||
[group('fmt')]
|
||||
[group('format')]
|
||||
cargo-fmt:
|
||||
cargo fmt --all
|
||||
|
||||
# Format all code
|
||||
[group('fmt')]
|
||||
[group('format')]
|
||||
fmt: just-fmt cargo-fmt
|
||||
|
||||
alias f := fmt
|
||||
|
||||
# Check justfile formatting
|
||||
[group('fmt')]
|
||||
[group('just')]
|
||||
[group('lint')]
|
||||
just-lint-fmt:
|
||||
just --unstable --fmt --check
|
||||
|
||||
# Check Rust code formatting in all packages (aka path based dependencies)
|
||||
[group('fmt')]
|
||||
[group('lint')]
|
||||
cargo-lint-fmt:
|
||||
cargo fmt --all -- --check
|
||||
|
||||
# Check formatting of all code
|
||||
[group('fmt')]
|
||||
[group('lint')]
|
||||
lint-fmt: just-lint-fmt cargo-lint-fmt
|
||||
|
||||
alias lf := lint-fmt
|
||||
|
||||
# Lint Rust code with Clippy
|
||||
[group('clippy')]
|
||||
[group('lint')]
|
||||
clippy-lint:
|
||||
cargo clippy --tests --no-deps --workspace
|
||||
|
||||
alias cl := clippy-lint
|
||||
|
||||
# Lint all code
|
||||
[group('lint')]
|
||||
lint: lint-fmt clippy-lint
|
||||
|
||||
alias l := lint
|
||||
|
||||
# Fix compilation warnings by applying compiler suggestions
|
||||
[group('fix')]
|
||||
cargo-fix *args:
|
||||
cargo fix --workspace {{ args }}
|
||||
|
||||
# Apply Clippy's lint suggestions, i.e. fix Clippy linting warnings or errors
|
||||
[group('clippy')]
|
||||
[group('fix')]
|
||||
clippy-fix *args:
|
||||
cargo clippy --fix --tests --no-deps --workspace {{ args }}
|
||||
|
||||
# Fix lint and compilation warnings and errors. Pass given arguments to all sub-recipes, i.e. `just fix --allow-dirty` calls `just cargo-fix --allow-dirty` and `just clippy-fix --allow-dirty`.
|
||||
[group('fix')]
|
||||
fix *args: (cargo-fix args) (clippy-fix args)
|
||||
|
||||
# Check Rust code errors
|
||||
[group('compile')]
|
||||
check:
|
||||
cargo check
|
||||
|
||||
alias c := check
|
||||
|
||||
# Compile all Rust code
|
||||
[group('compile')]
|
||||
build *args='--workspace':
|
||||
cargo build {{ args }}
|
||||
|
||||
alias b := build
|
||||
|
||||
# Run all tests (i.e. --workspace), but when args given pass them to `cargo test`, e.g. `just test fs::tests::init`
|
||||
[group('test')]
|
||||
test args='--workspace':
|
||||
cargo test {{ args }}
|
||||
|
||||
alias t := test
|
||||
|
||||
# Run continuous check of Rust code errors. Detect file changes and repeat check automatically. Ctrl+c to exit. You can pass additional arguments, e.g. --notify (-N).
|
||||
[group('compile')]
|
||||
[group('watch')]
|
||||
watch-check *args:
|
||||
cargo watch --ignore *.pot {{ args }}
|
||||
|
||||
alias wc := watch-check
|
||||
|
||||
# Run web server and automatically restart on changes. Ctrl+c to exit. You can pass additional arguments, e.g. --notify (-N).
|
||||
[group('compile')]
|
||||
[group('run')]
|
||||
[group('watch')]
|
||||
watch-run *args:
|
||||
cargo watch --exec 'run --bin hagrid' --ignore *.pot {{ args }}
|
||||
|
||||
alias wr := watch-run
|
||||
|
||||
# Run tests every time files changed. Ctrl+c to exit. You can pass additional arguments, e.g. --notify (-N).
|
||||
[group('test')]
|
||||
[group('watch')]
|
||||
watch-test *args:
|
||||
cargo watch --exec 'test --workspace' --ignore *.pot {{ args }}
|
||||
|
||||
alias wt := watch-test
|
||||
|
||||
# Run web server
|
||||
[group('run')]
|
||||
run:
|
||||
cargo run
|
||||
|
||||
alias r := run
|
||||
alias run-hagrid := run
|
||||
alias hagrid := run
|
||||
|
||||
# Run "hagridctl" which automate some operations working with database externally, e.g. import keys
|
||||
[group('run')]
|
||||
run-hagridctl *args:
|
||||
cargo run --package hagridctl -- {{ args }}
|
||||
|
||||
alias hagridctl := run-hagridctl
|
||||
|
||||
# Run "tester" which allows to seed database with sample data, e.g. for testing
|
||||
[group('run')]
|
||||
run-tester *args:
|
||||
cargo run --package tester -- {{ args }}
|
||||
|
||||
alias tester := run-tester
|
||||
|
||||
# Clean compilation artifacts (i.e. "target" directory)
|
||||
[group('clean')]
|
||||
clean:
|
||||
cargo clean
|
||||
|
||||
# Clean changes to translation files
|
||||
[group('clean')]
|
||||
[group('translation')]
|
||||
clean-translations:
|
||||
git restore po/
|
||||
|
||||
# Open database prompt
|
||||
[group('database')]
|
||||
@db:
|
||||
command -v sqlite3 \
|
||||
&& echo "See sqlite3 CLI Documentation: https://sqlite.org/cli.html\n" \
|
||||
&& sqlite3 {{ SQLITE_DB_FILE_PATH }} \
|
||||
|| echo "sqlite3 command has not been found. Please, install it using system's package manager or refer to documentation https://sqlite.org/cli.html for installation." >&2
|
||||
|
||||
# Translate *.hbs templates of web pages
|
||||
[group('translation')]
|
||||
translate-templates:
|
||||
./make-translated-templates
|
||||
|
||||
# Upgrade Rust to a given version, by default current stable Rust version is used
|
||||
[group('housekeeping')]
|
||||
upgrade-rust version=`just _rust-stable-version`: _ensure-no-vcs-changes && _upgrade-rust-fixes-reminder
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
readonly OLD_VERSIONS=$( \
|
||||
for recipe in _current-ci-rust-version _current-cargo-rust-version _current-clippy-rust-version _current-toolchain-rust-version; do \
|
||||
just $recipe; \
|
||||
done \
|
||||
| sort -u \
|
||||
); \
|
||||
|
||||
just _upgrade-rust-git-create-branch "{{ replace(version, '.', '_') }}" "{{ GIT_BRANCH_NAME_PREFIX + replace(version, '.', '_') }}"
|
||||
|
||||
for recipe in _upgrade-rust-ci _upgrade-rust-cargo _upgrade-rust-clippy _upgrade-rust-toolchain; do \
|
||||
just $recipe '{{ version }}'; \
|
||||
done \
|
||||
|
||||
just _upgrade-rust-git-commit \
|
||||
"${OLD_VERSIONS//$'\n'/, }" \
|
||||
"{{ version }}" \
|
||||
"{{ GITLAB_CI_FILE_PATH }} {{ CARGO_FILE_PATH }} {{ CLIPPY_FILE_PATH }} {{ TOOLCHAIN_FILE_PATH }}" \
|
||||
"{{ GITLAB_CI_FILE_NAME }} {{ CARGO_FILE_NAME }} {{ CLIPPY_FILE_NAME }} {{ TOOLCHAIN_FILE_NAME }}"
|
||||
|
||||
_ensure-no-vcs-changes:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
readonly CHANGED_FILES=$(git ls-files --deleted --modified --others --exclude-standard -- :/); \
|
||||
git diff-index --quiet HEAD -- >&2 && true
|
||||
readonly HAS_STAGED_FILES=$?
|
||||
if [ -n "$CHANGED_FILES" ] || [ $HAS_STAGED_FILES != 0 ]; \
|
||||
then \
|
||||
echo -e "{{ RED }}You have working directory changes! \nTo avoid loosing or corrupting your changes commit or stash (git stash -u) them before running commands which change code automatically!{{ NORMAL }}"; \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
_upgrade-rust-git-create-branch branched_version branch_name:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
readonly CURRENT_BRANCH=$(git branch --show-current)
|
||||
if [[ "$CURRENT_BRANCH" == *{{ branched_version }}* ]]; then
|
||||
echo "{{ GREEN }}It looks like you switched to new branch manually. Continue...{{ NORMAL }}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
while true; do
|
||||
read -p "Would you like to create new branch ({{ branch_name }})? [y/n]: " input
|
||||
case "$input" in
|
||||
y)
|
||||
break
|
||||
;;
|
||||
n)
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "{{ RED }}Incorrect input. Please use only y or n.{{ NORMAL }}"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
git switch --create "{{ branch_name }}"
|
||||
|
||||
_upgrade-rust-fixes-reminder:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
echo -e "\n {{ YELLOW }}Don't forget to fix linting (just lint) and compilation (just check) warnings and errors!{{ NORMAL }}\n"
|
||||
|
||||
[confirm('Would you like to commit changes? [y/n]')]
|
||||
_upgrade-rust-git-commit old_versions new_version file_paths file_names:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
echo "Commiting changes ..."; \
|
||||
git add {{ file_paths }}; \
|
||||
sed -r 's/^ {4}//' <<'MSG' | git commit --file -
|
||||
Upgrade Rust toolchain: {{ old_versions }} -> {{ new_version }}
|
||||
|
||||
If you don't have toolchain installed and you use rustup run:
|
||||
|
||||
$ rustup toolchain install --profile default --component rustfmt,clippy {{ new_version }}
|
||||
|
||||
NOTE: It might be that you have {{ new_version }}.0 installed as stable toolchain, in
|
||||
that case you still have to run the above command to install exactly {{ new_version }}.
|
||||
|
||||
Command: `just upgrade-rust`
|
||||
|
||||
Changes:
|
||||
- Upgrade version of used toolchain in the following places:
|
||||
{{ ' - ' + replace_regex(file_names, '\s+', "\n - ") }}
|
||||
MSG
|
||||
|
||||
_upgrade-rust version file_path file_name current_version_recipe version_error_msg_part success_msg_part substitude_cmd:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
readonly OLD=$(just {{ current_version_recipe }} {{ file_path }}); \
|
||||
[ -z "$OLD" ] \
|
||||
&& ( \
|
||||
echo "{{ RED }}{{ file_name }}{{ NORMAL }}: Can't determine {{ version_error_msg_part }} before upgrade" >&2; \
|
||||
exit 1; \
|
||||
); \
|
||||
sed -r -i "{{ substitude_cmd }}" {{ file_path }} \
|
||||
&& ( \
|
||||
readonly NEW=$(just {{ current_version_recipe }} {{ file_path }}); \
|
||||
[ -z "NEW" ] \
|
||||
&& ( \
|
||||
echo "{{ RED }}{{ file_name }}{{ NORMAL }}: Can't determine {{ version_error_msg_part }} after upgrade" >&2; \
|
||||
exit 1; \
|
||||
); \
|
||||
|
||||
echo "{{ GREEN }}{{ file_name }}{{ NORMAL }}: {{ success_msg_part }}: {{ BOLD }}$OLD{{ NORMAL }} -> {{ BOLD }}$NEW{{ NORMAL }}"; \
|
||||
) \
|
||||
|| echo "{{ RED }}{{ file_name }}{{ NORMAL }}: Upgrade failed" >&2
|
||||
|
||||
# Upgrade GitLab CI's Rust to a given version
|
||||
_upgrade-rust-ci version: (_upgrade-rust version GITLAB_CI_FILE_PATH GITLAB_CI_FILE_NAME '_current-ci-image' 'CI image version' 'image upgraded' 's|image:\s+\"rust:(' + SED_RUST_VERSION_REGEX + ')-([a-z]+)\"\s*$|image: \"rust:' + version + '-\3\"|')
|
||||
|
||||
# Upgrade current Rust version in Cargo.toml
|
||||
_upgrade-rust-cargo version: (_upgrade-rust version CARGO_FILE_PATH CARGO_FILE_NAME '_current-cargo-rust-version' 'Rust version in ' + CARGO_FILE_NAME 'version upgraded' 's|rust-version\s*=\s*\"(' + SED_RUST_VERSION_REGEX + ')\"|rust-version = \"' + version + '\"|')
|
||||
|
||||
# Upgrade current Rust version in clippy.toml
|
||||
_upgrade-rust-clippy version: (_upgrade-rust version CLIPPY_FILE_PATH CLIPPY_FILE_NAME '_current-clippy-rust-version' 'Rust version in ' + CLIPPY_FILE_NAME 'version upgraded' 's|msrv\s*=\s*\"(' + SED_RUST_VERSION_REGEX + ')\"|msrv = \"' + version + '\"|')
|
||||
|
||||
# Upgrade current Rust version in rust-toolchain.toml
|
||||
_upgrade-rust-toolchain version: (_upgrade-rust version TOOLCHAIN_FILE_PATH TOOLCHAIN_FILE_NAME '_current-toolchain-rust-version' 'Rust version in ' + CLIPPY_FILE_NAME 'version upgraded' 's|channel\s*=\s*\"(' + SED_RUST_VERSION_REGEX + ')\"|channel = \"' + version + '\"|')
|
||||
|
||||
# Get version of current stable Rust
|
||||
#
|
||||
# Parsed string example: "1.90.0 (1159e78c4 2025-09-14)"
|
||||
# Parsed components:
|
||||
# \1 - MAJOR
|
||||
# \2 - MINOR
|
||||
# \3 - PATCH
|
||||
# \4 - HASH
|
||||
# \5 - RELEASE DATE
|
||||
# \6 - RELEASE YEAR
|
||||
# \7 - RELEASE MONTH
|
||||
# \8 - RELEASE DAY
|
||||
#
|
||||
# Example of custom format: just rust-stable-version '\5 \4 \1.\2'
|
||||
|
||||
# Ouputs: 2025-09-14 1159e78c4 1.90
|
||||
_rust-stable-version format=DEFAULT_RUST_STABLE_VERSION_FORMAT:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
curl -s {{ RUST_MANIFEST_STABLE_TOML_URL }} \
|
||||
| tq {{ RUST_MANIFEST_STABLE_VERSION_QUERY }} \
|
||||
| sed -rn 's|{{ RUST_MANIFEST_STABLE_VERSION_PARSE_REGEX }}|{{ format }}|p'
|
||||
|
||||
# Extract current CI image in use
|
||||
_current-ci-image file_path=GITLAB_CI_FILE_PATH:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
sed -rn "s|\s*image:\s+\"(rust:({{ SED_RUST_VERSION_REGEX }})-([a-z]+))\"\s*$|\1|p" "{{ file_path }}"
|
||||
|
||||
_current-ci-rust-version file_path=GITLAB_CI_FILE_PATH:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
sed -rn "s|\s*image:\s+\"rust:({{ SED_RUST_VERSION_REGEX }})-([a-z]+)\"\s*$|\1|p" "{{ file_path }}"
|
||||
|
||||
# Extract current Rust version from Cargo.toml
|
||||
_current-cargo-rust-version file_path=CARGO_FILE_PATH:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
tq --file "{{ file_path }}" --raw {{ CARGO_RUST_VERSION_QUERY }}
|
||||
|
||||
# Extract current Rust version from clippy.toml
|
||||
_current-clippy-rust-version file_path=CLIPPY_FILE_PATH:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
tq --file "{{ file_path }}" --raw {{ CLIPPY_RUST_VERSION_QUERY }}
|
||||
|
||||
# Extract current Rust version from rust-toolchain.toml
|
||||
_current-toolchain-rust-version file_path=TOOLCHAIN_FILE_PATH:
|
||||
#!/usr/bin/env -S bash -euo pipefail
|
||||
tq --file "{{ file_path }}" --raw {{ TOOLCHAIN_RUST_VERSION_QUERY }}
|
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env zsh
|
||||
#!/usr/bin/env -S zsh -euo pipefail
|
||||
|
||||
for i in templates-untranslated/**/*.hbs; do
|
||||
local template=${${i#templates-untranslated/}}
|
||||
|
@@ -1 +0,0 @@
|
||||
1.70.0
|
3
rust-toolchain.toml
Normal file
3
rust-toolchain.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
[toolchain]
|
||||
channel = "1.86"
|
||||
components = ["rustfmt", "clippy"]
|
@@ -3,7 +3,7 @@ let
|
||||
"https://github.com/oxalica/rust-overlay/archive/master.tar.gz");
|
||||
|
||||
pkgs = import <nixpkgs> { overlays = [ oxalica_overlay ]; };
|
||||
rust_channel = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain;
|
||||
rust_channel = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
|
||||
#rust_channel = pkgs.rust-bin.stable.latest.default;
|
||||
in
|
||||
pkgs.mkShell {
|
||||
@@ -17,6 +17,7 @@ pkgs.mkShell {
|
||||
];
|
||||
|
||||
buildInputs = with pkgs; [
|
||||
sqlite
|
||||
openssl
|
||||
|
||||
clang
|
||||
|
@@ -1,58 +1,177 @@
|
||||
use lazy_static::lazy_static;
|
||||
use hagrid_database::types::Email;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use crate::database::types::Email;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
// from https://github.com/mailcheck/mailcheck/wiki/List-of-Popular-Domains
|
||||
lazy_static! {
|
||||
static ref POPULAR_DOMAINS: HashSet<&'static str> = vec!(
|
||||
static POPULAR_DOMAINS: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
|
||||
vec![
|
||||
/* Default domains included */
|
||||
"aol.com", "att.net", "comcast.net", "facebook.com", "gmail.com", "gmx.com", "googlemail.com",
|
||||
"google.com", "hotmail.com", "hotmail.co.uk", "mac.com", "me.com", "mail.com", "msn.com",
|
||||
"live.com", "sbcglobal.net", "verizon.net", "yahoo.com", "yahoo.co.uk",
|
||||
|
||||
"aol.com",
|
||||
"att.net",
|
||||
"comcast.net",
|
||||
"facebook.com",
|
||||
"gmail.com",
|
||||
"gmx.com",
|
||||
"googlemail.com",
|
||||
"google.com",
|
||||
"hotmail.com",
|
||||
"hotmail.co.uk",
|
||||
"mac.com",
|
||||
"me.com",
|
||||
"mail.com",
|
||||
"msn.com",
|
||||
"live.com",
|
||||
"sbcglobal.net",
|
||||
"verizon.net",
|
||||
"yahoo.com",
|
||||
"yahoo.co.uk",
|
||||
/* Other global domains */
|
||||
"email.com", "fastmail.fm", "games.com" /* AOL */, "gmx.net", "hush.com", "hushmail.com", "icloud.com",
|
||||
"iname.com", "inbox.com", "lavabit.com", "love.com" /* AOL */, "mailbox.org", "posteo.de", "outlook.com", "pobox.com", "protonmail.ch", "protonmail.com", "tutanota.de", "tutanota.com", "tutamail.com", "tuta.io",
|
||||
"keemail.me", "rocketmail.com" /* Yahoo */, "safe-mail.net", "wow.com" /* AOL */, "ygm.com" /* AOL */,
|
||||
"ymail.com" /* Yahoo */, "zoho.com", "yandex.com",
|
||||
|
||||
"email.com",
|
||||
"fastmail.fm",
|
||||
"games.com", /* AOL */
|
||||
"gmx.net",
|
||||
"hush.com",
|
||||
"hushmail.com",
|
||||
"icloud.com",
|
||||
"iname.com",
|
||||
"inbox.com",
|
||||
"lavabit.com",
|
||||
"love.com", /* AOL */
|
||||
"mailbox.org",
|
||||
"posteo.de",
|
||||
"outlook.com",
|
||||
"pobox.com",
|
||||
"protonmail.ch",
|
||||
"protonmail.com",
|
||||
"tutanota.de",
|
||||
"tutanota.com",
|
||||
"tutamail.com",
|
||||
"tuta.io",
|
||||
"keemail.me",
|
||||
"rocketmail.com", /* Yahoo */
|
||||
"safe-mail.net",
|
||||
"wow.com", /* AOL */
|
||||
"ygm.com", /* AOL */
|
||||
"ymail.com", /* Yahoo */
|
||||
"zoho.com",
|
||||
"yandex.com",
|
||||
/* United States ISP domains */
|
||||
"bellsouth.net", "charter.net", "cox.net", "earthlink.net", "juno.com",
|
||||
|
||||
"bellsouth.net",
|
||||
"charter.net",
|
||||
"cox.net",
|
||||
"earthlink.net",
|
||||
"juno.com",
|
||||
/* British ISP domains */
|
||||
"btinternet.com", "virginmedia.com", "blueyonder.co.uk", "freeserve.co.uk", "live.co.uk",
|
||||
"ntlworld.com", "o2.co.uk", "orange.net", "sky.com", "talktalk.co.uk", "tiscali.co.uk",
|
||||
"virgin.net", "wanadoo.co.uk", "bt.com",
|
||||
|
||||
"btinternet.com",
|
||||
"virginmedia.com",
|
||||
"blueyonder.co.uk",
|
||||
"freeserve.co.uk",
|
||||
"live.co.uk",
|
||||
"ntlworld.com",
|
||||
"o2.co.uk",
|
||||
"orange.net",
|
||||
"sky.com",
|
||||
"talktalk.co.uk",
|
||||
"tiscali.co.uk",
|
||||
"virgin.net",
|
||||
"wanadoo.co.uk",
|
||||
"bt.com",
|
||||
/* Domains used in Asia */
|
||||
"sina.com", "sina.cn", "qq.com", "naver.com", "hanmail.net", "daum.net", "nate.com", "yahoo.co.jp", "yahoo.co.kr", "yahoo.co.id", "yahoo.co.in", "yahoo.com.sg", "yahoo.com.ph", "163.com", "yeah.net", "126.com", "21cn.com", "aliyun.com", "foxmail.com",
|
||||
|
||||
"sina.com",
|
||||
"sina.cn",
|
||||
"qq.com",
|
||||
"naver.com",
|
||||
"hanmail.net",
|
||||
"daum.net",
|
||||
"nate.com",
|
||||
"yahoo.co.jp",
|
||||
"yahoo.co.kr",
|
||||
"yahoo.co.id",
|
||||
"yahoo.co.in",
|
||||
"yahoo.com.sg",
|
||||
"yahoo.com.ph",
|
||||
"163.com",
|
||||
"yeah.net",
|
||||
"126.com",
|
||||
"21cn.com",
|
||||
"aliyun.com",
|
||||
"foxmail.com",
|
||||
/* French ISP domains */
|
||||
"hotmail.fr", "live.fr", "laposte.net", "yahoo.fr", "wanadoo.fr", "orange.fr", "gmx.fr", "sfr.fr", "neuf.fr", "free.fr",
|
||||
|
||||
"hotmail.fr",
|
||||
"live.fr",
|
||||
"laposte.net",
|
||||
"yahoo.fr",
|
||||
"wanadoo.fr",
|
||||
"orange.fr",
|
||||
"gmx.fr",
|
||||
"sfr.fr",
|
||||
"neuf.fr",
|
||||
"free.fr",
|
||||
/* German ISP domains */
|
||||
"gmx.de", "hotmail.de", "live.de", "online.de", "t-online.de" /* T-Mobile */, "web.de", "yahoo.de",
|
||||
|
||||
"gmx.de",
|
||||
"hotmail.de",
|
||||
"live.de",
|
||||
"online.de",
|
||||
"t-online.de", /* T-Mobile */
|
||||
"web.de",
|
||||
"yahoo.de",
|
||||
/* Italian ISP domains */
|
||||
"libero.it", "virgilio.it", "hotmail.it", "aol.it", "tiscali.it", "alice.it", "live.it", "yahoo.it", "email.it", "tin.it", "poste.it", "teletu.it",
|
||||
|
||||
"libero.it",
|
||||
"virgilio.it",
|
||||
"hotmail.it",
|
||||
"aol.it",
|
||||
"tiscali.it",
|
||||
"alice.it",
|
||||
"live.it",
|
||||
"yahoo.it",
|
||||
"email.it",
|
||||
"tin.it",
|
||||
"poste.it",
|
||||
"teletu.it",
|
||||
/* Russian ISP domains */
|
||||
"mail.ru", "rambler.ru", "yandex.ru", "ya.ru", "list.ru",
|
||||
|
||||
"mail.ru",
|
||||
"rambler.ru",
|
||||
"yandex.ru",
|
||||
"ya.ru",
|
||||
"list.ru",
|
||||
/* Belgian ISP domains */
|
||||
"hotmail.be", "live.be", "skynet.be", "voo.be", "tvcablenet.be", "telenet.be",
|
||||
|
||||
"hotmail.be",
|
||||
"live.be",
|
||||
"skynet.be",
|
||||
"voo.be",
|
||||
"tvcablenet.be",
|
||||
"telenet.be",
|
||||
/* Argentinian ISP domains */
|
||||
"hotmail.com.ar", "live.com.ar", "yahoo.com.ar", "fibertel.com.ar", "speedy.com.ar", "arnet.com.ar",
|
||||
|
||||
"hotmail.com.ar",
|
||||
"live.com.ar",
|
||||
"yahoo.com.ar",
|
||||
"fibertel.com.ar",
|
||||
"speedy.com.ar",
|
||||
"arnet.com.ar",
|
||||
/* Domains used in Mexico */
|
||||
"yahoo.com.mx", "live.com.mx", "hotmail.es", "hotmail.com.mx", "prodigy.net.mx",
|
||||
|
||||
"yahoo.com.mx",
|
||||
"live.com.mx",
|
||||
"hotmail.es",
|
||||
"hotmail.com.mx",
|
||||
"prodigy.net.mx",
|
||||
/* Domains used in Brazil */
|
||||
"yahoo.com.br", "hotmail.com.br", "outlook.com.br", "uol.com.br", "bol.com.br", "terra.com.br", "ig.com.br", "itelefonica.com.br", "r7.com", "zipmail.com.br", "globo.com", "globomail.com", "oi.com.br"
|
||||
).into_iter().collect();
|
||||
}
|
||||
"yahoo.com.br",
|
||||
"hotmail.com.br",
|
||||
"outlook.com.br",
|
||||
"uol.com.br",
|
||||
"bol.com.br",
|
||||
"terra.com.br",
|
||||
"ig.com.br",
|
||||
"itelefonica.com.br",
|
||||
"r7.com",
|
||||
"zipmail.com.br",
|
||||
"globo.com",
|
||||
"globomail.com",
|
||||
"oi.com.br",
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
});
|
||||
|
||||
pub fn anonymize_address(email: &Email) -> Option<String> {
|
||||
email
|
||||
|
64
src/app/config.rs
Normal file
64
src/app/config.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
use rocket::figment::Figment;
|
||||
use rocket::serde::Deserialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub const DEFAULT_MAIL_RATE_LIMIT: u64 = 60; // in seconds
|
||||
pub const DEFAULT_MAINTENANCE_FILE_NAME: &str = "maintenance";
|
||||
pub const DEFAULT_LOCALIZED_TEMPLATE_DIR_NAME: &str = "localized";
|
||||
|
||||
pub fn load(figment: &Figment) -> Configuration {
|
||||
figment
|
||||
.extract::<Configuration>()
|
||||
.expect("Rocket config must succeed")
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct Configuration {
|
||||
// database
|
||||
pub keys_internal_dir: PathBuf,
|
||||
// hagrid state
|
||||
pub assets_dir: PathBuf,
|
||||
// state
|
||||
#[serde(rename = "base-URI")]
|
||||
pub base_uri: String,
|
||||
#[serde(rename = "base-URI-Onion")]
|
||||
pub base_uri_onion: Option<String>,
|
||||
// stateful_token_service
|
||||
pub token_dir: PathBuf,
|
||||
// stateless_token_service
|
||||
pub token_secret: String,
|
||||
pub token_validity: u64,
|
||||
// mail_service
|
||||
pub email_template_dir: PathBuf,
|
||||
pub from: String,
|
||||
pub filemail_into: Option<PathBuf>,
|
||||
pub local_smtp: Option<bool>,
|
||||
// rate_limiter
|
||||
#[serde(default = "Configuration::default_mail_rate_limit")]
|
||||
pub mail_rate_limit: u64,
|
||||
// maintenance
|
||||
#[serde(default = "Configuration::default_maintenance_file")]
|
||||
pub maintenance_file: PathBuf,
|
||||
// localized_template_list
|
||||
pub template_dir: PathBuf,
|
||||
// prometheus
|
||||
#[serde(default)]
|
||||
pub enable_prometheus: bool,
|
||||
}
|
||||
|
||||
impl Configuration {
|
||||
pub fn base_uri_onion(&self) -> &str {
|
||||
self.base_uri_onion
|
||||
.as_deref()
|
||||
.unwrap_or_else(|| &self.base_uri)
|
||||
}
|
||||
|
||||
pub fn default_mail_rate_limit() -> u64 {
|
||||
DEFAULT_MAIL_RATE_LIMIT
|
||||
}
|
||||
|
||||
pub fn default_maintenance_file() -> PathBuf {
|
||||
PathBuf::from(DEFAULT_MAINTENANCE_FILE_NAME)
|
||||
}
|
||||
}
|
26
src/app/mod.rs
Normal file
26
src/app/mod.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
pub mod config;
|
||||
pub mod state;
|
||||
|
||||
use crate::initializers;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn run() {
|
||||
let rocket = configure_rocket(rocket::build());
|
||||
|
||||
run_rocket(rocket).unwrap_or_else(|e| eprintln!("Hagrid Error: {e}"))
|
||||
}
|
||||
|
||||
pub fn configure_rocket(rocket: Rocket<Build>) -> Rocket<Build> {
|
||||
let config = config::load(rocket.figment());
|
||||
|
||||
initializers::run(rocket, &config).expect("Initializers error")
|
||||
}
|
||||
|
||||
pub fn run_rocket(rocket: Rocket<Build>) -> Result<(), rocket::Error> {
|
||||
::rocket::async_main(async move {
|
||||
let _rocket = rocket.ignite().await?.launch().await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
10
src/app/state.rs
Normal file
10
src/app/state.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct ApplicationState {
|
||||
/// Assets directory, mounted to /assets, served by hagrid or nginx
|
||||
pub assets_dir: PathBuf,
|
||||
|
||||
/// XXX
|
||||
pub base_uri: String,
|
||||
pub base_uri_onion: String,
|
||||
}
|
@@ -1,29 +1,36 @@
|
||||
use lazy_static::lazy_static;
|
||||
use rocket_prometheus::prometheus;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use crate::anonymize_utils;
|
||||
|
||||
use crate::database::types::Email;
|
||||
use hagrid_database::types::Email;
|
||||
|
||||
lazy_static! {
|
||||
static ref KEY_UPLOAD: LabelCounter =
|
||||
LabelCounter::new("hagrid_key_upload", "Uploaded keys", &["result"]);
|
||||
static ref MAIL_SENT: LabelCounter = LabelCounter::new(
|
||||
static KEY_UPLOAD: LazyLock<LabelCounter> =
|
||||
LazyLock::new(|| LabelCounter::new("hagrid_key_upload", "Uploaded keys", &["result"]));
|
||||
|
||||
static MAIL_SENT: LazyLock<LabelCounter> = LazyLock::new(|| {
|
||||
LabelCounter::new(
|
||||
"hagrid_mail_sent",
|
||||
"Sent verification mails",
|
||||
&["type", "domain"]
|
||||
);
|
||||
static ref KEY_ADDRESS_PUBLISHED: LabelCounter = LabelCounter::new(
|
||||
&["type", "domain"],
|
||||
)
|
||||
});
|
||||
|
||||
static KEY_ADDRESS_PUBLISHED: LazyLock<LabelCounter> = LazyLock::new(|| {
|
||||
LabelCounter::new(
|
||||
"hagrid_key_address_published",
|
||||
"Verified email addresses",
|
||||
&["domain"]
|
||||
);
|
||||
static ref KEY_ADDRESS_UNPUBLISHED: LabelCounter = LabelCounter::new(
|
||||
&["domain"],
|
||||
)
|
||||
});
|
||||
|
||||
static KEY_ADDRESS_UNPUBLISHED: LazyLock<LabelCounter> = LazyLock::new(|| {
|
||||
LabelCounter::new(
|
||||
"hagrid_key_address_unpublished",
|
||||
"Unpublished email addresses",
|
||||
&["domain"]
|
||||
);
|
||||
}
|
||||
&["domain"],
|
||||
)
|
||||
});
|
||||
|
||||
pub fn register_counters(registry: &prometheus::Registry) {
|
||||
KEY_UPLOAD.register(registry);
|
||||
|
124
src/dump.rs
124
src/dump.rs
@@ -1,26 +1,30 @@
|
||||
// This file is externally pulled from sequoia upstream. Don't bother with any warnings or linting
|
||||
#![allow(warnings)]
|
||||
#![allow(clippy)]
|
||||
#![allow(unknown_lints)]
|
||||
|
||||
use std::io::{self, Read};
|
||||
|
||||
use self::openpgp::crypto::mpi;
|
||||
use self::openpgp::crypto::S2K;
|
||||
use self::openpgp::fmt::hex;
|
||||
use self::openpgp::packet::header::CTB;
|
||||
use self::openpgp::packet::prelude::*;
|
||||
use self::openpgp::packet::signature::subpacket::{Subpacket, SubpacketValue};
|
||||
use self::openpgp::packet::{header::BodyLength, Header, Signature};
|
||||
use self::openpgp::parse::{map::Map, PacketParserResult, Parse};
|
||||
use self::openpgp::types::{Duration, SymmetricAlgorithm, Timestamp};
|
||||
use self::openpgp::{Packet, Result};
|
||||
use sequoia_openpgp as openpgp;
|
||||
use sequoia_openpgp::crypto::S2K;
|
||||
use sequoia_openpgp::crypto::mpi;
|
||||
use sequoia_openpgp::fmt::hex;
|
||||
use sequoia_openpgp::packet::header::CTB;
|
||||
use sequoia_openpgp::packet::prelude::*;
|
||||
use sequoia_openpgp::packet::signature::subpacket::{Subpacket, SubpacketValue};
|
||||
use sequoia_openpgp::packet::{Header, Signature, header::BodyLength};
|
||||
use sequoia_openpgp::parse::{PacketParserResult, Parse, map::Map};
|
||||
use sequoia_openpgp::types::{Duration, SymmetricAlgorithm, Timestamp};
|
||||
use sequoia_openpgp::{Packet, Result};
|
||||
|
||||
pub struct SessionKey {
|
||||
pub session_key: openpgp::crypto::SessionKey,
|
||||
pub session_key: sequoia_openpgp::crypto::SessionKey,
|
||||
pub symmetric_algo: Option<SymmetricAlgorithm>,
|
||||
}
|
||||
|
||||
impl SessionKey {
|
||||
/// Returns an object that implements Display for explicitly opting into
|
||||
/// printing a `SessionKey`.
|
||||
pub fn display_sensitive(&self) -> SessionKeyDisplay {
|
||||
pub fn display_sensitive(&self) -> SessionKeyDisplay<'_> {
|
||||
SessionKeyDisplay { csk: self }
|
||||
}
|
||||
}
|
||||
@@ -37,7 +41,7 @@ pub struct SessionKeyDisplay<'a> {
|
||||
}
|
||||
|
||||
/// Print the session key without prefix in hexadecimal representation.
|
||||
impl<'a> std::fmt::Display for SessionKeyDisplay<'a> {
|
||||
impl std::fmt::Display for SessionKeyDisplay<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
let sk = self.csk;
|
||||
write!(f, "{}", hex::encode(&sk.session_key))
|
||||
@@ -94,7 +98,7 @@ pub fn dump<W>(
|
||||
where
|
||||
W: Into<Option<usize>>,
|
||||
{
|
||||
let mut ppr = self::openpgp::parse::PacketParserBuilder::from_reader(input)?
|
||||
let mut ppr = sequoia_openpgp::parse::PacketParserBuilder::from_reader(input)?
|
||||
.map(hex)
|
||||
.build()?;
|
||||
let mut message_encrypted = false;
|
||||
@@ -122,7 +126,7 @@ where
|
||||
let decrypted_with = if let Some(algo) = sk.symmetric_algo {
|
||||
// We know which algorithm to use, so only try decrypting
|
||||
// with that one.
|
||||
pp.decrypt(algo, &sk.session_key).is_ok().then(|| algo)
|
||||
pp.decrypt(algo, &sk.session_key).is_ok().then_some(algo)
|
||||
} else {
|
||||
// We don't know which algorithm to use,
|
||||
// try to find one that decrypts the message.
|
||||
@@ -334,7 +338,7 @@ impl PacketDumper {
|
||||
map: Option<&Map>,
|
||||
additional_fields: Option<&Vec<String>>,
|
||||
) -> Result<()> {
|
||||
use self::openpgp::Packet::*;
|
||||
use sequoia_openpgp::Packet::*;
|
||||
|
||||
if let Some(tag) = p.kind() {
|
||||
write!(output, "{}", tag)?;
|
||||
@@ -465,7 +469,7 @@ impl PacketDumper {
|
||||
|
||||
let ii = format!("{} ", i);
|
||||
match secrets {
|
||||
SecretKeyMaterial::Unencrypted(ref u) => {
|
||||
SecretKeyMaterial::Unencrypted(u) => {
|
||||
writeln!(output, "{}", i)?;
|
||||
writeln!(output, "{} Unencrypted", ii)?;
|
||||
if pd.mpis {
|
||||
@@ -514,7 +518,7 @@ impl PacketDumper {
|
||||
})?;
|
||||
}
|
||||
}
|
||||
SecretKeyMaterial::Encrypted(ref e) => {
|
||||
SecretKeyMaterial::Encrypted(e) => {
|
||||
writeln!(output, "{}", i)?;
|
||||
writeln!(output, "{} Encrypted", ii)?;
|
||||
write!(output, "{} S2K: ", ii)?;
|
||||
@@ -533,17 +537,17 @@ impl PacketDumper {
|
||||
}
|
||||
|
||||
match p {
|
||||
Unknown(ref u) => {
|
||||
Unknown(u) => {
|
||||
writeln!(output, "{} Tag: {}", i, u.tag())?;
|
||||
writeln!(output, "{} Error: {}", i, u.error())?;
|
||||
}
|
||||
|
||||
PublicKey(ref k) => dump_key(self, output, i, k)?,
|
||||
PublicSubkey(ref k) => dump_key(self, output, i, k)?,
|
||||
SecretKey(ref k) => dump_key(self, output, i, k)?,
|
||||
SecretSubkey(ref k) => dump_key(self, output, i, k)?,
|
||||
PublicKey(k) => dump_key(self, output, i, k)?,
|
||||
PublicSubkey(k) => dump_key(self, output, i, k)?,
|
||||
SecretKey(k) => dump_key(self, output, i, k)?,
|
||||
SecretSubkey(k) => dump_key(self, output, i, k)?,
|
||||
|
||||
Signature(ref s) => {
|
||||
Signature(s) => {
|
||||
writeln!(output, "{} Version: {}", i, s.version())?;
|
||||
writeln!(output, "{} Type: {}", i, s.typ())?;
|
||||
writeln!(output, "{} Pk algo: {}", i, s.pk_algo())?;
|
||||
@@ -624,7 +628,7 @@ impl PacketDumper {
|
||||
}
|
||||
}
|
||||
|
||||
OnePassSig(ref o) => {
|
||||
OnePassSig(o) => {
|
||||
writeln!(output, "{} Version: {}", i, o.version())?;
|
||||
writeln!(output, "{} Type: {}", i, o.typ())?;
|
||||
writeln!(output, "{} Pk algo: {}", i, o.pk_algo())?;
|
||||
@@ -633,7 +637,7 @@ impl PacketDumper {
|
||||
writeln!(output, "{} Last: {}", i, o.last())?;
|
||||
}
|
||||
|
||||
Trust(ref p) => {
|
||||
Trust(p) => {
|
||||
writeln!(output, "{} Value:", i)?;
|
||||
let mut hd = hex::Dumper::new(
|
||||
&mut output,
|
||||
@@ -642,7 +646,7 @@ impl PacketDumper {
|
||||
hd.write_ascii(p.value())?;
|
||||
}
|
||||
|
||||
UserID(ref u) => {
|
||||
UserID(u) => {
|
||||
writeln!(
|
||||
output,
|
||||
"{} Value: {}",
|
||||
@@ -651,8 +655,8 @@ impl PacketDumper {
|
||||
)?;
|
||||
}
|
||||
|
||||
UserAttribute(ref u) => {
|
||||
use self::openpgp::packet::user_attribute::{Image, Subpacket};
|
||||
UserAttribute(u) => {
|
||||
use sequoia_openpgp::packet::user_attribute::{Image, Subpacket};
|
||||
|
||||
for subpacket in u.subpackets() {
|
||||
match subpacket {
|
||||
@@ -689,7 +693,7 @@ impl PacketDumper {
|
||||
|
||||
Marker(_) => {}
|
||||
|
||||
Literal(ref l) => {
|
||||
Literal(l) => {
|
||||
writeln!(output, "{} Format: {}", i, l.format())?;
|
||||
if let Some(filename) = l.filename() {
|
||||
writeln!(
|
||||
@@ -704,11 +708,11 @@ impl PacketDumper {
|
||||
}
|
||||
}
|
||||
|
||||
CompressedData(ref c) => {
|
||||
CompressedData(c) => {
|
||||
writeln!(output, "{} Algorithm: {}", i, c.algo())?;
|
||||
}
|
||||
|
||||
PKESK(ref p) => {
|
||||
PKESK(p) => {
|
||||
writeln!(output, "{} Version: {}", i, p.version())?;
|
||||
writeln!(output, "{} Recipient: {}", i, p.recipient())?;
|
||||
writeln!(output, "{} Pk algo: {}", i, p.pk_algo())?;
|
||||
@@ -749,10 +753,10 @@ impl PacketDumper {
|
||||
}
|
||||
}
|
||||
|
||||
SKESK(ref s) => {
|
||||
SKESK(s) => {
|
||||
writeln!(output, "{} Version: {}", i, s.version())?;
|
||||
match s {
|
||||
self::openpgp::packet::SKESK::V4(ref s) => {
|
||||
sequoia_openpgp::packet::SKESK::V4(s) => {
|
||||
writeln!(output, "{} Symmetric algo: {}", i, s.symmetric_algo())?;
|
||||
write!(output, "{} S2K: ", i)?;
|
||||
self.dump_s2k(output, i, s.s2k())?;
|
||||
@@ -761,7 +765,7 @@ impl PacketDumper {
|
||||
}
|
||||
}
|
||||
|
||||
self::openpgp::packet::SKESK::V5(ref s) => {
|
||||
sequoia_openpgp::packet::SKESK::V5(s) => {
|
||||
writeln!(output, "{} Symmetric algo: {}", i, s.symmetric_algo())?;
|
||||
writeln!(output, "{} AEAD: {}", i, s.aead_algo())?;
|
||||
write!(output, "{} S2K: ", i)?;
|
||||
@@ -780,11 +784,11 @@ impl PacketDumper {
|
||||
}
|
||||
}
|
||||
|
||||
SEIP(ref s) => {
|
||||
SEIP(s) => {
|
||||
writeln!(output, "{} Version: {}", i, s.version())?;
|
||||
}
|
||||
|
||||
MDC(ref m) => {
|
||||
MDC(m) => {
|
||||
writeln!(output, "{} Digest: {}", i, hex::encode(m.digest()))?;
|
||||
writeln!(
|
||||
output,
|
||||
@@ -794,7 +798,7 @@ impl PacketDumper {
|
||||
)?;
|
||||
}
|
||||
|
||||
AED(ref a) => {
|
||||
AED(a) => {
|
||||
writeln!(output, "{} Version: {}", i, a.version())?;
|
||||
writeln!(output, "{} Symmetric algo: {}", i, a.symmetric_algo())?;
|
||||
writeln!(output, "{} AEAD: {}", i, a.aead())?;
|
||||
@@ -903,7 +907,7 @@ impl PacketDumper {
|
||||
"{} Trust signature: level {} trust {}",
|
||||
i, level, trust
|
||||
)?,
|
||||
RegularExpression(ref r) => write!(
|
||||
RegularExpression(r) => write!(
|
||||
output,
|
||||
"{} Regular expression: {}",
|
||||
i,
|
||||
@@ -913,7 +917,7 @@ impl PacketDumper {
|
||||
KeyExpirationTime(t) => {
|
||||
write!(output, "{} Key expiration time: {}", i, t.convert())?
|
||||
}
|
||||
PreferredSymmetricAlgorithms(ref c) => write!(
|
||||
PreferredSymmetricAlgorithms(c) => write!(
|
||||
output,
|
||||
"{} Symmetric algo preferences: {}",
|
||||
i,
|
||||
@@ -929,7 +933,7 @@ impl PacketDumper {
|
||||
write!(output, ", sensitive")?;
|
||||
}
|
||||
}
|
||||
Issuer(ref is) => write!(output, "{} Issuer: {}", i, is)?,
|
||||
Issuer(is) => write!(output, "{} Issuer: {}", i, is)?,
|
||||
NotationData(n) => {
|
||||
if n.flags().human_readable() {
|
||||
write!(output, "{} Notation: {}", i, n)?;
|
||||
@@ -950,7 +954,7 @@ impl PacketDumper {
|
||||
hexdump_unknown(output, n.value())?;
|
||||
}
|
||||
}
|
||||
PreferredHashAlgorithms(ref h) => write!(
|
||||
PreferredHashAlgorithms(h) => write!(
|
||||
output,
|
||||
"{} Hash preferences: {}",
|
||||
i,
|
||||
@@ -959,7 +963,7 @@ impl PacketDumper {
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
)?,
|
||||
PreferredCompressionAlgorithms(ref c) => write!(
|
||||
PreferredCompressionAlgorithms(c) => write!(
|
||||
output,
|
||||
"{} Compression preferences: {}",
|
||||
i,
|
||||
@@ -968,45 +972,43 @@ impl PacketDumper {
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
)?,
|
||||
KeyServerPreferences(ref p) => {
|
||||
write!(output, "{} Keyserver preferences: {:?}", i, p)?
|
||||
}
|
||||
PreferredKeyServer(ref k) => write!(
|
||||
KeyServerPreferences(p) => write!(output, "{} Keyserver preferences: {:?}", i, p)?,
|
||||
PreferredKeyServer(k) => write!(
|
||||
output,
|
||||
"{} Preferred keyserver: {}",
|
||||
i,
|
||||
String::from_utf8_lossy(k)
|
||||
)?,
|
||||
PrimaryUserID(p) => write!(output, "{} Primary User ID: {}", i, p)?,
|
||||
PolicyURI(ref p) => write!(
|
||||
PolicyURI(p) => write!(
|
||||
output,
|
||||
"{} Policy URI: {}",
|
||||
i,
|
||||
String::from_utf8_lossy(p)
|
||||
)?,
|
||||
KeyFlags(ref k) => write!(output, "{} Key flags: {:?}", i, k)?,
|
||||
SignersUserID(ref u) => write!(
|
||||
KeyFlags(k) => write!(output, "{} Key flags: {:?}", i, k)?,
|
||||
SignersUserID(u) => write!(
|
||||
output,
|
||||
"{} Signer's User ID: {}",
|
||||
i,
|
||||
String::from_utf8_lossy(u)
|
||||
)?,
|
||||
ReasonForRevocation { code, ref reason } => {
|
||||
ReasonForRevocation { code, reason } => {
|
||||
let reason = String::from_utf8_lossy(reason);
|
||||
write!(
|
||||
output,
|
||||
"{} Reason for revocation: {}{}{}",
|
||||
i,
|
||||
code,
|
||||
if reason.len() > 0 { ", " } else { "" },
|
||||
if reason.is_empty() { "" } else { ", " },
|
||||
reason
|
||||
)?
|
||||
}
|
||||
Features(ref f) => write!(output, "{} Features: {:?}", i, f)?,
|
||||
Features(f) => write!(output, "{} Features: {:?}", i, f)?,
|
||||
SignatureTarget {
|
||||
pk_algo,
|
||||
hash_algo,
|
||||
ref digest,
|
||||
digest,
|
||||
} => write!(
|
||||
output,
|
||||
"{} Signature target: {}, {}, {}",
|
||||
@@ -1020,8 +1022,8 @@ impl PacketDumper {
|
||||
{
|
||||
write!(output, "{} Embedded signature: ", i)?
|
||||
}
|
||||
IssuerFingerprint(ref fp) => write!(output, "{} Issuer Fingerprint: {}", i, fp)?,
|
||||
PreferredAEADAlgorithms(ref c) => write!(
|
||||
IssuerFingerprint(fp) => write!(output, "{} Issuer Fingerprint: {}", i, fp)?,
|
||||
PreferredAEADAlgorithms(c) => write!(
|
||||
output,
|
||||
"{} AEAD preferences: {}",
|
||||
i,
|
||||
@@ -1030,7 +1032,7 @@ impl PacketDumper {
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
)?,
|
||||
IntendedRecipient(ref fp) => write!(output, "{} Intended Recipient: {}", i, fp)?,
|
||||
IntendedRecipient(fp) => write!(output, "{} Intended Recipient: {}", i, fp)?,
|
||||
AttestedCertifications(digests) => {
|
||||
write!(output, "{} Attested Certifications:", i)?;
|
||||
if digests.is_empty() {
|
||||
@@ -1050,7 +1052,7 @@ impl PacketDumper {
|
||||
match s.value() {
|
||||
Unknown { .. } => (),
|
||||
NotationData { .. } => (),
|
||||
EmbeddedSignature(ref sig) => {
|
||||
EmbeddedSignature(sig) => {
|
||||
if s.critical() {
|
||||
write!(output, " (critical)")?;
|
||||
}
|
||||
@@ -1078,14 +1080,14 @@ impl PacketDumper {
|
||||
writeln!(output, "Simple")?;
|
||||
writeln!(output, "{} Hash: {}", i, hash)?;
|
||||
}
|
||||
Salted { hash, ref salt } => {
|
||||
Salted { hash, salt } => {
|
||||
writeln!(output, "Salted")?;
|
||||
writeln!(output, "{} Hash: {}", i, hash)?;
|
||||
writeln!(output, "{} Salt: {}", i, hex::encode(salt))?;
|
||||
}
|
||||
Iterated {
|
||||
hash,
|
||||
ref salt,
|
||||
salt,
|
||||
hash_bytes,
|
||||
} => {
|
||||
writeln!(output, "Iterated")?;
|
||||
|
@@ -6,57 +6,101 @@ fn _dummy() {
|
||||
t!("Error message: {{ internal_error }}");
|
||||
t!("There was an error with your request:");
|
||||
t!("We found an entry for <span class=\"email\">{{ query }}</span>:");
|
||||
t!("<strong>Hint:</strong> It's more convenient to use <span class=\"brand\">keys.openpgp.org</span> from your OpenPGP software.<br /> Take a look at our <a href=\"/about/usage\">usage guide</a> for details.");
|
||||
t!(
|
||||
"<strong>Hint:</strong> It's more convenient to use <span class=\"brand\">keys.openpgp.org</span> from your OpenPGP software.<br /> Take a look at our <a href=\"/about/usage\">usage guide</a> for details."
|
||||
);
|
||||
t!("debug info");
|
||||
t!("Search by Email Address / Key ID / Fingerprint");
|
||||
t!("Search");
|
||||
t!("You can also <a href=\"/upload\">upload</a> or <a href=\"/manage\">manage</a> your key.");
|
||||
t!("Find out more <a href=\"/about\">about this service</a>.");
|
||||
t!("News:");
|
||||
t!("<a href=\"/about/news#2019-11-12-celebrating-100k\">Celebrating 100.000 verified addresses! 📈</a> (2019-11-12)");
|
||||
t!(
|
||||
"<a href=\"/about/news#2019-11-12-celebrating-100k\">Celebrating 100.000 verified addresses! 📈</a> (2019-11-12)"
|
||||
);
|
||||
t!("v{{ version }} built from");
|
||||
t!("Powered by <a href=\"https://sequoia-pgp.org\">Sequoia-PGP</a>");
|
||||
t!("Background image retrieved from <a href=\"https://www.toptal.com/designers/subtlepatterns/subtle-grey/\">Subtle Patterns</a> under CC BY-SA 3.0");
|
||||
t!(
|
||||
"Background image retrieved from <a href=\"https://www.toptal.com/designers/subtlepatterns/subtle-grey/\">Subtle Patterns</a> under CC BY-SA 3.0"
|
||||
);
|
||||
t!("Maintenance Mode");
|
||||
t!("Manage your key");
|
||||
t!("Enter any verified email address for your key");
|
||||
t!("Send link");
|
||||
t!("We will send you an email with a link you can use to remove any of your email addresses from search.");
|
||||
t!("Managing the key <span class=\"fingerprint\"><a href=\"{{ key_link }}\" target=\"_blank\">{{ key_fpr }}</a></span>.");
|
||||
t!(
|
||||
"We will send you an email with a link you can use to remove any of your email addresses from search."
|
||||
);
|
||||
t!(
|
||||
"Managing the key <span class=\"fingerprint\"><a href=\"{{ key_link }}\" target=\"_blank\">{{ key_fpr }}</a></span>."
|
||||
);
|
||||
t!("Your key is published with the following identity information:");
|
||||
t!("Delete");
|
||||
t!("Clicking \"delete\" on any address will remove it from this key. It will no longer appear in a search.<br /> To add another address, <a href=\"/upload\">upload</a> the key again.");
|
||||
t!("Your key is published as only non-identity information. (<a href=\"/about\" target=\"_blank\">What does this mean?</a>)");
|
||||
t!(
|
||||
"Clicking \"delete\" on any address will remove it from this key. It will no longer appear in a search.<br /> To add another address, <a href=\"/upload\">upload</a> the key again."
|
||||
);
|
||||
t!(
|
||||
"Your key is published as only non-identity information. (<a href=\"/about\" target=\"_blank\">What does this mean?</a>)"
|
||||
);
|
||||
t!("To add an address, <a href=\"/upload\">upload</a> the key again.");
|
||||
t!("We have sent an email with further instructions to <span class=\"email\">{{ address }}</span>.");
|
||||
t!(
|
||||
"We have sent an email with further instructions to <span class=\"email\">{{ address }}</span>."
|
||||
);
|
||||
t!("This address has already been verified.");
|
||||
t!("Your key <span class=\"fingerprint\">{{ key_fpr }}</span> is now published for the identity <a href=\"{{userid_link}}\" target=\"_blank\"><span class=\"email\">{{ userid }}</span></a>.");
|
||||
t!(
|
||||
"Your key <span class=\"fingerprint\">{{ key_fpr }}</span> is now published for the identity <a href=\"{{userid_link}}\" target=\"_blank\"><span class=\"email\">{{ userid }}</span></a>."
|
||||
);
|
||||
t!("Upload your key");
|
||||
t!("Upload");
|
||||
t!("Need more info? Check our <a target=\"_blank\" href=\"/about\">intro</a> and <a target=\"_blank\" href=\"/about/usage\">usage guide</a>.");
|
||||
t!("You uploaded the key <span class=\"fingerprint\"><a href=\"{{ key_link }}\" target=\"_blank\">{{ key_fpr }}</a></span>.");
|
||||
t!(
|
||||
"Need more info? Check our <a target=\"_blank\" href=\"/about\">intro</a> and <a target=\"_blank\" href=\"/about/usage\">usage guide</a>."
|
||||
);
|
||||
t!(
|
||||
"You uploaded the key <span class=\"fingerprint\"><a href=\"{{ key_link }}\" target=\"_blank\">{{ key_fpr }}</a></span>."
|
||||
);
|
||||
t!("This key is revoked.");
|
||||
t!("It is published without identity information and can't be made available for search by email address (<a href=\"/about\" target=\"_blank\">what does this mean?</a>).");
|
||||
t!("This key is now published with the following identity information (<a href=\"/about\" target=\"_blank\">what does this mean?</a>):");
|
||||
t!(
|
||||
"It is published without identity information and can't be made available for search by email address (<a href=\"/about\" target=\"_blank\">what does this mean?</a>)."
|
||||
);
|
||||
t!(
|
||||
"This key is now published with the following identity information (<a href=\"/about\" target=\"_blank\">what does this mean?</a>):"
|
||||
);
|
||||
t!("Published");
|
||||
t!("This key is now published with only non-identity information. (<a href=\"/about\" target=\"_blank\">What does this mean?</a>)");
|
||||
t!(
|
||||
"This key is now published with only non-identity information. (<a href=\"/about\" target=\"_blank\">What does this mean?</a>)"
|
||||
);
|
||||
t!("To make the key available for search by email address, you can verify it belongs to you:");
|
||||
t!("Verification Pending");
|
||||
t!("<strong>Note:</strong> Some providers delay emails for up to 15 minutes to prevent spam. Please be patient.");
|
||||
t!(
|
||||
"<strong>Note:</strong> Some providers delay emails for up to 15 minutes to prevent spam. Please be patient."
|
||||
);
|
||||
t!("Send Verification Email");
|
||||
t!("This key contains one identity that could not be parsed as an email address.<br /> This identity can't be published on <span class=\"brand\">keys.openpgp.org</span>. (<a href=\"/about/faq#non-email-uids\" target=\"_blank\">Why?</a>)");
|
||||
t!("This key contains {{ count_unparsed }} identities that could not be parsed as an email address.<br /> These identities can't be published on <span class=\"brand\">keys.openpgp.org</span>. (<a href=\"/about/faq#non-email-uids\" target=\"_blank\">Why?</a>)");
|
||||
t!("This key contains one revoked identity, which is not published. (<a href=\"/about/faq#revoked-uids\" target=\"_blank\">Why?</a>)");
|
||||
t!("This key contains {{ count_revoked }} revoked identities, which are not published. (<a href=\"/about/faq#revoked-uids\" target=\"_blank\">Why?</a>)");
|
||||
t!(
|
||||
"This key contains one identity that could not be parsed as an email address.<br /> This identity can't be published on <span class=\"brand\">keys.openpgp.org</span>. (<a href=\"/about/faq#non-email-uids\" target=\"_blank\">Why?</a>)"
|
||||
);
|
||||
t!(
|
||||
"This key contains {{ count_unparsed }} identities that could not be parsed as an email address.<br /> These identities can't be published on <span class=\"brand\">keys.openpgp.org</span>. (<a href=\"/about/faq#non-email-uids\" target=\"_blank\">Why?</a>)"
|
||||
);
|
||||
t!(
|
||||
"This key contains one revoked identity, which is not published. (<a href=\"/about/faq#revoked-uids\" target=\"_blank\">Why?</a>)"
|
||||
);
|
||||
t!(
|
||||
"This key contains {{ count_revoked }} revoked identities, which are not published. (<a href=\"/about/faq#revoked-uids\" target=\"_blank\">Why?</a>)"
|
||||
);
|
||||
t!("Your keys have been successfully uploaded:");
|
||||
t!("<strong>Note:</strong> To make keys searchable by email address, you must upload them individually.");
|
||||
t!(
|
||||
"<strong>Note:</strong> To make keys searchable by email address, you must upload them individually."
|
||||
);
|
||||
t!("Verifying your email address…");
|
||||
t!("If the process doesn't complete after a few seconds, please <input type=\"submit\" class=\"textbutton\" value=\"click here\" />.");
|
||||
t!(
|
||||
"If the process doesn't complete after a few seconds, please <input type=\"submit\" class=\"textbutton\" value=\"click here\" />."
|
||||
);
|
||||
|
||||
t!("Manage your key on {{domain}}");
|
||||
|
||||
t!("Hi,");
|
||||
t!("This is an automated message from <a href=\"{{base_uri}}\" style=\"text-decoration:none; color: #333\">{{domain}}</a>.");
|
||||
t!(
|
||||
"This is an automated message from <a href=\"{{base_uri}}\" style=\"text-decoration:none; color: #333\">{{domain}}</a>."
|
||||
);
|
||||
t!("If you didn't request this message, please ignore it.");
|
||||
t!("OpenPGP key: <tt>{{primary_fp}}</tt>");
|
||||
t!("To manage and delete listed addresses on this key, please follow the link below:");
|
||||
@@ -74,10 +118,14 @@ fn _dummy() {
|
||||
t!("Verify {{userid}} for your key on {{domain}}");
|
||||
|
||||
t!("Hi,");
|
||||
t!("This is an automated message from <a href=\"{{base_uri}}\" style=\"text-decoration:none; color: #333\">{{domain}}</a>.");
|
||||
t!(
|
||||
"This is an automated message from <a href=\"{{base_uri}}\" style=\"text-decoration:none; color: #333\">{{domain}}</a>."
|
||||
);
|
||||
t!("If you didn't request this message, please ignore it.");
|
||||
t!("OpenPGP key: <tt>{{primary_fp}}</tt>");
|
||||
t!("To let others find this key from your email address \"<a rel=\"nofollow\" href=\"#\" style=\"text-decoration:none; color: #333\">{{userid}}</a>\", please click the link below:");
|
||||
t!(
|
||||
"To let others find this key from your email address \"<a rel=\"nofollow\" href=\"#\" style=\"text-decoration:none; color: #333\">{{userid}}</a>\", please click the link below:"
|
||||
);
|
||||
t!("You can find more info at <a href=\"{{base_uri}}/about\">{{domain}}/about</a>.");
|
||||
t!("distributing OpenPGP keys since 2019");
|
||||
|
||||
@@ -85,7 +133,9 @@ fn _dummy() {
|
||||
t!("This is an automated message from {{domain}}.");
|
||||
t!("If you didn't request this message, please ignore it.");
|
||||
t!("OpenPGP key: {{primary_fp}}");
|
||||
t!("To let others find this key from your email address \"{{userid}}\",\nplease follow the link below:");
|
||||
t!(
|
||||
"To let others find this key from your email address \"{{userid}}\",\nplease follow the link below:"
|
||||
);
|
||||
t!("You can find more info at {{base_uri}}/about");
|
||||
t!("distributing OpenPGP keys since 2019");
|
||||
}
|
||||
|
@@ -14,11 +14,11 @@ impl I18NHelper {
|
||||
}
|
||||
|
||||
pub fn get_catalog(&self, lang: &str) -> &gettext::Catalog {
|
||||
let (_, ref catalog) = self
|
||||
let (_, catalog) = self
|
||||
.catalogs
|
||||
.iter()
|
||||
.find(|(candidate, _)| *candidate == lang)
|
||||
.unwrap_or_else(|| self.catalogs.get(0).unwrap());
|
||||
.unwrap_or_else(|| self.catalogs.first().unwrap());
|
||||
catalog
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ impl Output for StringOutput {
|
||||
impl HelperDef for I18NHelper {
|
||||
fn call<'reg: 'rc, 'rc>(
|
||||
&self,
|
||||
h: &Helper<'reg, 'rc>,
|
||||
h: &Helper<'rc>,
|
||||
reg: &'reg Handlebars,
|
||||
context: &'rc Context,
|
||||
rcx: &mut RenderContext<'reg, '_>,
|
||||
|
@@ -1,5 +1,5 @@
|
||||
use crate::database::Query;
|
||||
use gettext_macros::i18n;
|
||||
use hagrid_database::Query;
|
||||
use rocket_i18n::I18n;
|
||||
|
||||
pub fn describe_query_error(i18n: &I18n, q: &Query) -> String {
|
||||
|
18
src/initializers/db_service.rs
Normal file
18
src/initializers/db_service.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use crate::app::config::Configuration;
|
||||
use hagrid_database::Sqlite;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::Result<Sqlite> {
|
||||
let keys_internal_dir = config.keys_internal_dir.as_path();
|
||||
|
||||
Sqlite::new_file(
|
||||
Sqlite::db_file_path(keys_internal_dir),
|
||||
Sqlite::log_dir_path(keys_internal_dir),
|
||||
)
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
pub fn register(rocket: Rocket<Build>, _config: &Configuration, state: Sqlite) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
17
src/initializers/i18n.rs
Normal file
17
src/initializers/i18n.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use crate::app::config::Configuration;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(
|
||||
_config: &Configuration,
|
||||
) -> super::InfallibleResult<Vec<(&'static str, gettext::Catalog)>> {
|
||||
Ok(Some(crate::get_i18n()))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: Vec<(&'static str, gettext::Catalog)>,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
23
src/initializers/localized_template_list.rs
Normal file
23
src/initializers/localized_template_list.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::template_helpers::TemplateOverrides;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::Result<TemplateOverrides> {
|
||||
let localized_template_list = TemplateOverrides::load(
|
||||
config.template_dir.as_path(),
|
||||
crate::app::config::DEFAULT_LOCALIZED_TEMPLATE_DIR_NAME,
|
||||
);
|
||||
|
||||
println!("{:?}", localized_template_list);
|
||||
|
||||
localized_template_list.map(Some)
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: TemplateOverrides,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
30
src/initializers/mail_service.rs
Normal file
30
src/initializers/mail_service.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::mail;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::Result<mail::Service> {
|
||||
let Configuration {
|
||||
from,
|
||||
base_uri,
|
||||
email_template_dir: tmpl_dir,
|
||||
..
|
||||
} = config;
|
||||
|
||||
(if let Some(path) = config.filemail_into.as_deref() {
|
||||
mail::Service::filemail(from, base_uri, tmpl_dir, path)
|
||||
} else if let Some(true) = config.local_smtp {
|
||||
mail::Service::localsmtp(from, base_uri, tmpl_dir)
|
||||
} else {
|
||||
mail::Service::sendmail(from, base_uri, tmpl_dir)
|
||||
})
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: mail::Service,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
16
src/initializers/maintenance_mode.rs
Normal file
16
src/initializers/maintenance_mode.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::web::maintenance::MaintenanceMode;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::InfallibleResult<MaintenanceMode> {
|
||||
Ok(Some(MaintenanceMode::new(config.maintenance_file.clone())))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
fairing: MaintenanceMode,
|
||||
) -> Rocket<Build> {
|
||||
rocket.attach(fairing)
|
||||
}
|
59
src/initializers/mod.rs
Normal file
59
src/initializers/mod.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
mod db_service;
mod i18n;
mod localized_template_list;
mod mail_service;
mod maintenance_mode;
mod prometheus;
mod rate_limiter;
mod routes;
pub mod state;
mod stateful_token_service;
mod stateless_token_service;
mod template;

use crate::app::config::Configuration;
use rocket::Build;
use rocket::Rocket;

// GOTCHA:
// Initializer's `init` fn can fail and return error or initializer can be disabled e.g.,
// by configuration in that case it has nothing to register in rocket i.e.,
// `init` returns Ok(None)
type Result<T, E = anyhow::Error> = anyhow::Result<Option<T>, E>;
// Convenience alias for initializers whose `init` can never fail.
type InfallibleResult<T> = Result<T, std::convert::Infallible>;

// For each listed module: run `$module::init(config)`. On `Ok(Some(value))`,
// thread the rocket instance through `$module::register`; on `Ok(None)` leave
// it untouched; on `Err`, abort via `?` in the enclosing function.
macro_rules! initialize {
    ( rocket => $rocket:ident, config => $config:ident; $( $module:ident ),+ $(,)? ) => {{
        let rocket = $rocket;
        let config = &$config;
        $(
            let rocket = if let Some(value) = $module::init(config)? {
                $module::register(rocket, config, value)
            } else {
                rocket
            };
        )+

        rocket
    }};
}

/// Runs all initializers in sequence, building up the rocket instance.
///
/// NOTE(review): the list order presumably reflects fairing-attachment and
/// state-dependency order — confirm before reordering entries.
pub fn run(rocket: Rocket<Build>, config: &Configuration) -> anyhow::Result<Rocket<Build>> {
    Ok(initialize!(rocket => rocket, config => config;
        // If you add a new initializer module under `hagrid::initializers`
        // (e.g. `src/initializers/your_new_initializer_module.rs`), add its initializer
        // name to this list so it is actually executed.
        template,
        maintenance_mode,
        i18n,
        state,
        stateless_token_service,
        stateful_token_service,
        mail_service,
        db_service,
        rate_limiter,
        localized_template_list,
        routes,
        prometheus,
    ))
}
|
27
src/initializers/prometheus.rs
Normal file
27
src/initializers/prometheus.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::counters;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
use rocket_prometheus::PrometheusMetrics;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::InfallibleResult<PrometheusMetrics> {
|
||||
if !config.enable_prometheus {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let prometheus = PrometheusMetrics::new();
|
||||
|
||||
counters::register_counters(prometheus.registry());
|
||||
|
||||
Ok(Some(prometheus))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
prometheus_fairing_and_metrics_routes: PrometheusMetrics,
|
||||
) -> Rocket<Build> {
|
||||
rocket
|
||||
.attach(prometheus_fairing_and_metrics_routes.clone())
|
||||
.mount("/metrics", prometheus_fairing_and_metrics_routes)
|
||||
}
|
16
src/initializers/rate_limiter.rs
Normal file
16
src/initializers/rate_limiter.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::InfallibleResult<RateLimiter> {
|
||||
Ok(Some(RateLimiter::new(config.mail_rate_limit)))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: RateLimiter,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
16
src/initializers/routes.rs
Normal file
16
src/initializers/routes.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::routes;
|
||||
use rocket::Rocket;
|
||||
use rocket::{Build, Route};
|
||||
|
||||
pub fn init(_config: &Configuration) -> super::InfallibleResult<Vec<Route>> {
|
||||
Ok(Some(routes::routes()))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
routes: Vec<Route>,
|
||||
) -> Rocket<Build> {
|
||||
rocket.mount("/", routes)
|
||||
}
|
20
src/initializers/state.rs
Normal file
20
src/initializers/state.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::app::state::ApplicationState;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::InfallibleResult<ApplicationState> {
|
||||
Ok(Some(ApplicationState {
|
||||
assets_dir: config.assets_dir.clone(),
|
||||
base_uri: config.base_uri.clone(),
|
||||
base_uri_onion: config.base_uri_onion().to_owned(),
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: ApplicationState,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
16
src/initializers/stateful_token_service.rs
Normal file
16
src/initializers/stateful_token_service.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use crate::app::config::Configuration;
|
||||
use hagrid_database::StatefulTokens;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::Result<StatefulTokens> {
|
||||
StatefulTokens::new(config.token_dir.as_path()).map(Some)
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
state: StatefulTokens,
|
||||
) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
15
src/initializers/stateless_token_service.rs
Normal file
15
src/initializers/stateless_token_service.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::tokens::Service;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
|
||||
pub fn init(config: &Configuration) -> super::InfallibleResult<Service> {
|
||||
Ok(Some(Service::init(
|
||||
&config.token_secret,
|
||||
config.token_validity,
|
||||
)))
|
||||
}
|
||||
|
||||
pub fn register(rocket: Rocket<Build>, _config: &Configuration, state: Service) -> Rocket<Build> {
|
||||
rocket.manage(state)
|
||||
}
|
24
src/initializers/template.rs
Normal file
24
src/initializers/template.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
use crate::app::config::Configuration;
|
||||
use crate::i18n::I18NHelper;
|
||||
use rocket::Build;
|
||||
use rocket::Rocket;
|
||||
use rocket::fairing::Fairing;
|
||||
use rocket_dyn_templates::{Engines, Template};
|
||||
|
||||
pub fn init(_config: &Configuration) -> super::InfallibleResult<impl Fairing> {
|
||||
Ok(Some(Template::custom(|engines: &mut Engines| {
|
||||
let i18ns = crate::get_i18n();
|
||||
let i18n_helper = I18NHelper::new(i18ns);
|
||||
engines
|
||||
.handlebars
|
||||
.register_helper("text", Box::new(i18n_helper));
|
||||
})))
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
rocket: Rocket<Build>,
|
||||
_config: &Configuration,
|
||||
fairing: impl Fairing,
|
||||
) -> Rocket<Build> {
|
||||
rocket.attach(fairing)
|
||||
}
|
33
src/lib.rs
Normal file
33
src/lib.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
//! Hagrid library crate: i18n macro setup, module wiring, and the shared
//! `get_i18n` catalog accessor.

#![recursion_limit = "1024"]

// Debug builds compile only a few locales — presumably to keep debug build
// times down; TODO confirm.
#[cfg(debug_assertions)]
gettext_macros::init_i18n!("hagrid", en, de, ja);

// Release builds compile the full locale set.
#[cfg(not(debug_assertions))]
gettext_macros::init_i18n!(
    "hagrid", en, de, fr, it, ja, nb, pl, tr, zh_Hans, ko, nl, ru, ar, sv, es, ro
);

mod anonymize_utils;
pub mod app;
mod counters;
mod dump;
mod gettext_strings;
mod i18n;
mod i18n_helpers;
mod initializers;
mod mail;
mod rate_limiter;
mod routes;
mod sealed_state;
mod template_helpers;
mod tokens;
mod web;

gettext_macros::compile_i18n!();

// The include_i18n macro must be called after compile_i18n, which must be called after i18n macros
// *in compilation order*. We use a helper function here to make this order consistent.
pub fn get_i18n() -> Vec<(&'static str, gettext::Catalog)> {
    gettext_macros::include_i18n!()
}
|
62
src/mail.rs
62
src/mail.rs
@@ -1,10 +1,10 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::counters;
|
||||
use lettre::message::{header, Mailbox, MultiPart, SinglePart};
|
||||
use lettre::{FileTransport, SendmailTransport, Transport as LettreTransport};
|
||||
use anyhow::anyhow;
|
||||
use lettre::message::{Mailbox, MultiPart, SinglePart, header};
|
||||
use lettre::{FileTransport, SendmailTransport, SmtpTransport, Transport as LettreTransport};
|
||||
use rocket_dyn_templates::handlebars::Handlebars;
|
||||
use serde::Serialize;
|
||||
use std::path::{Path, PathBuf};
|
||||
use uuid::Uuid;
|
||||
|
||||
use gettext_macros::i18n;
|
||||
@@ -12,10 +12,11 @@ use rocket_i18n::I18n;
|
||||
|
||||
use crate::template_helpers;
|
||||
|
||||
use crate::database::types::Email;
|
||||
use crate::Result;
|
||||
use hagrid_database::types::Email;
|
||||
|
||||
mod context {
|
||||
use serde_derive::Serialize;
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct Verification {
|
||||
pub lang: String,
|
||||
@@ -53,18 +54,29 @@ pub struct Service {
|
||||
}
|
||||
|
||||
enum Transport {
|
||||
LocalSmtp,
|
||||
Sendmail,
|
||||
Filemail(PathBuf),
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Sends mail via sendmail.
|
||||
pub fn sendmail(from: &str, base_uri: &str, template_dir: &Path) -> Result<Self> {
|
||||
pub fn sendmail(from: &str, base_uri: &str, template_dir: &Path) -> anyhow::Result<Self> {
|
||||
Self::new(from, base_uri, template_dir, Transport::Sendmail)
|
||||
}
|
||||
|
||||
/// Sends mail via local smtp server.
|
||||
pub fn localsmtp(from: &str, base_uri: &str, template_dir: &Path) -> anyhow::Result<Self> {
|
||||
Self::new(from, base_uri, template_dir, Transport::LocalSmtp)
|
||||
}
|
||||
|
||||
/// Sends mail by storing it in the given directory.
|
||||
pub fn filemail(from: &str, base_uri: &str, template_dir: &Path, path: &Path) -> Result<Self> {
|
||||
pub fn filemail(
|
||||
from: &str,
|
||||
base_uri: &str,
|
||||
template_dir: &Path,
|
||||
path: &Path,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::new(
|
||||
from,
|
||||
base_uri,
|
||||
@@ -73,7 +85,12 @@ impl Service {
|
||||
)
|
||||
}
|
||||
|
||||
fn new(from: &str, base_uri: &str, template_dir: &Path, transport: Transport) -> Result<Self> {
|
||||
fn new(
|
||||
from: &str,
|
||||
base_uri: &str,
|
||||
template_dir: &Path,
|
||||
transport: Transport,
|
||||
) -> anyhow::Result<Self> {
|
||||
let templates = template_helpers::load_handlebars(template_dir)?;
|
||||
let domain = url::Url::parse(base_uri)?
|
||||
.host_str()
|
||||
@@ -97,7 +114,7 @@ impl Service {
|
||||
tpk_name: String,
|
||||
userid: &Email,
|
||||
token: &str,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let ctx = context::Verification {
|
||||
lang: i18n.lang.to_string(),
|
||||
primary_fp: tpk_name,
|
||||
@@ -131,7 +148,7 @@ impl Service {
|
||||
tpk_name: String,
|
||||
recipient: &Email,
|
||||
link_path: &str,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let ctx = context::Manage {
|
||||
lang: i18n.lang.to_string(),
|
||||
primary_fp: tpk_name,
|
||||
@@ -162,7 +179,7 @@ impl Service {
|
||||
tpk_name: String,
|
||||
userid: &Email,
|
||||
token: &str,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let ctx = context::Welcome {
|
||||
lang: "en".to_owned(),
|
||||
primary_fp: tpk_name,
|
||||
@@ -187,7 +204,7 @@ impl Service {
|
||||
template: &str,
|
||||
locale: &str,
|
||||
ctx: impl Serialize,
|
||||
) -> Result<(String, String)> {
|
||||
) -> anyhow::Result<(String, String)> {
|
||||
let html = self
|
||||
.templates
|
||||
.render(&format!("{}/{}.htm", locale, template), &ctx)
|
||||
@@ -209,7 +226,7 @@ impl Service {
|
||||
template: &str,
|
||||
locale: &str,
|
||||
ctx: impl Serialize,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let (html, txt) = self.render_template(template, locale, ctx)?;
|
||||
|
||||
if cfg!(debug_assertions) {
|
||||
@@ -246,6 +263,10 @@ impl Service {
|
||||
)?;
|
||||
|
||||
match self.transport {
|
||||
Transport::LocalSmtp => {
|
||||
let transport = SmtpTransport::unencrypted_localhost();
|
||||
transport.send(&email)?;
|
||||
}
|
||||
Transport::Sendmail => {
|
||||
let transport = SendmailTransport::new();
|
||||
transport.send(&email)?;
|
||||
@@ -263,7 +284,7 @@ impl Service {
|
||||
/// Returns and removes the first mail it finds from the given
|
||||
/// directory.
|
||||
#[cfg(test)]
|
||||
pub fn pop_mail(dir: &Path) -> Result<Option<String>> {
|
||||
pub fn pop_mail(dir: &Path) -> anyhow::Result<Option<String>> {
|
||||
use std::{fs, fs::read_to_string};
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
@@ -279,11 +300,11 @@ pub fn pop_mail(dir: &Path) -> Result<Option<String>> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::web::get_i18n;
|
||||
use crate::get_i18n;
|
||||
|
||||
use super::*;
|
||||
use std::str::FromStr;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
use tempfile::{TempDir, tempdir};
|
||||
|
||||
const BASEDIR: &str = "http://localhost/";
|
||||
const FROM: &str = "test@localhost";
|
||||
@@ -326,12 +347,7 @@ mod test {
|
||||
let headers: Vec<_> = mail_content
|
||||
.lines()
|
||||
.filter(|line| line.contains(": "))
|
||||
.map(|line| {
|
||||
let mut it = line.splitn(2, ": ");
|
||||
let h = it.next().unwrap();
|
||||
let v = it.next().unwrap();
|
||||
(h, v)
|
||||
})
|
||||
.map(|line| line.split_once(": ").unwrap())
|
||||
.collect();
|
||||
assert!(headers.contains(&("Content-Type", "text/plain; charset=utf-8")));
|
||||
assert!(headers.contains(&("Content-Type", "text/html; charset=utf-8")));
|
||||
|
43
src/main.rs
43
src/main.rs
@@ -1,42 +1,3 @@
|
||||
#![recursion_limit = "1024"]
|
||||
|
||||
#[macro_use]
|
||||
extern crate anyhow;
|
||||
use anyhow::Result;
|
||||
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate regex;
|
||||
|
||||
extern crate hagrid_database as database;
|
||||
|
||||
use gettext_macros::init_i18n;
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
init_i18n!("hagrid", en, de, ja);
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
init_i18n!("hagrid", en, de, fr, it, ja, nb, pl, tr, zh_Hans, ko, nl, ru, ar, sv, es, ro);
|
||||
|
||||
mod anonymize_utils;
|
||||
mod counters;
|
||||
mod dump;
|
||||
mod gettext_strings;
|
||||
mod i18n;
|
||||
mod i18n_helpers;
|
||||
mod mail;
|
||||
mod rate_limiter;
|
||||
mod sealed_state;
|
||||
mod template_helpers;
|
||||
mod tokens;
|
||||
mod web;
|
||||
|
||||
#[launch]
|
||||
fn rocket() -> _ {
|
||||
web::serve().expect("Rocket config must succeed")
|
||||
fn main() {
|
||||
hagrid::app::run()
|
||||
}
|
||||
|
111
src/routes/about.rs
Normal file
111
src/routes/about.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use crate::web::{MyResponse, RequestOrigin};
use rocket_codegen::get;
use rocket_i18n::I18n;

/// Renders the "about" overview page.
#[get("/about")]
pub fn about(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/about", i18n, origin)
}

/// Renders the news page.
#[get("/about/news")]
pub fn news(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/news", i18n, origin)
}

/// Renders the FAQ page.
#[get("/about/faq")]
pub fn faq(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/faq", i18n, origin)
}

/// Renders the usage guide page.
#[get("/about/usage")]
pub fn usage(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/usage", i18n, origin)
}

/// Renders the privacy policy page.
#[get("/about/privacy")]
pub fn privacy(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/privacy", i18n, origin)
}

/// Renders the API documentation page.
#[get("/about/api")]
pub fn apidoc(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/api", i18n, origin)
}

/// Renders the statistics page.
#[get("/about/stats")]
pub fn stats(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("about/stats", i18n, origin)
}

#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use ::rocket::http::{ContentType, Header, Status};
    use ::rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    mod get_about {
        use super::*;

        const URI: &str = "/about";

        // Smoke test: /about renders with a German Accept-Language header.
        #[rstest]
        fn landing_page_is_visible_with_translations(
            #[from(client)] (_tmpdir, client): (TempDir, Client),
        ) {
            assert::response(
                client
                    .get(URI)
                    .header(Header::new("Accept-Language", "de"))
                    .dispatch(),
                Status::Ok,
                ContentType::HTML,
                "Hagrid",
            );
            // TODO check translation
        }

        // NOTE(review): despite its name, this test checks the /about page text
        // ("distribution and discovery"), not the privacy page — possibly a
        // copy-paste from get_about_privacy below; confirm intent.
        #[rstest]
        fn privacy_policy_is_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "distribution and discovery",
            );
        }
    }

    mod get_about_privacy {
        use super::*;

        const URI: &str = "/about/privacy";

        // The privacy page mentions "Public Key Data".
        #[rstest]
        fn privacy_policy_is_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "Public Key Data",
            );
        }
    }

    mod get_about_api {
        use super::*;

        const URI: &str = "/about/api";

        // The API docs page mentions the by-keyid endpoint.
        #[rstest]
        fn api_docs_are_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "/vks/v1/by-keyid",
            );
        }
    }
}
|
1
src/routes/api.rs
Normal file
1
src/routes/api.rs
Normal file
@@ -0,0 +1 @@
|
||||
// REST API route modules.
pub mod rest;
|
1
src/routes/api/rest/mod.rs
Normal file
1
src/routes/api/rest/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
// VKS (verifying keyserver) API endpoints.
pub mod vks;
|
173
src/routes/api/rest/vks.rs
Normal file
173
src/routes/api/rest/vks.rs
Normal file
@@ -0,0 +1,173 @@
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::web::vks_api::{JsonErrorResponse, JsonResult, json};
|
||||
use crate::web::{MyResponse, RequestOrigin, vks, vks_api};
|
||||
use crate::{mail, tokens, web};
|
||||
use hagrid_database::types::{Email, Fingerprint, KeyID};
|
||||
use hagrid_database::{Query, Sqlite, StatefulTokens};
|
||||
use rocket::http::Status;
|
||||
use rocket::serde::json::{Error as JsonError, Json};
|
||||
use rocket_codegen::{get, post};
|
||||
use rocket_i18n::{I18n, Translations};
|
||||
|
||||
#[post("/vks/v1/upload", format = "json", data = "<data>")]
|
||||
pub fn upload_json(
|
||||
db: &rocket::State<Sqlite>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
data: Result<Json<json::UploadRequest>, JsonError>,
|
||||
) -> JsonResult {
|
||||
let data = vks_api::json_or_error(data)?;
|
||||
|
||||
use crate::web::{vks, vks_api};
|
||||
use std::io::Cursor;
|
||||
|
||||
let data_reader = Cursor::new(data.keytext.as_bytes());
|
||||
let result = vks::process_key(db, &i18n, tokens_stateless, rate_limiter, data_reader);
|
||||
|
||||
vks_api::upload_ok_json(result)
|
||||
}
|
||||
|
||||
#[post("/vks/v1/upload", rank = 2)]
|
||||
pub fn upload_fallback(origin: RequestOrigin) -> JsonErrorResponse {
|
||||
let error_msg = format!(
|
||||
"expected application/json data. see {}/about/api for api docs.",
|
||||
origin.get_base_uri()
|
||||
);
|
||||
JsonErrorResponse(Status::BadRequest, error_msg)
|
||||
}
|
||||
|
||||
/// JSON verification-request endpoint (`POST /vks/v1/request-verify`).
///
/// For the upload identified by `token`, presumably triggers verification
/// mails for the requested `addresses` (see `vks::request_verify`), localized
/// via the optional `locale` field.
#[post("/vks/v1/request-verify", format = "json", data = "<data>")]
pub fn request_verify_json(
    db: &rocket::State<Sqlite>,
    langs: &rocket::State<Translations>,
    origin: RequestOrigin,
    token_stateful: &rocket::State<StatefulTokens>,
    token_stateless: &rocket::State<tokens::Service>,
    mail_service: &rocket::State<mail::Service>,
    rate_limiter: &rocket::State<RateLimiter>,
    data: Result<Json<json::VerifyRequest>, JsonError>,
) -> JsonResult {
    // Turn a body-deserialization failure into a JSON error response early.
    let data = vks_api::json_or_error(data)?;
    let json::VerifyRequest {
        token,
        addresses,
        locale,
    } = data.into_inner();
    // Fall back to the default locale when the client did not send one.
    let i18n = vks_api::get_locale(langs, locale.unwrap_or_default());
    let result = vks::request_verify(
        db,
        &origin,
        token_stateful,
        token_stateless,
        mail_service,
        rate_limiter,
        &i18n,
        token,
        addresses,
    );
    vks_api::upload_ok_json(result)
}
|
||||
|
||||
#[post("/vks/v1/request-verify", rank = 2)]
|
||||
pub fn request_verify_fallback(origin: RequestOrigin) -> JsonErrorResponse {
|
||||
let error_msg = format!(
|
||||
"expected application/json data. see {}/about/api for api docs.",
|
||||
origin.get_base_uri()
|
||||
);
|
||||
JsonErrorResponse(Status::BadRequest, error_msg)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-fingerprint/<fpr>")]
|
||||
pub fn vks_v1_by_fingerprint(db: &rocket::State<Sqlite>, i18n: I18n, fpr: String) -> MyResponse {
|
||||
let query = match fpr.parse::<Fingerprint>() {
|
||||
Ok(fpr) => Query::ByFingerprint(fpr),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed fingerprint"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-email/<email>")]
|
||||
pub fn vks_v1_by_email(db: &rocket::State<Sqlite>, i18n: I18n, email: String) -> MyResponse {
|
||||
let email = email.replace("%40", "@");
|
||||
let query = match email.parse::<Email>() {
|
||||
Ok(email) => Query::ByEmail(email),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed e-mail address"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-keyid/<kid>")]
|
||||
pub fn vks_v1_by_keyid(db: &rocket::State<Sqlite>, i18n: I18n, kid: String) -> MyResponse {
|
||||
let query = match kid.parse::<KeyID>() {
|
||||
Ok(keyid) => Query::ByKeyID(keyid),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed key id"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use rocket::http::ContentType;
    use rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    mod post_vks_v1_upload {
        use super::*;

        const URI: &str = "/vks/v1/upload";

        // Checks that the upload endpoint honors maintenance mode; the
        // `test::maintenance` helper presumably toggles the maintenance file
        // in `tmpdir` and re-issues the request — confirm in the test helpers.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
            #[from(serialized_cert)] (_cert_name, serialized_cert): (&str, Vec<u8>),
        ) {
            let request_closure = || {
                client
                    .post(URI)
                    .header(ContentType::JSON)
                    .body(format!(
                        r#"{{ "keytext": "{}" }}"#,
                        base64::encode(&serialized_cert)
                    ))
                    .dispatch()
            };

            test::maintenance(request_closure, tmpdir, ContentType::JSON, maintenance_text);
        }
    }

    mod post_vks_v1_request_verify_json {
        use super::*;
        use crate::web::tests::common;

        const URI: &str = "/vks/v1/request-verify";

        // Same maintenance-mode check for the verify endpoint; performs a real
        // upload first to obtain a valid token to send.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
            #[from(serialized_cert)] (cert_name, serialized_cert): (&str, Vec<u8>),
        ) {
            let token = common::assert::vks_publish_json_get_token(&client, &serialized_cert);

            let json = format!(r#"{{"token":"{}","addresses":["{}"]}}"#, token, cert_name);

            let request_closure = || {
                client
                    .post(URI)
                    .header(ContentType::JSON)
                    .body(json.as_bytes())
                    .dispatch()
            };

            test::maintenance(request_closure, tmpdir, ContentType::JSON, maintenance_text);
        }
    }
}
|
9
src/routes/assets.rs
Normal file
9
src/routes/assets.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use crate::app::state::ApplicationState;
use rocket::fs::NamedFile;
use rocket_codegen::get;
use std::path::PathBuf;

/// Serves static files from the configured assets directory.
///
/// Returns `None` (-> 404) when the file cannot be opened. Rocket's `PathBuf`
/// segment guard is documented to reject path-traversal (`..`) segments before
/// this handler runs.
#[get("/assets/<file..>")]
pub async fn files(file: PathBuf, state: &rocket::State<ApplicationState>) -> Option<NamedFile> {
    NamedFile::open(state.assets_dir.join(file)).await.ok()
}
|
8
src/routes/atom.rs
Normal file
8
src/routes/atom.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
use crate::web::{MyResponse, RequestOrigin};
use rocket_codegen::get;
use rocket_i18n::I18n;

/// Serves the news feed rendered from the "atom" template as XML.
#[get("/atom.xml")]
pub fn news_atom(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::xml("atom", i18n, origin)
}
|
@@ -1,15 +1,16 @@
|
||||
use std::io;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use rocket::get;
|
||||
use rocket_i18n::I18n;
|
||||
use std::io;
|
||||
|
||||
use crate::dump::{self, Kind};
|
||||
use crate::i18n_helpers::describe_query_error;
|
||||
use crate::web::MyResponse;
|
||||
|
||||
use crate::database::{Database, KeyDatabase, Query};
|
||||
use hagrid_database::{Database, Query, Sqlite};
|
||||
|
||||
#[get("/debug?<q>")]
|
||||
pub fn debug_info(db: &rocket::State<KeyDatabase>, i18n: I18n, q: String) -> MyResponse {
|
||||
pub fn debug_info(db: &rocket::State<Sqlite>, i18n: I18n, q: String) -> MyResponse {
|
||||
let query = match q.parse::<Query>() {
|
||||
Ok(query) => query,
|
||||
Err(_) => return MyResponse::bad_request_plain("bad request"),
|
29
src/routes/errors.rs
Normal file
29
src/routes/errors.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use crate::web::RequestOrigin;
|
||||
use crate::web::templates::Bare;
|
||||
use crate::web::templates::HagridLayout;
|
||||
use rocket::http::Status;
|
||||
use rocket::response::status::Custom;
|
||||
use rocket_codegen::get;
|
||||
use rocket_dyn_templates::Template;
|
||||
use rocket_i18n::I18n;
|
||||
|
||||
#[get("/errors/<code>/<template>")]
|
||||
pub fn errors(
|
||||
i18n: I18n,
|
||||
origin: RequestOrigin,
|
||||
code: u16,
|
||||
template: String,
|
||||
) -> Result<Custom<Template>, &'static str> {
|
||||
if !template
|
||||
.chars()
|
||||
.all(|x| x == '-' || char::is_ascii_alphabetic(&x))
|
||||
{
|
||||
return Err("bad request");
|
||||
}
|
||||
let status_code = Status::from_code(code).ok_or("bad request")?;
|
||||
let response_body = Template::render(
|
||||
format!("errors/{}-{}", code, template),
|
||||
HagridLayout::new(Bare { dummy: () }, i18n, origin),
|
||||
);
|
||||
Ok(Custom(status_code, response_body))
|
||||
}
|
33
src/routes/index.rs
Normal file
33
src/routes/index.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use crate::web::{MyResponse, RequestOrigin};
use rocket_codegen::get;
use rocket_i18n::I18n;

/// Renders the landing page.
#[get("/")]
pub fn root(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("index", i18n, origin)
}

#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use ::rocket::http::{ContentType, Status};
    use ::rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    mod get_root {
        use super::*;

        const URI: &str = "/";

        // Smoke test: the landing page renders as HTML and mentions "Hagrid".
        #[rstest]
        fn landing_page_is_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "Hagrid",
            );
        }
    }
}
|
29
src/routes/maintenance.rs
Normal file
29
src/routes/maintenance.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use crate::web::MyResponse;
use crate::web::maintenance::JsonErrorMessage;
use crate::web::maintenance::templates::MaintenanceMode;
use crate::web::util::get_commit_sha;
use rocket_codegen::get;
use rocket_dyn_templates::Template;
use rocket_i18n::I18n;
use serde_json::json;

/// Maintenance response in plain-text form.
#[get("/maintenance/plain/<message>")]
pub fn maintenance_error_plain(message: String) -> MyResponse {
    MyResponse::MaintenancePlain(message)
}

/// Maintenance response as a JSON error object.
#[get("/maintenance/json/<message>")]
pub fn maintenance_error_json(message: String) -> MyResponse {
    MyResponse::MaintenanceJson(json!(JsonErrorMessage { message }))
}

/// Maintenance response as a full HTML page, including version/commit info.
#[get("/maintenance/web/<message>")]
pub fn maintenance_error_web(message: String, i18n: I18n) -> MyResponse {
    let ctx = MaintenanceMode {
        message,
        // Version string is baked in at build time via the VERGEN_SEMVER env var.
        version: env!("VERGEN_SEMVER").to_string(),
        commit: get_commit_sha(),
        lang: i18n.lang.to_owned(),
    };
    MyResponse::Maintenance(Template::render("maintenance", ctx))
}
|
192
src/routes/manage.rs
Normal file
192
src/routes/manage.rs
Normal file
@@ -0,0 +1,192 @@
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::web::manage::{StatelessVerifyToken, forms, templates};
|
||||
use crate::web::{MyResponse, RequestOrigin, manage};
|
||||
use crate::{mail, tokens};
|
||||
use anyhow::anyhow;
|
||||
use gettext_macros::i18n;
|
||||
use hagrid_database::types::{Email, Fingerprint};
|
||||
use hagrid_database::{Database, Query, Sqlite};
|
||||
use rocket::form::Form;
|
||||
use rocket::uri;
|
||||
use rocket_codegen::{get, post};
|
||||
use rocket_i18n::I18n;
|
||||
|
||||
/// Renders the key-management landing page.
#[get("/manage")]
pub fn vks_manage(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("manage/manage", i18n, origin)
}
|
||||
|
||||
/// Renders the management page for the key identified by a stateless token.
///
/// An invalid/expired token and an unknown fingerprint both render the same
/// generic "link invalid or expired" page, so the response does not reveal
/// whether a key exists.
#[get("/manage/<token>")]
pub fn vks_manage_key(
    origin: RequestOrigin,
    db: &rocket::State<Sqlite>,
    i18n: I18n,
    token: String,
    token_service: &rocket::State<tokens::Service>,
) -> MyResponse {
    if let Ok(StatelessVerifyToken { fpr }) = token_service.check(&token) {
        match db.lookup(&Query::ByFingerprint(fpr)) {
            Ok(Some(tpk)) => {
                let fp = Fingerprint::try_from(tpk.fingerprint()).unwrap();
                // Collect every parseable e-mail address from the key's user
                // IDs, sorted and de-duplicated for display.
                let mut emails: Vec<Email> = tpk
                    .userids()
                    .flat_map(|u| u.userid().to_string().parse::<Email>())
                    .collect();
                emails.sort_unstable();
                emails.dedup();
                // NOTE(review): every address is shown as published: true here
                // — presumably actual publication state is handled elsewhere;
                // confirm against the template's expectations.
                let uid_status = emails
                    .into_iter()
                    .map(|email| templates::ManageKeyUidStatus {
                        address: email.to_string(),
                        published: true,
                    })
                    .collect();
                let key_link = uri!(crate::routes::vks::search(q = fp.to_string())).to_string();
                let context = templates::ManageKey {
                    key_fpr: fp.to_string(),
                    key_link,
                    uid_status,
                    token,
                    base_uri: origin.get_base_uri().to_owned(),
                };
                MyResponse::ok("manage/manage_key", context, i18n, origin)
            }
            Ok(None) => MyResponse::not_found(
                Some("manage/manage"),
                Some(i18n!(i18n.catalog, "This link is invalid or expired")),
                i18n,
                origin,
            ),
            Err(e) => MyResponse::ise(e),
        }
    } else {
        MyResponse::not_found(
            Some("manage/manage"),
            Some(i18n!(i18n.catalog, "This link is invalid or expired")),
            i18n,
            origin,
        )
    }
}
|
||||
|
||||
#[post("/manage", data = "<request>")]
|
||||
pub fn vks_manage_post(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::ManageRequest>,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
) -> MyResponse {
|
||||
let email = match request.search_term.parse::<Email>() {
|
||||
Ok(email) => email,
|
||||
Err(_) => {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "Malformed address: {}"; request.search_term.as_str())),
|
||||
i18n,
|
||||
origin,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let tpk = match db.lookup(&Query::ByEmail(email.clone())) {
|
||||
Ok(Some(tpk)) => tpk,
|
||||
Ok(None) => {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "No key for address: {}"; request.search_term.as_str())),
|
||||
i18n,
|
||||
origin,
|
||||
);
|
||||
}
|
||||
Err(e) => return MyResponse::ise(e),
|
||||
};
|
||||
|
||||
let email_exists = tpk
|
||||
.userids()
|
||||
.flat_map(|binding| binding.userid().to_string().parse::<Email>())
|
||||
.any(|candidate| candidate == email);
|
||||
|
||||
if !email_exists {
|
||||
return MyResponse::ise(anyhow!("Internal error: address check failed!"));
|
||||
}
|
||||
|
||||
if !rate_limiter.action_perform(format!("manage-{}", &email)) {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(
|
||||
i18n.catalog,
|
||||
"A request has already been sent for this address recently."
|
||||
)),
|
||||
i18n,
|
||||
origin,
|
||||
);
|
||||
}
|
||||
|
||||
let fpr: Fingerprint = tpk.fingerprint().try_into().unwrap();
|
||||
let fpr_text = fpr.to_string();
|
||||
let token = token_service.create(&StatelessVerifyToken { fpr });
|
||||
let link_path = uri!(vks_manage_key(token)).to_string();
|
||||
|
||||
let base_uri = origin.get_base_uri();
|
||||
if let Err(e) = mail_service.send_manage_token(&i18n, base_uri, fpr_text, &email, &link_path) {
|
||||
return MyResponse::ise(e);
|
||||
}
|
||||
|
||||
let ctx = templates::ManageLinkSent {
|
||||
address: email.to_string(),
|
||||
};
|
||||
MyResponse::ok("manage/manage_link_sent", ctx, i18n, origin)
|
||||
}
|
||||
|
||||
#[post("/manage/unpublish", data = "<request>")]
|
||||
pub fn vks_manage_unpublish(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<Sqlite>,
|
||||
i18n: I18n,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
request: Form<forms::ManageDelete>,
|
||||
) -> MyResponse {
|
||||
manage::vks_manage_unpublish_or_fail(origin, db, token_service, i18n, request)
|
||||
.unwrap_or_else(MyResponse::ise)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use ::rocket::http::{ContentType, Status};
    use ::rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    /// Tests for `GET /manage`.
    mod get_manage {
        use super::*;

        const URI: &str = "/manage";

        /// The landing page must render the management/deletion form.
        #[rstest]
        fn delete_form_is_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "any verified email address",
            );
        }

        /// While the maintenance file exists the endpoint must serve the
        /// maintenance notice instead of the form; see `test::maintenance`.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
        ) {
            test::maintenance(
                || client.get(URI).dispatch(),
                tmpdir,
                ContentType::HTML,
                maintenance_text,
            );
        }
    }
}
|
364
src/routes/mod.rs
Normal file
364
src/routes/mod.rs
Normal file
@@ -0,0 +1,364 @@
|
||||
mod about;
|
||||
mod api;
|
||||
mod assets;
|
||||
mod atom;
|
||||
mod debug;
|
||||
mod errors;
|
||||
mod index;
|
||||
pub mod maintenance;
|
||||
pub mod manage;
|
||||
mod pks;
|
||||
pub mod vks;
|
||||
mod wkd;
|
||||
|
||||
use rocket::{Route, routes};
|
||||
|
||||
/// Collects every route handler exposed by the web frontend into one list
/// for mounting. Entries are grouped by subsystem (see inline comments);
/// the list itself is the single registration point for new handlers.
pub fn routes() -> Vec<Route> {
    routes![
        // infra
        index::root,
        about::about,
        about::news,
        atom::news_atom,
        about::privacy,
        about::apidoc,
        about::faq,
        about::usage,
        assets::files,
        about::stats,
        errors::errors,
        // VKSv1
        api::rest::vks::vks_v1_by_email,
        api::rest::vks::vks_v1_by_fingerprint,
        api::rest::vks::vks_v1_by_keyid,
        api::rest::vks::upload_json,
        api::rest::vks::upload_fallback,
        api::rest::vks::request_verify_json,
        api::rest::vks::request_verify_fallback,
        // User interaction.
        vks::search,
        vks::upload,
        vks::upload_post_form,
        vks::upload_post_form_data,
        vks::request_verify_form,
        vks::request_verify_form_data,
        vks::verify_confirm,
        vks::verify_confirm_form,
        vks::quick_upload,
        vks::quick_upload_proceed,
        // Debug
        debug::debug_info,
        // HKP
        pks::pks_lookup,
        pks::pks_add_form,
        pks::pks_add_form_data,
        pks::pks_internal_index,
        // WKD
        wkd::wkd_policy,
        wkd::wkd_query,
        // Manage
        manage::vks_manage,
        manage::vks_manage_key,
        manage::vks_manage_post,
        manage::vks_manage_unpublish,
        // Maintenance error page
        maintenance::maintenance_error_web,
        maintenance::maintenance_error_json,
        maintenance::maintenance_error_plain,
    ]
}
|
||||
|
||||
#[cfg(test)]
pub mod tests {
    use ::rocket::local::blocking::Client;
    use common::*;
    use rstest::rstest;
    use tempfile::TempDir;

    /// Contains reusable code which can be shared across different tests, like response assertions,
    /// fixtures to set up the test environment and particular test implementations
    /// which can be seeded with different values, but the implementation scheme remains the same.
    pub mod common {
        use crate::app::configure_rocket;
        use ::rocket::http::ContentType;
        use ::rocket::local::blocking::Client;
        use rstest::fixture;
        use sequoia_openpgp::Cert;
        use sequoia_openpgp::cert::CertBuilder;
        use sequoia_openpgp::serialize::Serialize;
        use std::path::PathBuf;
        use tempfile::{TempDir, tempdir};

        /// Fake base URI to use in tests.
        #[fixture]
        pub fn base_uri() -> &'static str {
            "http://local.connection"
        }

        /// Fake onion-service base URI to use in tests.
        #[fixture]
        pub fn base_uri_onion() -> &'static str {
            "http://local.connection.onion"
        }

        /// Primary test user id for generated certificates.
        #[fixture]
        pub fn cert_name() -> &'static str {
            "foo@invalid.example.com"
        }

        /// Secondary test user id, for tests that need a second address.
        #[fixture]
        pub fn alt_cert_name() -> &'static str {
            "bar@invalid.example.com"
        }

        /// Generates a fresh test certificate bound to `cert_name`, with
        /// signing and transport-encryption subkeys.
        #[fixture]
        pub fn cert<'a>(cert_name: &'a str) -> (&'a str, Cert) {
            let (tpk, _) = CertBuilder::new()
                .add_signing_subkey()
                .add_transport_encryption_subkey()
                .add_userid(cert_name)
                .generate()
                .unwrap();

            (cert_name, tpk)
        }

        /// Like `cert`, but serialized to OpenPGP binary form.
        #[fixture]
        pub fn serialized_cert<'a>(
            #[from(cert)] (cert_name, tpk): (&'a str, Cert),
        ) -> (&'a str, Vec<u8>) {
            let mut tpk_serialized = Vec::new();
            tpk.serialize(&mut tpk_serialized).unwrap();

            (cert_name, tpk_serialized)
        }

        /// Builds a multipart/form-data request body (content type + bytes)
        /// that uploads the serialized test certificate as the `keytext`
        /// field, alongside an empty `csrf` field.
        #[fixture]
        pub fn key_multipart_form_data(
            #[from(serialized_cert)] (_cert_name, serialized_cert): (&str, Vec<u8>),
        ) -> (ContentType, Vec<u8>) {
            let ct = ContentType::new("multipart", "form-data").with_params((
                "boundary",
                "---------------------------14733842173518794281682249499",
            ));

            // NOTE: the trailing `\` escapes strip the newline AND any
            // leading whitespace of the following line, so the literal's
            // byte content is exactly the multipart framing.
            let header = b"-----------------------------14733842173518794281682249499\r\n\
                Content-Disposition: form-data; name=\"csrf\"\r\n\
                \r\n\
                \r\n\
                -----------------------------14733842173518794281682249499\r\n\
                Content-Disposition: form-data; name=\"keytext\"; filename=\".k\"\r\n\
                Content-Type: application/octet-stream\r\n\
                \r\n";
            let footer = b"\r\n-----------------------------14733842173518794281682249499--";

            let mut body = Vec::new();
            body.extend_from_slice(header);
            body.extend_from_slice(&serialized_cert);
            body.extend_from_slice(footer);

            (ct, body)
        }

        /// Creates a configuration and empty state dir for testing purposes.
        ///
        /// Note that you need to keep the returned TempDir alive for the
        /// duration of your test. To debug the test, mem::forget it to
        /// prevent cleanup.
        #[fixture]
        pub fn configuration(
            base_uri: &str,
            base_uri_onion: &str,
        ) -> (TempDir, ::rocket::figment::Figment) {
            let root = tempdir().expect("valid temporary directory for configuration");

            let filemail = root.path().join("filemail");
            ::std::fs::create_dir_all(&filemail).expect("valid temporary directory for filemail");

            let base_dir: PathBuf = root.path().into();

            let config = ::rocket::Config::figment()
                .select("staging")
                .merge(("root", root.path()))
                .merge((
                    "template_dir",
                    ::std::env::current_dir()
                        .unwrap()
                        .join("dist/templates")
                        .to_str()
                        .unwrap(),
                ))
                .merge((
                    "email_template_dir",
                    ::std::env::current_dir()
                        .unwrap()
                        .join("dist/email-templates")
                        .to_str()
                        .unwrap(),
                ))
                .merge((
                    "assets_dir",
                    ::std::env::current_dir()
                        .unwrap()
                        .join("dist/assets")
                        .to_str()
                        .unwrap(),
                ))
                .merge((
                    "keys_internal_dir",
                    base_dir.join("keys_internal").to_str().unwrap(),
                ))
                .merge((
                    "keys_external_dir",
                    base_dir.join("keys_external").to_str().unwrap(),
                ))
                .merge(("tmp_dir", base_dir.join("tmp").to_str().unwrap()))
                .merge(("token_dir", base_dir.join("tokens").to_str().unwrap()))
                .merge((
                    "maintenance_file",
                    base_dir.join("maintenance").to_str().unwrap(),
                ))
                .merge(("base-URI", base_uri))
                .merge(("base-URI-Onion", base_uri_onion))
                .merge(("from", "from@example.com"))
                .merge(("token_secret", "hagrid"))
                .merge(("token_validity", 3600u64))
                .merge((
                    "filemail_into",
                    filemail
                        .into_os_string()
                        .into_string()
                        .expect("path is valid UTF8"),
                ));

            (root, config)
        }

        /// Builds (but does not launch) a rocket instance over the test
        /// configuration, keeping the state TempDir alive alongside it.
        #[fixture]
        fn rocket(
            #[from(configuration)] (tmpdir, config): (TempDir, ::rocket::figment::Figment),
        ) -> (TempDir, ::rocket::Rocket<::rocket::Build>) {
            (tmpdir, configure_rocket(::rocket::custom(config)))
        }

        /// Blocking test client over the configured rocket instance.
        #[fixture]
        pub fn client(
            #[from(rocket)] (tmpdir, rocket): (TempDir, ::rocket::Rocket<::rocket::Build>),
        ) -> (TempDir, Client) {
            let client = Client::untracked(rocket).expect("valid rocket instance");

            (tmpdir, client)
        }

        /// Message written to the maintenance file by maintenance tests.
        #[fixture]
        pub fn maintenance_text() -> &'static str {
            "maintenance-message"
        }

        /// Placeholder token for endpoints that take one in the URL.
        #[fixture]
        pub fn token() -> &'static str {
            "token-pseudo-random"
        }

        /// Contains reusable assertion helpers.
        pub mod assert {
            use ::rocket::http::{ContentType, Status};
            use ::rocket::local::blocking::LocalResponse;
            use hagrid_database::{Database, Sqlite};

            /// Runs the database's own consistency check on managed state.
            pub fn consistency(rocket: &::rocket::Rocket<::rocket::Orbit>) {
                let db = rocket.state::<Sqlite>().unwrap();
                db.check_consistency().unwrap();
            }

            /// Asserts status and content type, and that the body contains
            /// `present_page_text`. The body is printed to aid debugging.
            pub fn response(
                response: LocalResponse,
                status: Status,
                content_type: ContentType,
                present_page_text: &str,
            ) {
                assert_eq!(response.status(), status);
                assert_eq!(response.content_type(), Some(content_type));

                let body = response.into_string().unwrap();
                println!("{body}");
                assert!(body.contains(present_page_text));
            }

            /// Asserts status and content type, and that the body does NOT
            /// contain `absent_page_text`.
            pub fn response_does_not_contain_text(
                response: LocalResponse,
                status: Status,
                content_type: ContentType,
                absent_page_text: &str,
            ) {
                assert_eq!(response.status(), status);
                assert_eq!(response.content_type(), Some(content_type));

                let body = response.into_string().unwrap();
                println!("{body}");
                assert!(!body.contains(absent_page_text));
            }

            pub mod maintenance {
                use super::*;

                /// Asserts a 503 Service Unavailable response carrying the
                /// given maintenance text.
                pub fn response(
                    response: LocalResponse,
                    content_type: ContentType,
                    maintenance_text: &str,
                ) {
                    super::response(
                        response,
                        Status::ServiceUnavailable,
                        content_type,
                        maintenance_text,
                    );
                }
            }
        }

        /// Contains reusable test implementations.
        pub mod test {
            use super::*;
            use ::rocket::http::{ContentType, Status};
            use ::rocket::local::blocking::LocalResponse;
            use std::fs;
            use std::fs::File;
            use std::io::Write;
            use tempfile::TempDir;

            /// Shared maintenance-mode test: creates the maintenance file,
            /// checks the endpoint serves the notice, removes the file, and
            /// checks the endpoint serves normally again.
            ///
            /// NOTE: As dispatch_request closure called twice data referenced by closure has to be
            /// cloned instead of moved into.
            pub fn maintenance<'c>(
                dispatch_request: impl Fn() -> LocalResponse<'c>,
                tmpdir: TempDir,
                content_type: ContentType,
                maintenance_text: &str,
            ) {
                let maintenance_path = tmpdir.path().join("maintenance");
                let mut file = File::create(&maintenance_path).unwrap();
                file.write_all(maintenance_text.as_bytes()).unwrap();

                // Check that endpoint return a maintenance message
                assert::maintenance::response(
                    dispatch_request(),
                    content_type.clone(),
                    maintenance_text,
                );

                fs::remove_file(&maintenance_path).unwrap();

                assert::response_does_not_contain_text(
                    dispatch_request(),
                    Status::Ok,
                    content_type,
                    maintenance_text,
                );
            }
        }
    }

    /// Smoke test: a freshly configured instance passes the database
    /// consistency check.
    #[rstest]
    fn basic_consistency(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
        assert::consistency(client.rocket());
    }
}
|
182
src/routes/pks.rs
Normal file
182
src/routes/pks.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::web::hkp::Hkp;
|
||||
use crate::web::vks::response::UploadResponse;
|
||||
use crate::web::{MyResponse, RequestOrigin, hkp, vks_web};
|
||||
use crate::{mail, tokens, web};
|
||||
use hagrid_database::{Query, Sqlite};
|
||||
use rocket::Data;
|
||||
use rocket::http::ContentType;
|
||||
use rocket_codegen::{get, post};
|
||||
use rocket_i18n::I18n;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[post("/pks/add", format = "multipart/form-data", data = "<data>")]
|
||||
pub async fn pks_add_form_data(
|
||||
db: &rocket::State<Sqlite>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
cont_type: &ContentType,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_post_form_data(db, tokens_stateless, rate_limiter, i18n, cont_type, data)
|
||||
.await
|
||||
{
|
||||
Ok(_) => MyResponse::plain("Ok".into()),
|
||||
Err(err) => MyResponse::ise(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/pks/add",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<data>"
|
||||
)]
|
||||
pub async fn pks_add_form(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<Sqlite>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
i18n: I18n,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
|
||||
Ok(UploadResponse::Ok {
|
||||
is_new_key,
|
||||
key_fpr,
|
||||
primary_uid,
|
||||
token,
|
||||
status,
|
||||
..
|
||||
}) => {
|
||||
let msg = hkp::pks_add_ok(
|
||||
&origin,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
token,
|
||||
status,
|
||||
is_new_key,
|
||||
key_fpr,
|
||||
primary_uid,
|
||||
);
|
||||
MyResponse::plain(msg)
|
||||
}
|
||||
Ok(_) => {
|
||||
let msg = format!(
|
||||
"Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload",
|
||||
baseuri = origin.get_base_uri()
|
||||
);
|
||||
MyResponse::plain(msg)
|
||||
}
|
||||
Err(err) => MyResponse::ise(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/pks/lookup?<op>&<search>")]
|
||||
pub fn pks_lookup(
|
||||
db: &rocket::State<Sqlite>,
|
||||
i18n: I18n,
|
||||
op: Option<String>,
|
||||
search: Option<String>,
|
||||
) -> MyResponse {
|
||||
let search = search.unwrap_or_default();
|
||||
let key = match Hkp::from_str(&search) {
|
||||
Ok(key) => key,
|
||||
Err(_) => return MyResponse::bad_request_plain("Invalid search query!"),
|
||||
};
|
||||
let query = match key {
|
||||
Hkp::Fingerprint { fpr } => Query::ByFingerprint(fpr),
|
||||
Hkp::KeyID { keyid } => Query::ByKeyID(keyid),
|
||||
Hkp::Email { email } => Query::ByEmail(email),
|
||||
Hkp::ShortKeyID { query: _, .. } => {
|
||||
return MyResponse::bad_request_plain(
|
||||
"Search by short key ids is not supported, sorry!",
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(op) = op {
|
||||
match op.as_str() {
|
||||
"index" => hkp::key_to_hkp_index(db, i18n, query),
|
||||
"get" => web::key_to_response_plain(db, i18n, query),
|
||||
"vindex" => MyResponse::not_implemented_plain("vindex not implemented"),
|
||||
s if s.starts_with("x-") => {
|
||||
MyResponse::not_implemented_plain("x-* operations not implemented")
|
||||
}
|
||||
&_ => MyResponse::bad_request_plain("Invalid op parameter!"),
|
||||
}
|
||||
} else {
|
||||
MyResponse::bad_request_plain("op parameter required!")
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/pks/internal/index/<query_string>")]
|
||||
pub fn pks_internal_index(
|
||||
db: &rocket::State<Sqlite>,
|
||||
i18n: I18n,
|
||||
query_string: String,
|
||||
) -> MyResponse {
|
||||
match query_string.parse() {
|
||||
Ok(query) => hkp::key_to_hkp_index(db, i18n, query),
|
||||
Err(_) => MyResponse::bad_request_plain("Invalid search query!"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use rocket::http::{ContentType, Status};
    use rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    /// Tests for `GET /pks/lookup`.
    mod get_pks_lookup {
        use super::*;

        /// Short key id searches (with or without 0x prefix) are rejected
        /// with 400 and an explanatory "not supported" message.
        #[rstest]
        fn not_supported_search_query(
            #[from(client)] (_tmpdir, client): (TempDir, Client),
            #[values(
                "/pks/lookup?op=get&search=0x1234abcd",
                "/pks/lookup?op=get&search=1234abcd"
            )]
            uri: &str,
        ) {
            assert::response(
                client.get(uri).dispatch(),
                Status::BadRequest,
                ContentType::HTML,
                "not supported",
            );
        }
    }

    /// Tests for `POST /pks/add` with multipart/form-data.
    mod post_pks_add_multipart_form_data {
        use super::*;

        const URI: &str = "/pks/add";

        /// Uploads are refused with the maintenance notice while the
        /// maintenance file exists, and accepted again once removed.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
            #[from(key_multipart_form_data)] (content_type, key_body): (ContentType, Vec<u8>),
        ) {
            let request_closure = || {
                client
                    .post(URI)
                    .header(content_type.to_owned())
                    .body(&key_body)
                    .dispatch()
            };

            test::maintenance(
                request_closure,
                tmpdir,
                ContentType::Plain,
                maintenance_text,
            );
        }
    }
}
|
328
src/routes/vks.rs
Normal file
328
src/routes/vks.rs
Normal file
@@ -0,0 +1,328 @@
|
||||
// GOTCHA:
|
||||
// This module is kinda special as it contains a number of various handlers
|
||||
// which related to WebUI of Verifying Keys Service (hence VKS),
|
||||
// but their URLs DO NOT start with /vks.
|
||||
// Otherwise, these handlers would be scattered all over the other modules.
|
||||
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::web::vks::response::PublishResponse;
|
||||
use crate::web::vks_web::{UPLOAD_LIMIT, forms, template};
|
||||
use crate::web::{MyResponse, RequestOrigin, vks, vks_web};
|
||||
use crate::{mail, tokens};
|
||||
use anyhow::anyhow;
|
||||
use gettext_macros::i18n;
|
||||
use hagrid_database::{Query, Sqlite, StatefulTokens};
|
||||
use rocket::Data;
|
||||
use rocket::form::Form;
|
||||
use rocket::http::ContentType;
|
||||
use rocket::uri;
|
||||
use rocket_codegen::{get, post, put};
|
||||
use rocket_i18n::I18n;
|
||||
use std::io::Cursor;
|
||||
|
||||
/// Renders the key upload form.
#[get("/upload")]
pub fn upload(origin: RequestOrigin, i18n: I18n) -> MyResponse {
    MyResponse::ok_bare("upload/upload", i18n, origin)
}
|
||||
|
||||
#[post("/upload/submit", format = "multipart/form-data", data = "<data>")]
|
||||
pub async fn upload_post_form_data(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
cont_type: &ContentType,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type).await
|
||||
{
|
||||
Ok(response) => MyResponse::upload_response(response, i18n, origin),
|
||||
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/search?<q>")]
|
||||
pub fn search(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
i18n: I18n,
|
||||
q: String,
|
||||
) -> MyResponse {
|
||||
match q.parse::<Query>() {
|
||||
Ok(query) => vks_web::key_to_response(db, origin, i18n, q, query),
|
||||
Err(e) => MyResponse::bad_request("index", e, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
#[put("/", data = "<data>")]
|
||||
pub async fn quick_upload(
|
||||
db: &rocket::State<Sqlite>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
origin: RequestOrigin,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
let buf = match data.open(UPLOAD_LIMIT).into_bytes().await {
|
||||
Ok(buf) => buf.into_inner(),
|
||||
Err(error) => return MyResponse::bad_request("400-plain", anyhow!(error), i18n, origin),
|
||||
};
|
||||
|
||||
MyResponse::upload_response_quick(
|
||||
vks::process_key(db, &i18n, tokens_stateless, rate_limiter, Cursor::new(buf)),
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
|
||||
#[get("/upload/<token>", rank = 2)]
|
||||
pub fn quick_upload_proceed(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
token: String,
|
||||
) -> MyResponse {
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/submit",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<data>"
|
||||
)]
|
||||
pub async fn upload_post_form(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
|
||||
Ok(response) => MyResponse::upload_response(response, i18n, origin),
|
||||
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/request-verify",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<request>"
|
||||
)]
|
||||
pub fn request_verify_form(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::VerifyRequest>,
|
||||
) -> MyResponse {
|
||||
let forms::VerifyRequest { token, address } = request.into_inner();
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![address],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/request-verify",
|
||||
format = "multipart/form-data",
|
||||
data = "<request>"
|
||||
)]
|
||||
pub fn request_verify_form_data(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::VerifyRequest>,
|
||||
) -> MyResponse {
|
||||
let forms::VerifyRequest { token, address } = request.into_inner();
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![address],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post("/verify/<token>")]
|
||||
pub fn verify_confirm(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
token_service: &rocket::State<StatefulTokens>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
token: String,
|
||||
) -> MyResponse {
|
||||
let rate_limit_id = format!("verify-token-{}", &token);
|
||||
match vks::verify_confirm(db, &i18n, token_service, token) {
|
||||
PublishResponse::Ok { fingerprint, email } => {
|
||||
rate_limiter.action_perform(rate_limit_id);
|
||||
let userid_link = uri!(search(q = &email)).to_string();
|
||||
let context = template::Verify {
|
||||
userid: email,
|
||||
key_fpr: fingerprint,
|
||||
userid_link,
|
||||
};
|
||||
|
||||
MyResponse::ok("upload/publish-result", context, i18n, origin)
|
||||
}
|
||||
PublishResponse::Error(error) => {
|
||||
let error_msg = if rate_limiter.action_check(rate_limit_id) {
|
||||
anyhow!(error)
|
||||
} else {
|
||||
anyhow!(i18n!(
|
||||
i18n.catalog,
|
||||
"This address has already been verified."
|
||||
))
|
||||
};
|
||||
MyResponse::bad_request("400", error_msg, i18n, origin)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/verify/<token>")]
|
||||
pub fn verify_confirm_form(origin: RequestOrigin, i18n: I18n, token: String) -> MyResponse {
|
||||
MyResponse::ok(
|
||||
"upload/verification-form",
|
||||
template::VerifyForm { token },
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::routes::tests::common::*;
    use ::rocket::http::{ContentType, Status};
    use ::rocket::local::blocking::Client;
    use rstest::rstest;
    use tempfile::TempDir;

    /// Tests for `GET /upload`.
    mod get_upload {
        use super::*;

        const URI: &str = "/upload";

        /// The upload form must render.
        #[rstest]
        fn upload_form_is_visible(#[from(client)] (_tmpdir, client): (TempDir, Client)) {
            assert::response(
                client.get(URI).dispatch(),
                Status::Ok,
                ContentType::HTML,
                "upload",
            );
        }

        /// The upload page serves the maintenance notice while the
        /// maintenance file exists.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
        ) {
            test::maintenance(
                || client.get(URI).dispatch(),
                tmpdir,
                ContentType::HTML,
                maintenance_text,
            );
        }
    }

    /// Tests for `GET /search`.
    mod get_search {
        use super::*;

        /// Short key id queries are rejected with 400 "not supported".
        #[rstest]
        fn not_supported_search_query(
            #[from(client)] (_tmpdir, client): (TempDir, Client),
            #[values("/search?q=0x1234abcd", "/search?q=1234abcd")] uri: &str,
        ) {
            assert::response(
                client.get(uri).dispatch(),
                Status::BadRequest,
                ContentType::HTML,
                "not supported",
            );
        }
    }

    /// Tests for `GET /verify/<token>`.
    mod get_verify_token {
        use super::*;

        /// Builds the verify URI for a given token.
        fn uri(token: &str) -> String {
            const URI: &str = "/verify";

            [URI, token].join("/")
        }

        /// The verify page serves the maintenance notice while the
        /// maintenance file exists.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
            token: &str,
        ) {
            test::maintenance(
                || client.get(uri(token)).dispatch(),
                tmpdir,
                ContentType::HTML,
                maintenance_text,
            );
        }
    }

    /// Tests for `PUT /` (quick upload).
    mod put_root {
        use super::*;

        const URI: &str = "/";

        /// Quick uploads are refused with the plain-text maintenance notice
        /// while the maintenance file exists.
        #[rstest]
        fn maintenance(
            #[from(client)] (tmpdir, client): (TempDir, Client),
            maintenance_text: &str,
            #[from(serialized_cert)] (_cert_name, serialized_cert): (&str, Vec<u8>),
        ) {
            let request_closure = || client.put(URI).body(&serialized_cert).dispatch();

            test::maintenance(
                request_closure,
                tmpdir,
                ContentType::Plain,
                maintenance_text,
            );
        }
    }
}
|
46
src/routes/wkd.rs
Normal file
46
src/routes/wkd.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use crate::web::MyResponse;
|
||||
use hagrid_database::{Database, Sqlite};
|
||||
use rocket::get;
|
||||
|
||||
// WKD queries
|
||||
#[get("/.well-known/openpgpkey/<domain>/hu/<wkd_hash>")]
|
||||
pub fn wkd_query(db: &rocket::State<Sqlite>, domain: String, wkd_hash: String) -> MyResponse {
|
||||
match db.by_domain_and_hash_wkd(&domain, &wkd_hash) {
|
||||
Some(key) => MyResponse::wkd(key, &wkd_hash),
|
||||
None => MyResponse::not_found_plain("No key found for this email address."),
|
||||
}
|
||||
}
|
||||
|
||||
// Policy requests.
|
||||
// 200 response with an empty body.
|
||||
#[get("/.well-known/openpgpkey/<_domain>/policy")]
|
||||
pub fn wkd_policy(_domain: String) -> MyResponse {
|
||||
MyResponse::plain("".to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::routes::tests::common::*;
|
||||
use rocket::http::{ContentType, Status};
|
||||
use rocket::local::blocking::Client;
|
||||
use rstest::rstest;
|
||||
use tempfile::TempDir;
|
||||
|
||||
mod get_wkd_policy {
|
||||
use super::*;
|
||||
|
||||
const URI: &str = "/.well-known/openpgpkey/example.org/policy";
|
||||
|
||||
#[rstest]
|
||||
fn wkd_policy_respond_successfully_with_empty_body(
|
||||
#[from(client)] (_tmpdir, client): (TempDir, Client),
|
||||
) {
|
||||
assert::response(
|
||||
client.get(URI).dispatch(),
|
||||
Status::Ok,
|
||||
ContentType::Plain,
|
||||
"",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,64 +1,50 @@
|
||||
use ring::aead::{open_in_place, seal_in_place, Algorithm, AES_256_GCM};
|
||||
use ring::aead::{OpeningKey, SealingKey};
|
||||
use ring::digest;
|
||||
use ring::hmac;
|
||||
use ring::rand::{SecureRandom, SystemRandom};
|
||||
use aes_gcm::{
|
||||
AeadCore, Aes256Gcm, Key, KeyInit, Nonce,
|
||||
aead::{Aead, OsRng},
|
||||
};
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
// Keep these in sync, and keep the key len synced with the `private` docs as
|
||||
// well as the `KEYS_INFO` const in secure::Key.
|
||||
static ALGO: &Algorithm = &AES_256_GCM;
|
||||
const NONCE_LEN: usize = 12;
|
||||
|
||||
pub struct SealedState {
|
||||
sealing_key: SealingKey,
|
||||
opening_key: OpeningKey,
|
||||
cipher: Aes256Gcm,
|
||||
}
|
||||
|
||||
impl SealedState {
|
||||
pub fn new(secret: &str) -> Self {
|
||||
let salt = hmac::SigningKey::new(&digest::SHA256, b"hagrid");
|
||||
let mut key = vec![0; 32];
|
||||
ring::hkdf::extract_and_expand(&salt, secret.as_bytes(), b"", &mut key);
|
||||
let mut hash = Sha256::new();
|
||||
hash.update(b"hagrid");
|
||||
hash.update(secret);
|
||||
let hashed_secret = hash.finalize();
|
||||
let key = Key::<Aes256Gcm>::from_slice(&hashed_secret);
|
||||
let cipher = Aes256Gcm::new(key);
|
||||
|
||||
let sealing_key = SealingKey::new(ALGO, key.as_ref()).expect("sealing key creation");
|
||||
let opening_key = OpeningKey::new(ALGO, key.as_ref()).expect("sealing key creation");
|
||||
|
||||
SealedState {
|
||||
sealing_key,
|
||||
opening_key,
|
||||
}
|
||||
SealedState { cipher }
|
||||
}
|
||||
|
||||
pub fn unseal(&self, data: &[u8]) -> Result<String, &'static str> {
|
||||
if data.len() < NONCE_LEN {
|
||||
return Err("invalid sealed value: too short");
|
||||
}
|
||||
let (nonce, sealed) = data.split_at(NONCE_LEN);
|
||||
let mut sealed_copy = sealed.to_vec();
|
||||
let unsealed = open_in_place(&self.opening_key, nonce, &[], 0, &mut sealed_copy)
|
||||
let (sealed, nonce) = data.split_at(data.len() - NONCE_LEN);
|
||||
let unsealed = self
|
||||
.cipher
|
||||
.decrypt(Nonce::from_slice(nonce), sealed)
|
||||
.map_err(|_| "invalid key/nonce/value: bad seal")?;
|
||||
|
||||
::std::str::from_utf8(unsealed)
|
||||
core::str::from_utf8(&unsealed)
|
||||
.map(|s| s.to_string())
|
||||
.map_err(|_| "bad unsealed utf8")
|
||||
}
|
||||
|
||||
pub fn seal(&self, input: &str) -> Vec<u8> {
|
||||
let mut data;
|
||||
let output_len = {
|
||||
let overhead = ALGO.tag_len();
|
||||
data = vec![0; NONCE_LEN + input.len() + overhead];
|
||||
|
||||
let (nonce, in_out) = data.split_at_mut(NONCE_LEN);
|
||||
SystemRandom::new()
|
||||
.fill(nonce)
|
||||
.expect("couldn't random fill nonce");
|
||||
in_out[..input.len()].copy_from_slice(input.as_bytes());
|
||||
|
||||
seal_in_place(&self.sealing_key, nonce, &[], in_out, overhead).expect("in-place seal")
|
||||
};
|
||||
|
||||
data[..(NONCE_LEN + output_len)].to_vec()
|
||||
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
|
||||
let mut sealed = self
|
||||
.cipher
|
||||
.encrypt(&nonce, input.as_bytes())
|
||||
.expect("sealing works");
|
||||
sealed.extend(nonce);
|
||||
sealed
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,8 +55,10 @@ mod tests {
|
||||
#[test]
|
||||
fn test_encrypt_decrypt() {
|
||||
let sv = SealedState::new("swag");
|
||||
|
||||
let sealed = sv.seal("test");
|
||||
|
||||
// use a different instance to make sure no internal state remains
|
||||
let sv = SealedState::new("swag");
|
||||
let unsealed = sv.unseal(sealed.as_slice()).unwrap();
|
||||
|
||||
assert_eq!("test", unsealed);
|
||||
|
@@ -3,15 +3,14 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
use rocket_dyn_templates::handlebars::Handlebars;
|
||||
|
||||
use crate::get_i18n;
|
||||
use crate::i18n::I18NHelper;
|
||||
use crate::web::get_i18n;
|
||||
use crate::Result;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TemplateOverrides(String, HashSet<String>);
|
||||
|
||||
impl TemplateOverrides {
|
||||
pub fn load(template_path: &Path, localized_dir: &str) -> Result<Self> {
|
||||
pub fn load(template_path: &Path, localized_dir: &str) -> anyhow::Result<Self> {
|
||||
load_localized_template_names(template_path, localized_dir)
|
||||
.map(|vec| Self(localized_dir.to_owned(), vec))
|
||||
}
|
||||
@@ -29,7 +28,7 @@ impl TemplateOverrides {
|
||||
fn load_localized_template_names(
|
||||
template_path: &Path,
|
||||
localized_dir: &str,
|
||||
) -> Result<HashSet<String>> {
|
||||
) -> anyhow::Result<HashSet<String>> {
|
||||
let language_glob = template_path.join(localized_dir).join("*");
|
||||
glob::glob(language_glob.to_str().expect("valid glob path string"))
|
||||
.unwrap()
|
||||
@@ -43,14 +42,14 @@ fn load_localized_template_names(
|
||||
.map(move |path| {
|
||||
// TODO this is a hack
|
||||
let template_name =
|
||||
remove_extension(remove_extension(path.strip_prefix(&template_path)?));
|
||||
remove_extension(remove_extension(path.strip_prefix(template_path)?));
|
||||
Ok(template_name.to_string_lossy().into_owned())
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn load_handlebars(template_dir: &Path) -> Result<Handlebars<'static>> {
|
||||
pub fn load_handlebars(template_dir: &Path) -> anyhow::Result<Handlebars<'static>> {
|
||||
let mut handlebars = Handlebars::new();
|
||||
|
||||
let i18ns = get_i18n();
|
||||
|
@@ -1,7 +1,8 @@
|
||||
use crate::sealed_state::SealedState;
|
||||
use anyhow::anyhow;
|
||||
|
||||
use crate::Result;
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
use serde_derive::Deserialize;
|
||||
|
||||
pub trait StatelessSerializable: Serialize + DeserializeOwned {}
|
||||
|
||||
@@ -38,7 +39,7 @@ impl Service {
|
||||
base64::encode_config(&token_sealed, base64::URL_SAFE_NO_PAD)
|
||||
}
|
||||
|
||||
pub fn check<T>(&self, token_encoded: &str) -> Result<T>
|
||||
pub fn check<T>(&self, token_encoded: &str) -> anyhow::Result<T>
|
||||
where
|
||||
T: StatelessSerializable,
|
||||
{
|
||||
@@ -113,7 +114,7 @@ mod tests {
|
||||
let payload = TestStruct1 {
|
||||
payload: "hello".to_owned(),
|
||||
};
|
||||
let token = "rwM_S9gZaRQaf6DLvmWtZSipQhH_G5ronSIJv2FrMdwGBPSYYQ-1jaP58dTHU5WuC14vb8jxmz2Xf_b3pqzpCGTEJj9drm4t";
|
||||
let token = "C6fCPAGv93nZqDQXodl-bsDgzkxqbjDtbeR6Be4v_UHJfL2UJxG2imzmUlK1PfLT4QzNIRWsdFDYWrx_aCgLZ4MgVQWYyazn";
|
||||
let mt = Service::init("secret", 60);
|
||||
|
||||
let check_result = mt.check(token);
|
||||
|
213
src/web/hkp.rs
213
src/web/hkp.rs
@@ -1,27 +1,22 @@
|
||||
use std::fmt;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use rocket::http::ContentType;
|
||||
use rocket::Data;
|
||||
use rocket_i18n::I18n;
|
||||
use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
|
||||
use url::percent_encoding::{DEFAULT_ENCODE_SET, utf8_percent_encode};
|
||||
|
||||
use crate::database::types::{Email, Fingerprint, KeyID};
|
||||
use crate::database::{Database, KeyDatabase, Query};
|
||||
use hagrid_database::{
|
||||
Database, Query, Sqlite,
|
||||
types::{Email, Fingerprint, KeyID},
|
||||
};
|
||||
|
||||
use crate::i18n_helpers::describe_query_error;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
|
||||
use crate::tokens;
|
||||
|
||||
use crate::mail;
|
||||
use crate::web;
|
||||
use crate::web::vks::response::EmailStatus;
|
||||
use crate::web::vks::response::UploadResponse;
|
||||
use crate::web::{vks_web, MyResponse, RequestOrigin};
|
||||
use crate::web::{MyResponse, RequestOrigin};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Hkp {
|
||||
@@ -34,10 +29,10 @@ pub enum Hkp {
|
||||
impl fmt::Display for Hkp {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Hkp::Fingerprint { ref fpr, .. } => write!(f, "{}", fpr),
|
||||
Hkp::KeyID { ref keyid, .. } => write!(f, "{}", keyid),
|
||||
Hkp::Email { ref email, .. } => write!(f, "{}", email),
|
||||
Hkp::ShortKeyID { ref query, .. } => write!(f, "{}", query),
|
||||
Hkp::Fingerprint { fpr, .. } => write!(f, "{}", fpr),
|
||||
Hkp::KeyID { keyid, .. } => write!(f, "{}", keyid),
|
||||
Hkp::Email { email, .. } => write!(f, "{}", email),
|
||||
Hkp::ShortKeyID { query, .. } => write!(f, "{}", query),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -68,67 +63,7 @@ impl std::str::FromStr for Hkp {
|
||||
}
|
||||
}
|
||||
|
||||
#[post("/pks/add", format = "multipart/form-data", data = "<data>")]
|
||||
pub async fn pks_add_form_data(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
cont_type: &ContentType,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_post_form_data(db, tokens_stateless, rate_limiter, i18n, cont_type, data)
|
||||
.await
|
||||
{
|
||||
Ok(_) => MyResponse::plain("Ok".into()),
|
||||
Err(err) => MyResponse::ise(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/pks/add",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<data>"
|
||||
)]
|
||||
pub async fn pks_add_form(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
i18n: I18n,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match vks_web::process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
|
||||
Ok(UploadResponse::Ok {
|
||||
is_new_key,
|
||||
key_fpr,
|
||||
primary_uid,
|
||||
token,
|
||||
status,
|
||||
..
|
||||
}) => {
|
||||
let msg = pks_add_ok(
|
||||
&origin,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
token,
|
||||
status,
|
||||
is_new_key,
|
||||
key_fpr,
|
||||
primary_uid,
|
||||
);
|
||||
MyResponse::plain(msg)
|
||||
}
|
||||
Ok(_) => {
|
||||
let msg = format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
|
||||
MyResponse::plain(msg)
|
||||
}
|
||||
Err(err) => MyResponse::ise(err),
|
||||
}
|
||||
}
|
||||
|
||||
fn pks_add_ok(
|
||||
pub fn pks_add_ok(
|
||||
origin: &RequestOrigin,
|
||||
mail_service: &mail::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
@@ -139,7 +74,10 @@ fn pks_add_ok(
|
||||
primary_uid: Option<Email>,
|
||||
) -> String {
|
||||
if primary_uid.is_none() {
|
||||
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
|
||||
return format!(
|
||||
"Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload",
|
||||
baseuri = origin.get_base_uri()
|
||||
);
|
||||
}
|
||||
let primary_uid = primary_uid.unwrap();
|
||||
|
||||
@@ -149,7 +87,10 @@ fn pks_add_ok(
|
||||
return "Upload successful. This is a new key, a welcome email has been sent."
|
||||
.to_string();
|
||||
}
|
||||
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
|
||||
return format!(
|
||||
"Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload",
|
||||
baseuri = origin.get_base_uri()
|
||||
);
|
||||
}
|
||||
|
||||
let has_unverified = status.iter().any(|(_, v)| *v == EmailStatus::Unpublished);
|
||||
@@ -157,7 +98,10 @@ fn pks_add_ok(
|
||||
return "Upload successful.".to_string();
|
||||
}
|
||||
|
||||
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
|
||||
format!(
|
||||
"Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload",
|
||||
baseuri = origin.get_base_uri()
|
||||
)
|
||||
}
|
||||
|
||||
fn send_welcome_mail(
|
||||
@@ -172,57 +116,7 @@ fn send_welcome_mail(
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
#[get("/pks/lookup?<op>&<search>")]
|
||||
pub fn pks_lookup(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
i18n: I18n,
|
||||
op: Option<String>,
|
||||
search: Option<String>,
|
||||
) -> MyResponse {
|
||||
let search = search.unwrap_or_default();
|
||||
let key = match Hkp::from_str(&search) {
|
||||
Ok(key) => key,
|
||||
Err(_) => return MyResponse::bad_request_plain("Invalid search query!"),
|
||||
};
|
||||
let query = match key {
|
||||
Hkp::Fingerprint { fpr } => Query::ByFingerprint(fpr),
|
||||
Hkp::KeyID { keyid } => Query::ByKeyID(keyid),
|
||||
Hkp::Email { email } => Query::ByEmail(email),
|
||||
Hkp::ShortKeyID { query: _, .. } => {
|
||||
return MyResponse::bad_request_plain(
|
||||
"Search by short key ids is not supported, sorry!",
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(op) = op {
|
||||
match op.as_str() {
|
||||
"index" => key_to_hkp_index(db, i18n, query),
|
||||
"get" => web::key_to_response_plain(db, i18n, query),
|
||||
"vindex" => MyResponse::not_implemented_plain("vindex not implemented"),
|
||||
s if s.starts_with("x-") => {
|
||||
MyResponse::not_implemented_plain("x-* operations not implemented")
|
||||
}
|
||||
&_ => MyResponse::bad_request_plain("Invalid op parameter!"),
|
||||
}
|
||||
} else {
|
||||
MyResponse::bad_request_plain("op parameter required!")
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/pks/internal/index/<query_string>")]
|
||||
pub fn pks_internal_index(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
i18n: I18n,
|
||||
query_string: String,
|
||||
) -> MyResponse {
|
||||
match query_string.parse() {
|
||||
Ok(query) => key_to_hkp_index(db, i18n, query),
|
||||
Err(_) => MyResponse::bad_request_plain("Invalid search query!"),
|
||||
}
|
||||
}
|
||||
|
||||
fn key_to_hkp_index(db: &rocket::State<KeyDatabase>, i18n: I18n, query: Query) -> MyResponse {
|
||||
pub fn key_to_hkp_index(db: &rocket::State<Sqlite>, i18n: I18n, query: Query) -> MyResponse {
|
||||
use sequoia_openpgp::policy::StandardPolicy;
|
||||
use sequoia_openpgp::types::RevocationStatus;
|
||||
|
||||
@@ -288,25 +182,30 @@ fn key_to_hkp_index(db: &rocket::State<KeyDatabase>, i18n: I18n, query: Query) -
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rocket::http::ContentType;
|
||||
use rocket::http::Status;
|
||||
use ::rocket::local::blocking::Client;
|
||||
use rstest::rstest;
|
||||
|
||||
use sequoia_openpgp::serialize::Serialize;
|
||||
use ::rocket::http::ContentType;
|
||||
use ::rocket::http::Status;
|
||||
|
||||
use crate::mail::pop_mail;
|
||||
use crate::web::tests::*;
|
||||
use crate::routes::tests::common::*;
|
||||
use crate::web::tests::common;
|
||||
use sequoia_openpgp::Cert;
|
||||
use sequoia_openpgp::serialize::Serialize;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn hkp() {
|
||||
let (tmpdir, client) = client().unwrap();
|
||||
#[rstest]
|
||||
fn hkp(
|
||||
base_uri: &str,
|
||||
#[from(cert)] (cert_name, tpk): (&str, Cert),
|
||||
#[from(client)] (tmpdir, client): (TempDir, Client),
|
||||
) {
|
||||
let filemail_into = tmpdir.path().join("filemail");
|
||||
|
||||
// eprintln!("LEAKING: {:?}", tmpdir);
|
||||
// ::std::mem::forget(tmpdir);
|
||||
|
||||
// Generate a key and upload it.
|
||||
let tpk = build_cert("foo@invalid.example.com");
|
||||
|
||||
// Prepare to /pks/add
|
||||
let mut armored = Vec::new();
|
||||
{
|
||||
@@ -349,14 +248,14 @@ mod tests {
|
||||
assert!(upload_mail.is_none());
|
||||
|
||||
// We should not be able to look it up by email address.
|
||||
check_null_responses_by_email(&client, "foo@invalid.example.com");
|
||||
common::assert::check_null_responses_by_email(&client, cert_name);
|
||||
|
||||
// And check that we can get it back via the machine readable
|
||||
// interface.
|
||||
check_mr_responses_by_fingerprint(&client, &tpk, 0);
|
||||
common::assert::check_mr_responses_by_fingerprint(&client, &tpk, 0);
|
||||
|
||||
// And check that we can see the human-readable result page.
|
||||
check_hr_responses_by_fingerprint(&client, &tpk, 0);
|
||||
common::assert::check_hr_responses_by_fingerprint(&client, &tpk, 0, base_uri);
|
||||
|
||||
// Upload the same key again, make sure the welcome mail is not sent again
|
||||
let response = client
|
||||
@@ -369,18 +268,20 @@ mod tests {
|
||||
let welcome_mail = pop_mail(filemail_into.as_path()).unwrap();
|
||||
assert!(welcome_mail.is_none());
|
||||
|
||||
assert_consistency(client.rocket());
|
||||
assert::consistency(client.rocket());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hkp_add_two() {
|
||||
let (tmpdir, client) = client().unwrap();
|
||||
#[rstest]
|
||||
fn hkp_add_two(
|
||||
base_uri: &str,
|
||||
#[from(cert)] (_, tpk_0): (&str, Cert),
|
||||
#[with(alt_cert_name())]
|
||||
#[from(cert)]
|
||||
(_, tpk_1): (&str, Cert),
|
||||
#[from(client)] (tmpdir, client): (TempDir, Client),
|
||||
) {
|
||||
let filemail_into = tmpdir.path().join("filemail");
|
||||
|
||||
// Generate two keys and upload them.
|
||||
let tpk_0 = build_cert("foo@invalid.example.com");
|
||||
let tpk_1 = build_cert("bar@invalid.example.com");
|
||||
|
||||
// Prepare to /pks/add
|
||||
let mut armored_first = Vec::new();
|
||||
let mut armored_both = Vec::new();
|
||||
@@ -429,11 +330,11 @@ mod tests {
|
||||
let upload_mail_1 = pop_mail(filemail_into.as_path()).unwrap();
|
||||
assert!(upload_mail_1.is_none());
|
||||
|
||||
check_mr_responses_by_fingerprint(&client, &tpk_0, 0);
|
||||
check_mr_responses_by_fingerprint(&client, &tpk_1, 0);
|
||||
check_hr_responses_by_fingerprint(&client, &tpk_0, 0);
|
||||
check_hr_responses_by_fingerprint(&client, &tpk_1, 0);
|
||||
common::assert::check_mr_responses_by_fingerprint(&client, &tpk_0, 0);
|
||||
common::assert::check_mr_responses_by_fingerprint(&client, &tpk_1, 0);
|
||||
common::assert::check_hr_responses_by_fingerprint(&client, &tpk_0, 0, base_uri);
|
||||
common::assert::check_hr_responses_by_fingerprint(&client, &tpk_1, 0, base_uri);
|
||||
|
||||
assert_consistency(client.rocket());
|
||||
assert::consistency(client.rocket());
|
||||
}
|
||||
}
|
||||
|
@@ -1,20 +1,18 @@
|
||||
use rocket::fairing::{Fairing, Info, Kind};
|
||||
use rocket::http::Method;
|
||||
use rocket::{Data, Request};
|
||||
use rocket_dyn_templates::Template;
|
||||
use rocket_i18n::I18n;
|
||||
use serde_json::json;
|
||||
|
||||
use rocket::{async_trait, uri};
|
||||
use serde_derive::Serialize;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::web::MyResponse;
|
||||
|
||||
pub struct MaintenanceMode {
|
||||
maintenance_file: PathBuf,
|
||||
}
|
||||
|
||||
mod templates {
|
||||
pub mod templates {
|
||||
use serde_derive::Serialize;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct MaintenanceMode {
|
||||
pub message: String,
|
||||
@@ -41,13 +39,19 @@ impl Fairing for MaintenanceMode {
|
||||
|
||||
let path = request.uri().path().as_str();
|
||||
if self.is_request_json(path) {
|
||||
request.set_uri(uri!(maintenance_error_json(message)));
|
||||
request.set_uri(uri!(crate::routes::maintenance::maintenance_error_json(
|
||||
message
|
||||
)));
|
||||
request.set_method(Method::Get);
|
||||
} else if self.is_request_plain(path, request.method()) {
|
||||
request.set_uri(uri!(maintenance_error_plain(message)));
|
||||
request.set_uri(uri!(crate::routes::maintenance::maintenance_error_plain(
|
||||
message
|
||||
)));
|
||||
request.set_method(Method::Get);
|
||||
} else if self.is_request_web(path) {
|
||||
request.set_uri(uri!(maintenance_error_web(message)));
|
||||
request.set_uri(uri!(crate::routes::maintenance::maintenance_error_web(
|
||||
message
|
||||
)));
|
||||
request.set_method(Method::Get);
|
||||
}
|
||||
}
|
||||
@@ -78,28 +82,7 @@ impl MaintenanceMode {
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/maintenance/plain/<message>")]
|
||||
pub fn maintenance_error_plain(message: String) -> MyResponse {
|
||||
MyResponse::MaintenancePlain(message)
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsonErrorMessage {
|
||||
message: String,
|
||||
}
|
||||
|
||||
#[get("/maintenance/json/<message>")]
|
||||
pub fn maintenance_error_json(message: String) -> MyResponse {
|
||||
MyResponse::MaintenanceJson(json!(JsonErrorMessage { message }))
|
||||
}
|
||||
|
||||
#[get("/maintenance/web/<message>")]
|
||||
pub fn maintenance_error_web(message: String, i18n: I18n) -> MyResponse {
|
||||
let ctx = templates::MaintenanceMode {
|
||||
message,
|
||||
version: env!("VERGEN_SEMVER").to_string(),
|
||||
commit: env!("VERGEN_SHA_SHORT").to_string(),
|
||||
lang: i18n.lang.to_owned(),
|
||||
};
|
||||
MyResponse::Maintenance(Template::render("maintenance", ctx))
|
||||
pub struct JsonErrorMessage {
|
||||
pub message: String,
|
||||
}
|
||||
|
@@ -1,25 +1,21 @@
|
||||
use rocket::form::Form;
|
||||
use rocket_i18n::I18n;
|
||||
|
||||
use crate::Result;
|
||||
|
||||
use gettext_macros::i18n;
|
||||
|
||||
use crate::counters;
|
||||
use crate::database::{types::Email, types::Fingerprint, Database, KeyDatabase};
|
||||
use crate::mail;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::tokens::{self, StatelessSerializable};
|
||||
use crate::web::vks_web;
|
||||
use crate::web::{MyResponse, RequestOrigin};
|
||||
use hagrid_database::{Database, Sqlite, types::Email, types::Fingerprint};
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct StatelessVerifyToken {
|
||||
fpr: Fingerprint,
|
||||
pub struct StatelessVerifyToken {
|
||||
pub fpr: Fingerprint,
|
||||
}
|
||||
impl StatelessSerializable for StatelessVerifyToken {}
|
||||
|
||||
mod templates {
|
||||
pub mod templates {
|
||||
use serde_derive::Serialize;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ManageKey {
|
||||
pub key_fpr: String,
|
||||
@@ -42,6 +38,8 @@ mod templates {
|
||||
}
|
||||
|
||||
pub mod forms {
|
||||
use rocket::FromForm;
|
||||
|
||||
#[derive(FromForm)]
|
||||
pub struct ManageRequest {
|
||||
pub search_term: String,
|
||||
@@ -54,169 +52,20 @@ pub mod forms {
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/manage")]
|
||||
pub fn vks_manage(origin: RequestOrigin, i18n: I18n) -> MyResponse {
|
||||
MyResponse::ok_bare("manage/manage", i18n, origin)
|
||||
}
|
||||
|
||||
#[get("/manage/<token>")]
|
||||
pub fn vks_manage_key(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
i18n: I18n,
|
||||
token: String,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
) -> MyResponse {
|
||||
use crate::database::types::Fingerprint;
|
||||
use std::convert::TryFrom;
|
||||
if let Ok(StatelessVerifyToken { fpr }) = token_service.check(&token) {
|
||||
match db.lookup(&database::Query::ByFingerprint(fpr)) {
|
||||
Ok(Some(tpk)) => {
|
||||
let fp = Fingerprint::try_from(tpk.fingerprint()).unwrap();
|
||||
let mut emails: Vec<Email> = tpk
|
||||
.userids()
|
||||
.map(|u| u.userid().to_string().parse::<Email>())
|
||||
.flatten()
|
||||
.collect();
|
||||
emails.sort_unstable();
|
||||
emails.dedup();
|
||||
let uid_status = emails
|
||||
.into_iter()
|
||||
.map(|email| templates::ManageKeyUidStatus {
|
||||
address: email.to_string(),
|
||||
published: true,
|
||||
})
|
||||
.collect();
|
||||
let key_link = uri!(vks_web::search(q = fp.to_string())).to_string();
|
||||
let context = templates::ManageKey {
|
||||
key_fpr: fp.to_string(),
|
||||
key_link,
|
||||
uid_status,
|
||||
token,
|
||||
base_uri: origin.get_base_uri().to_owned(),
|
||||
};
|
||||
MyResponse::ok("manage/manage_key", context, i18n, origin)
|
||||
}
|
||||
Ok(None) => MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "This link is invalid or expired")),
|
||||
i18n,
|
||||
origin,
|
||||
),
|
||||
Err(e) => MyResponse::ise(e),
|
||||
}
|
||||
} else {
|
||||
MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "This link is invalid or expired")),
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[post("/manage", data = "<request>")]
|
||||
pub fn vks_manage_post(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::ManageRequest>,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
) -> MyResponse {
|
||||
use std::convert::TryInto;
|
||||
|
||||
let email = match request.search_term.parse::<Email>() {
|
||||
Ok(email) => email,
|
||||
Err(_) => {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "Malformed address: {}"; request.search_term.as_str())),
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let tpk = match db.lookup(&database::Query::ByEmail(email.clone())) {
|
||||
Ok(Some(tpk)) => tpk,
|
||||
Ok(None) => {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(i18n.catalog, "No key for address: {}"; request.search_term.as_str())),
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
Err(e) => return MyResponse::ise(e),
|
||||
};
|
||||
|
||||
let email_exists = tpk
|
||||
.userids()
|
||||
.flat_map(|binding| binding.userid().to_string().parse::<Email>())
|
||||
.any(|candidate| candidate == email);
|
||||
|
||||
if !email_exists {
|
||||
return MyResponse::ise(anyhow!("Internal error: address check failed!"));
|
||||
}
|
||||
|
||||
if !rate_limiter.action_perform(format!("manage-{}", &email)) {
|
||||
return MyResponse::not_found(
|
||||
Some("manage/manage"),
|
||||
Some(i18n!(
|
||||
i18n.catalog,
|
||||
"A request has already been sent for this address recently."
|
||||
)),
|
||||
i18n,
|
||||
origin,
|
||||
);
|
||||
}
|
||||
|
||||
let fpr: Fingerprint = tpk.fingerprint().try_into().unwrap();
|
||||
let fpr_text = fpr.to_string();
|
||||
let token = token_service.create(&StatelessVerifyToken { fpr });
|
||||
let link_path = uri!(vks_manage_key(token)).to_string();
|
||||
|
||||
let base_uri = origin.get_base_uri();
|
||||
if let Err(e) = mail_service.send_manage_token(&i18n, base_uri, fpr_text, &email, &link_path) {
|
||||
return MyResponse::ise(e);
|
||||
}
|
||||
|
||||
let ctx = templates::ManageLinkSent {
|
||||
address: email.to_string(),
|
||||
};
|
||||
MyResponse::ok("manage/manage_link_sent", ctx, i18n, origin)
|
||||
}
|
||||
|
||||
#[post("/manage/unpublish", data = "<request>")]
|
||||
pub fn vks_manage_unpublish(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
i18n: I18n,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
request: Form<forms::ManageDelete>,
|
||||
) -> MyResponse {
|
||||
match vks_manage_unpublish_or_fail(origin, db, token_service, i18n, request) {
|
||||
Ok(response) => response,
|
||||
Err(e) => MyResponse::ise(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn vks_manage_unpublish_or_fail(
|
||||
origin: RequestOrigin,
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
db: &rocket::State<Sqlite>,
|
||||
token_service: &rocket::State<tokens::Service>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::ManageDelete>,
|
||||
) -> Result<MyResponse> {
|
||||
) -> anyhow::Result<MyResponse> {
|
||||
let verify_token = token_service.check::<StatelessVerifyToken>(&request.token)?;
|
||||
let email = request.address.parse::<Email>()?;
|
||||
|
||||
db.set_email_unpublished(&verify_token.fpr, &email)?;
|
||||
counters::inc_address_unpublished(&email);
|
||||
|
||||
Ok(vks_manage_key(
|
||||
Ok(crate::routes::manage::vks_manage_key(
|
||||
origin,
|
||||
db,
|
||||
i18n,
|
||||
|
1948
src/web/mod.rs
1948
src/web/mod.rs
File diff suppressed because it is too large
Load Diff
7
src/web/util.rs
Normal file
7
src/web/util.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
use std::{env, option_env};
|
||||
|
||||
pub fn get_commit_sha() -> String {
|
||||
option_env!("COMMIT_SHA_SHORT")
|
||||
.unwrap_or_else(|| env!("VERGEN_SHA_SHORT"))
|
||||
.to_string()
|
||||
}
|
@@ -1,30 +1,31 @@
|
||||
use crate::Result;
|
||||
|
||||
use crate::counters;
|
||||
use crate::database::types::{Email, Fingerprint};
|
||||
use crate::database::{
|
||||
Database, EmailAddressStatus, ImportResult, KeyDatabase, StatefulTokens, TpkStatus,
|
||||
};
|
||||
use crate::mail;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::tokens::{self, StatelessSerializable};
|
||||
use crate::web::RequestOrigin;
|
||||
use hagrid_database::{
|
||||
Database, EmailAddressStatus, ImportResult, Sqlite, StatefulTokens, TpkStatus,
|
||||
types::{Email, Fingerprint},
|
||||
};
|
||||
|
||||
use gettext_macros::i18n;
|
||||
use rocket_i18n::I18n;
|
||||
|
||||
use sequoia_openpgp::Cert;
|
||||
use sequoia_openpgp::armor::ReaderMode;
|
||||
use sequoia_openpgp::cert::CertParser;
|
||||
use sequoia_openpgp::parse::{Dearmor, PacketParserBuilder, Parse};
|
||||
use sequoia_openpgp::Cert;
|
||||
|
||||
use self::response::*;
|
||||
use anyhow::anyhow;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryFrom;
|
||||
use std::io::Read;
|
||||
|
||||
use self::response::*;
|
||||
|
||||
pub mod request {
|
||||
use serde_derive::Deserialize;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct UploadRequest {
|
||||
pub keytext: String,
|
||||
@@ -38,7 +39,10 @@ pub mod request {
|
||||
}
|
||||
|
||||
pub mod response {
|
||||
use crate::database::types::Email;
|
||||
use hagrid_database::types::Email;
|
||||
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub enum EmailStatus {
|
||||
@@ -52,8 +56,6 @@ pub mod response {
|
||||
Revoked,
|
||||
}
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub enum UploadResponse {
|
||||
Ok {
|
||||
token: String,
|
||||
@@ -98,7 +100,7 @@ struct VerifyTpkState {
|
||||
impl StatelessSerializable for VerifyTpkState {}
|
||||
|
||||
pub fn process_key(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
i18n: &I18n,
|
||||
tokens_stateless: &tokens::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
@@ -143,7 +145,7 @@ pub fn process_key(
|
||||
}
|
||||
}
|
||||
|
||||
fn log_db_merge(import_result: Result<ImportResult>) -> Result<ImportResult> {
|
||||
fn log_db_merge(import_result: anyhow::Result<ImportResult>) -> anyhow::Result<ImportResult> {
|
||||
match import_result {
|
||||
Ok(ImportResult::New(_)) => counters::inc_key_upload("new"),
|
||||
Ok(ImportResult::Updated(_)) => counters::inc_key_upload("updated"),
|
||||
@@ -154,7 +156,7 @@ fn log_db_merge(import_result: Result<ImportResult>) -> Result<ImportResult> {
|
||||
import_result
|
||||
}
|
||||
|
||||
fn process_key_multiple(db: &KeyDatabase, tpks: Vec<Cert>) -> response::UploadResponse {
|
||||
fn process_key_multiple(db: &Sqlite, tpks: Vec<Cert>) -> response::UploadResponse {
|
||||
let key_fprs: Vec<_> = tpks
|
||||
.into_iter()
|
||||
.flat_map(|tpk| Fingerprint::try_from(tpk.fingerprint()).map(|fpr| (fpr, tpk)))
|
||||
@@ -165,7 +167,7 @@ fn process_key_multiple(db: &KeyDatabase, tpks: Vec<Cert>) -> response::UploadRe
|
||||
}
|
||||
|
||||
fn process_key_single(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
i18n: &I18n,
|
||||
tokens_stateless: &tokens::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
@@ -178,7 +180,7 @@ fn process_key_single(
|
||||
Ok(ImportResult::Updated(tpk_status)) => (tpk_status, false),
|
||||
Ok(ImportResult::Unchanged(tpk_status)) => (tpk_status, false),
|
||||
Err(_) => {
|
||||
return UploadResponse::err(i18n!(i18n.catalog, "Error processing uploaded key."))
|
||||
return UploadResponse::err(i18n!(i18n.catalog, "Error processing uploaded key."));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -201,7 +203,7 @@ fn process_key_single(
|
||||
}
|
||||
|
||||
pub fn request_verify(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: &RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
@@ -213,7 +215,7 @@ pub fn request_verify(
|
||||
) -> response::UploadResponse {
|
||||
let (verify_state, tpk_status) = match check_tpk_state(db, token_stateless, i18n, &token) {
|
||||
Ok(ok) => ok,
|
||||
Err(e) => return UploadResponse::err(&e.to_string()),
|
||||
Err(e) => return UploadResponse::err(e.to_string()),
|
||||
};
|
||||
|
||||
if tpk_status.is_revoked {
|
||||
@@ -222,8 +224,7 @@ pub fn request_verify(
|
||||
|
||||
let emails_requested: Vec<_> = addresses
|
||||
.into_iter()
|
||||
.map(|address| address.parse::<Email>())
|
||||
.flatten()
|
||||
.flat_map(|address| address.parse::<Email>())
|
||||
.filter(|email| verify_state.addresses.contains(email))
|
||||
.filter(|email| {
|
||||
tpk_status.email_status.iter().any(|(uid_email, status)| {
|
||||
@@ -245,7 +246,7 @@ pub fn request_verify(
|
||||
)
|
||||
.is_err()
|
||||
{
|
||||
return UploadResponse::err(&format!("error sending email to {}", &email));
|
||||
return UploadResponse::err(format!("error sending email to {}", &email));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,11 +254,11 @@ pub fn request_verify(
|
||||
}
|
||||
|
||||
fn check_tpk_state(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
token_stateless: &tokens::Service,
|
||||
i18n: &I18n,
|
||||
token: &str,
|
||||
) -> Result<(VerifyTpkState, TpkStatus)> {
|
||||
) -> anyhow::Result<(VerifyTpkState, TpkStatus)> {
|
||||
let verify_state = token_stateless
|
||||
.check::<VerifyTpkState>(token)
|
||||
.map_err(|_| {
|
||||
@@ -277,7 +278,7 @@ fn send_verify_email(
|
||||
i18n: &I18n,
|
||||
fpr: &Fingerprint,
|
||||
email: &Email,
|
||||
) -> Result<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let token_content = (fpr.clone(), email.clone());
|
||||
let token_str = serde_json::to_string(&token_content)?;
|
||||
let token_verify = token_stateful.new_token("verify", token_str.as_bytes())?;
|
||||
@@ -292,7 +293,7 @@ fn send_verify_email(
|
||||
}
|
||||
|
||||
pub fn verify_confirm(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
db: &rocket::State<Sqlite>,
|
||||
i18n: &I18n,
|
||||
token_service: &rocket::State<StatefulTokens>,
|
||||
token: String,
|
||||
@@ -309,10 +310,10 @@ pub fn verify_confirm(
|
||||
}
|
||||
|
||||
fn check_publish_token(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
token_service: &StatefulTokens,
|
||||
token: String,
|
||||
) -> Result<(Fingerprint, Email)> {
|
||||
) -> anyhow::Result<(Fingerprint, Email)> {
|
||||
let payload = token_service.pop_token("verify", &token)?;
|
||||
let (fingerprint, email) = serde_json::from_str(&payload)?;
|
||||
|
||||
@@ -364,7 +365,7 @@ fn show_upload_verify(
|
||||
.collect();
|
||||
let primary_uid = tpk_status
|
||||
.email_status
|
||||
.get(0)
|
||||
.first()
|
||||
.map(|(email, _)| email)
|
||||
.cloned();
|
||||
|
||||
|
@@ -6,21 +6,12 @@ use rocket_i18n::{I18n, Translations};
|
||||
use serde_json::json;
|
||||
use std::io::Cursor;
|
||||
|
||||
use crate::database::types::{Email, Fingerprint, KeyID};
|
||||
use crate::database::{KeyDatabase, Query, StatefulTokens};
|
||||
use crate::mail;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::tokens;
|
||||
|
||||
use crate::web;
|
||||
use crate::web::vks;
|
||||
use crate::web::vks::response::*;
|
||||
use crate::web::{MyResponse, RequestOrigin};
|
||||
|
||||
use rocket::serde::json::Error as JsonError;
|
||||
|
||||
pub mod json {
|
||||
use crate::web::vks::response::EmailStatus;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@@ -43,10 +34,10 @@ pub mod json {
|
||||
}
|
||||
}
|
||||
|
||||
type JsonResult = Result<serde_json::Value, JsonErrorResponse>;
|
||||
pub type JsonResult = Result<serde_json::Value, JsonErrorResponse>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct JsonErrorResponse(Status, String);
|
||||
pub struct JsonErrorResponse(pub Status, pub String);
|
||||
|
||||
impl<'r> Responder<'r, 'static> for JsonErrorResponse {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> response::Result<'static> {
|
||||
@@ -59,7 +50,7 @@ impl<'r> Responder<'r, 'static> for JsonErrorResponse {
|
||||
}
|
||||
}
|
||||
|
||||
fn json_or_error<T>(data: Result<Json<T>, JsonError>) -> Result<Json<T>, JsonErrorResponse> {
|
||||
pub fn json_or_error<T>(data: Result<Json<T>, JsonError>) -> Result<Json<T>, JsonErrorResponse> {
|
||||
match data {
|
||||
Ok(data) => Ok(data),
|
||||
Err(JsonError::Io(_)) => Err(JsonErrorResponse(
|
||||
@@ -70,7 +61,7 @@ fn json_or_error<T>(data: Result<Json<T>, JsonError>) -> Result<Json<T>, JsonErr
|
||||
}
|
||||
}
|
||||
|
||||
fn upload_ok_json(response: UploadResponse) -> Result<serde_json::Value, JsonErrorResponse> {
|
||||
pub fn upload_ok_json(response: UploadResponse) -> Result<serde_json::Value, JsonErrorResponse> {
|
||||
match response {
|
||||
UploadResponse::Ok {
|
||||
token,
|
||||
@@ -87,34 +78,10 @@ fn upload_ok_json(response: UploadResponse) -> Result<serde_json::Value, JsonErr
|
||||
}
|
||||
}
|
||||
|
||||
#[post("/vks/v1/upload", format = "json", data = "<data>")]
|
||||
pub fn upload_json(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
data: Result<Json<json::UploadRequest>, JsonError>,
|
||||
) -> JsonResult {
|
||||
let data = json_or_error(data)?;
|
||||
use std::io::Cursor;
|
||||
let data_reader = Cursor::new(data.keytext.as_bytes());
|
||||
let result = vks::process_key(db, &i18n, tokens_stateless, rate_limiter, data_reader);
|
||||
upload_ok_json(result)
|
||||
}
|
||||
|
||||
#[post("/vks/v1/upload", rank = 2)]
|
||||
pub fn upload_fallback(origin: RequestOrigin) -> JsonErrorResponse {
|
||||
let error_msg = format!(
|
||||
"expected application/json data. see {}/about/api for api docs.",
|
||||
origin.get_base_uri()
|
||||
);
|
||||
JsonErrorResponse(Status::BadRequest, error_msg)
|
||||
}
|
||||
|
||||
fn get_locale(langs: &rocket::State<Translations>, locales: Vec<String>) -> I18n {
|
||||
pub fn get_locale(langs: &rocket::State<Translations>, locales: Vec<String>) -> I18n {
|
||||
locales
|
||||
.iter()
|
||||
.flat_map(|lang| lang.split(|c| c == '-' || c == ';' || c == '_').next())
|
||||
.flat_map(|lang| lang.split(['-', ';', '_']).next())
|
||||
.flat_map(|lang| langs.iter().find(|(trans, _)| trans == &lang))
|
||||
.next()
|
||||
.or_else(|| langs.iter().find(|(trans, _)| trans == &"en"))
|
||||
@@ -124,79 +91,3 @@ fn get_locale(langs: &rocket::State<Translations>, locales: Vec<String>) -> I18n
|
||||
})
|
||||
.expect("Expected to have an english translation!")
|
||||
}
|
||||
|
||||
#[post("/vks/v1/request-verify", format = "json", data = "<data>")]
|
||||
pub fn request_verify_json(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
langs: &rocket::State<Translations>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
data: Result<Json<json::VerifyRequest>, JsonError>,
|
||||
) -> JsonResult {
|
||||
let data = json_or_error(data)?;
|
||||
let json::VerifyRequest {
|
||||
token,
|
||||
addresses,
|
||||
locale,
|
||||
} = data.into_inner();
|
||||
let i18n = get_locale(langs, locale.unwrap_or_default());
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
addresses,
|
||||
);
|
||||
upload_ok_json(result)
|
||||
}
|
||||
|
||||
#[post("/vks/v1/request-verify", rank = 2)]
|
||||
pub fn request_verify_fallback(origin: RequestOrigin) -> JsonErrorResponse {
|
||||
let error_msg = format!(
|
||||
"expected application/json data. see {}/about/api for api docs.",
|
||||
origin.get_base_uri()
|
||||
);
|
||||
JsonErrorResponse(Status::BadRequest, error_msg)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-fingerprint/<fpr>")]
|
||||
pub fn vks_v1_by_fingerprint(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
i18n: I18n,
|
||||
fpr: String,
|
||||
) -> MyResponse {
|
||||
let query = match fpr.parse::<Fingerprint>() {
|
||||
Ok(fpr) => Query::ByFingerprint(fpr),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed fingerprint"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-email/<email>")]
|
||||
pub fn vks_v1_by_email(db: &rocket::State<KeyDatabase>, i18n: I18n, email: String) -> MyResponse {
|
||||
let email = email.replace("%40", "@");
|
||||
let query = match email.parse::<Email>() {
|
||||
Ok(email) => Query::ByEmail(email),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed e-mail address"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
||||
#[get("/vks/v1/by-keyid/<kid>")]
|
||||
pub fn vks_v1_by_keyid(db: &rocket::State<KeyDatabase>, i18n: I18n, kid: String) -> MyResponse {
|
||||
let query = match kid.parse::<KeyID>() {
|
||||
Ok(keyid) => Query::ByKeyID(keyid),
|
||||
Err(_) => return MyResponse::bad_request_plain("malformed key id"),
|
||||
};
|
||||
|
||||
web::key_to_response_plain(db, i18n, query)
|
||||
}
|
||||
|
@@ -1,34 +1,34 @@
|
||||
use crate::Result;
|
||||
|
||||
use multipart::server::Multipart;
|
||||
use multipart::server::save::Entries;
|
||||
use multipart::server::save::SaveResult::*;
|
||||
use multipart::server::Multipart;
|
||||
|
||||
use gettext_macros::i18n;
|
||||
use rocket::Data;
|
||||
use rocket::data::ByteUnit;
|
||||
use rocket::form::Form;
|
||||
use rocket::form::ValueField;
|
||||
use rocket::http::ContentType;
|
||||
use rocket::Data;
|
||||
use rocket::uri;
|
||||
use rocket_i18n::I18n;
|
||||
use url::percent_encoding::percent_decode;
|
||||
|
||||
use crate::database::{Database, KeyDatabase, Query, StatefulTokens};
|
||||
use crate::i18n_helpers::describe_query_error;
|
||||
use crate::mail;
|
||||
use crate::rate_limiter::RateLimiter;
|
||||
use crate::tokens;
|
||||
use crate::web::{MyResponse, RequestOrigin};
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::io::Cursor;
|
||||
use hagrid_database::{Database, Query, Sqlite};
|
||||
|
||||
use crate::web::vks;
|
||||
use crate::web::vks::response::*;
|
||||
use anyhow::anyhow;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Cursor;
|
||||
|
||||
const UPLOAD_LIMIT: ByteUnit = ByteUnit::Mebibyte(1);
|
||||
pub const UPLOAD_LIMIT: ByteUnit = ByteUnit::Mebibyte(1);
|
||||
|
||||
pub mod forms {
|
||||
use rocket::FromForm;
|
||||
use serde_derive::Deserialize;
|
||||
|
||||
mod forms {
|
||||
#[derive(FromForm, Deserialize)]
|
||||
pub struct VerifyRequest {
|
||||
pub token: String,
|
||||
@@ -41,7 +41,9 @@ mod forms {
|
||||
}
|
||||
}
|
||||
|
||||
mod template {
|
||||
pub mod template {
|
||||
use serde_derive::Serialize;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct VerifyForm {
|
||||
pub token: String,
|
||||
@@ -93,10 +95,14 @@ mod template {
|
||||
}
|
||||
|
||||
impl MyResponse {
|
||||
fn upload_response_quick(response: UploadResponse, i18n: I18n, origin: RequestOrigin) -> Self {
|
||||
pub fn upload_response_quick(
|
||||
response: UploadResponse,
|
||||
i18n: I18n,
|
||||
origin: RequestOrigin,
|
||||
) -> Self {
|
||||
match response {
|
||||
UploadResponse::Ok { token, .. } => {
|
||||
let uri = uri!(quick_upload_proceed(token));
|
||||
let uri = uri!(crate::routes::vks::quick_upload_proceed(token));
|
||||
let text = format!(
|
||||
"Key successfully uploaded. Proceed with verification here:\n{}{}\n",
|
||||
origin.get_base_uri(),
|
||||
@@ -114,7 +120,7 @@ impl MyResponse {
|
||||
}
|
||||
}
|
||||
|
||||
fn upload_response(response: UploadResponse, i18n: I18n, origin: RequestOrigin) -> Self {
|
||||
pub fn upload_response(response: UploadResponse, i18n: I18n, origin: RequestOrigin) -> Self {
|
||||
match response {
|
||||
UploadResponse::Ok {
|
||||
token,
|
||||
@@ -148,7 +154,7 @@ impl MyResponse {
|
||||
i18n: I18n,
|
||||
origin: RequestOrigin,
|
||||
) -> Self {
|
||||
let key_link = uri!(search(q = &key_fpr)).to_string();
|
||||
let key_link = uri!(crate::routes::vks::search(q = &key_fpr)).to_string();
|
||||
|
||||
let count_revoked = uid_status
|
||||
.iter()
|
||||
@@ -193,7 +199,7 @@ impl MyResponse {
|
||||
let keys = key_fprs
|
||||
.into_iter()
|
||||
.map(|fpr| {
|
||||
let key_link = uri!(search(q = &fpr)).to_string();
|
||||
let key_link = uri!(crate::routes::vks::search(q = &fpr)).to_string();
|
||||
template::UploadOkKey {
|
||||
key_fpr: fpr,
|
||||
key_link,
|
||||
@@ -207,53 +213,19 @@ impl MyResponse {
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/upload")]
|
||||
pub fn upload(origin: RequestOrigin, i18n: I18n) -> MyResponse {
|
||||
MyResponse::ok_bare("upload/upload", i18n, origin)
|
||||
}
|
||||
|
||||
#[post("/upload/submit", format = "multipart/form-data", data = "<data>")]
|
||||
pub async fn upload_post_form_data(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
cont_type: &ContentType,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type).await {
|
||||
Ok(response) => MyResponse::upload_response(response, i18n, origin),
|
||||
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn process_post_form_data(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
db: &rocket::State<Sqlite>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
cont_type: &ContentType,
|
||||
data: Data<'_>,
|
||||
) -> Result<UploadResponse> {
|
||||
) -> anyhow::Result<UploadResponse> {
|
||||
process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type).await
|
||||
}
|
||||
|
||||
#[get("/search?<q>")]
|
||||
pub fn search(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
i18n: I18n,
|
||||
q: String,
|
||||
) -> MyResponse {
|
||||
match q.parse::<Query>() {
|
||||
Ok(query) => key_to_response(db, origin, i18n, q, query),
|
||||
Err(e) => MyResponse::bad_request("index", e, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
fn key_to_response(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
pub fn key_to_response(
|
||||
db: &rocket::State<Sqlite>,
|
||||
origin: RequestOrigin,
|
||||
i18n: I18n,
|
||||
query_string: String,
|
||||
@@ -280,82 +252,17 @@ fn key_to_response(
|
||||
MyResponse::ok("found", context, i18n, origin)
|
||||
}
|
||||
|
||||
#[put("/", data = "<data>")]
|
||||
pub async fn quick_upload(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
origin: RequestOrigin,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
let buf = match data.open(UPLOAD_LIMIT).into_bytes().await {
|
||||
Ok(buf) => buf.into_inner(),
|
||||
Err(error) => return MyResponse::bad_request("400-plain", anyhow!(error), i18n, origin),
|
||||
};
|
||||
|
||||
MyResponse::upload_response_quick(
|
||||
vks::process_key(db, &i18n, tokens_stateless, rate_limiter, Cursor::new(buf)),
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
|
||||
#[get("/upload/<token>", rank = 2)]
|
||||
pub fn quick_upload_proceed(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
token: String,
|
||||
) -> MyResponse {
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/submit",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<data>"
|
||||
)]
|
||||
pub async fn upload_post_form(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
tokens_stateless: &rocket::State<tokens::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
data: Data<'_>,
|
||||
) -> MyResponse {
|
||||
match process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
|
||||
Ok(response) => MyResponse::upload_response(response, i18n, origin),
|
||||
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn process_post_form(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
tokens_stateless: &tokens::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
i18n: &I18n,
|
||||
data: Data<'_>,
|
||||
) -> Result<UploadResponse> {
|
||||
) -> anyhow::Result<UploadResponse> {
|
||||
// application/x-www-form-urlencoded
|
||||
let buf = data.open(UPLOAD_LIMIT).into_bytes().await?;
|
||||
|
||||
for ValueField { name, value } in Form::values(&*String::from_utf8_lossy(&buf)) {
|
||||
for ValueField { name, value } in Form::values(&String::from_utf8_lossy(&buf)) {
|
||||
let decoded_value = percent_decode(value.as_bytes())
|
||||
.decode_utf8()
|
||||
.map_err(|_| anyhow!("`Content-Type: application/x-www-form-urlencoded` not valid"))?;
|
||||
@@ -374,14 +281,14 @@ pub async fn process_post_form(
|
||||
Err(anyhow!("No keytext found"))
|
||||
}
|
||||
|
||||
async fn process_upload(
|
||||
db: &KeyDatabase,
|
||||
pub async fn process_upload(
|
||||
db: &Sqlite,
|
||||
tokens_stateless: &tokens::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
i18n: &I18n,
|
||||
data: Data<'_>,
|
||||
cont_type: &ContentType,
|
||||
) -> Result<UploadResponse> {
|
||||
) -> anyhow::Result<UploadResponse> {
|
||||
// multipart/form-data
|
||||
let (_, boundary) = cont_type
|
||||
.params()
|
||||
@@ -407,12 +314,12 @@ async fn process_upload(
|
||||
}
|
||||
|
||||
fn process_multipart(
|
||||
db: &KeyDatabase,
|
||||
db: &Sqlite,
|
||||
tokens_stateless: &tokens::Service,
|
||||
rate_limiter: &RateLimiter,
|
||||
i18n: &I18n,
|
||||
entries: Entries,
|
||||
) -> Result<UploadResponse> {
|
||||
) -> anyhow::Result<UploadResponse> {
|
||||
match entries.fields.get("keytext") {
|
||||
Some(ent) if ent.len() == 1 => {
|
||||
let reader = ent[0].data.readable()?;
|
||||
@@ -428,109 +335,3 @@ fn process_multipart(
|
||||
None => Err(anyhow!("No keytext found")),
|
||||
}
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/request-verify",
|
||||
format = "application/x-www-form-urlencoded",
|
||||
data = "<request>"
|
||||
)]
|
||||
pub fn request_verify_form(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::VerifyRequest>,
|
||||
) -> MyResponse {
|
||||
let forms::VerifyRequest { token, address } = request.into_inner();
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![address],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post(
|
||||
"/upload/request-verify",
|
||||
format = "multipart/form-data",
|
||||
data = "<request>"
|
||||
)]
|
||||
pub fn request_verify_form_data(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
token_stateful: &rocket::State<StatefulTokens>,
|
||||
token_stateless: &rocket::State<tokens::Service>,
|
||||
mail_service: &rocket::State<mail::Service>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
request: Form<forms::VerifyRequest>,
|
||||
) -> MyResponse {
|
||||
let forms::VerifyRequest { token, address } = request.into_inner();
|
||||
let result = vks::request_verify(
|
||||
db,
|
||||
&origin,
|
||||
token_stateful,
|
||||
token_stateless,
|
||||
mail_service,
|
||||
rate_limiter,
|
||||
&i18n,
|
||||
token,
|
||||
vec![address],
|
||||
);
|
||||
MyResponse::upload_response(result, i18n, origin)
|
||||
}
|
||||
|
||||
#[post("/verify/<token>")]
|
||||
pub fn verify_confirm(
|
||||
db: &rocket::State<KeyDatabase>,
|
||||
origin: RequestOrigin,
|
||||
token_service: &rocket::State<StatefulTokens>,
|
||||
rate_limiter: &rocket::State<RateLimiter>,
|
||||
i18n: I18n,
|
||||
token: String,
|
||||
) -> MyResponse {
|
||||
let rate_limit_id = format!("verify-token-{}", &token);
|
||||
match vks::verify_confirm(db, &i18n, token_service, token) {
|
||||
PublishResponse::Ok { fingerprint, email } => {
|
||||
rate_limiter.action_perform(rate_limit_id);
|
||||
let userid_link = uri!(search(q = &email)).to_string();
|
||||
let context = template::Verify {
|
||||
userid: email,
|
||||
key_fpr: fingerprint,
|
||||
userid_link,
|
||||
};
|
||||
|
||||
MyResponse::ok("upload/publish-result", context, i18n, origin)
|
||||
}
|
||||
PublishResponse::Error(error) => {
|
||||
let error_msg = if rate_limiter.action_check(rate_limit_id) {
|
||||
anyhow!(error)
|
||||
} else {
|
||||
anyhow!(i18n!(
|
||||
i18n.catalog,
|
||||
"This address has already been verified."
|
||||
))
|
||||
};
|
||||
MyResponse::bad_request("400", error_msg, i18n, origin)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/verify/<token>")]
|
||||
pub fn verify_confirm_form(origin: RequestOrigin, i18n: I18n, token: String) -> MyResponse {
|
||||
MyResponse::ok(
|
||||
"upload/verification-form",
|
||||
template::VerifyForm { token },
|
||||
i18n,
|
||||
origin,
|
||||
)
|
||||
}
|
||||
|
@@ -1,18 +0,0 @@
|
||||
use crate::database::{Database, KeyDatabase};
|
||||
use crate::web::MyResponse;
|
||||
|
||||
// WKD queries
|
||||
#[get("/.well-known/openpgpkey/<domain>/hu/<wkd_hash>")]
|
||||
pub fn wkd_query(db: &rocket::State<KeyDatabase>, domain: String, wkd_hash: String) -> MyResponse {
|
||||
match db.by_domain_and_hash_wkd(&domain, &wkd_hash) {
|
||||
Some(key) => MyResponse::wkd(key, &wkd_hash),
|
||||
None => MyResponse::not_found_plain("No key found for this email address."),
|
||||
}
|
||||
}
|
||||
|
||||
// Policy requests.
|
||||
// 200 response with an empty body.
|
||||
#[get("/.well-known/openpgpkey/<_domain>/policy")]
|
||||
pub fn wkd_policy(_domain: String) -> MyResponse {
|
||||
MyResponse::plain("".to_string())
|
||||
}
|
23
tester/Cargo.toml
Normal file
23
tester/Cargo.toml
Normal file
@@ -0,0 +1,23 @@
|
||||
[package]
|
||||
name = "tester"
|
||||
version = "0.1.0"
|
||||
authors = ["Vincent Breitmoser <look@my.amazin.horse>"]
|
||||
description = "Helper CLI for testing the hagrid keyserver"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
sequoia-openpgp = { workspace = true, features = ["crypto-openssl"] }
|
||||
log = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_derive = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
time = { workspace = true }
|
||||
url = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
idna = { workspace = true }
|
||||
fs2 = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive", "unicode"] }
|
||||
indicatif = { workspace = true }
|
59
tester/src/cli.rs
Normal file
59
tester/src/cli.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
use crate::{generate, genreqs};
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "Hagrid Tester", version, about, long_about = None, help_expected = true)]
|
||||
pub(crate) struct Cli {
|
||||
#[command(subcommand)]
|
||||
command: Command,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Command {
|
||||
/// generate requests
|
||||
GenReqs {
|
||||
#[arg(long = "fingerprints-file", default_value = "fingerprints.txt")]
|
||||
/// path to read fingerprints from
|
||||
fingerprints_file: PathBuf,
|
||||
/// host to generate requests for
|
||||
host: String,
|
||||
},
|
||||
/// Generate a test set of certificates
|
||||
Generate {
|
||||
#[arg(long, default_value_t = 100_000)]
|
||||
/// number of certificates to generate
|
||||
cert_count: u64,
|
||||
#[arg(long = "output-file", default_value = "keyring.pub.pgp")]
|
||||
/// path to file to store the certificates in
|
||||
certs_output_file: PathBuf,
|
||||
#[arg(long = "fingerprints-file", default_value = "fingerprints.txt")]
|
||||
/// path to file to store fingerprints in
|
||||
fingerprints_output_file: PathBuf,
|
||||
},
|
||||
}
|
||||
|
||||
pub(crate) fn dispatch_cmd(cli: Cli) -> anyhow::Result<()> {
|
||||
match cli.command {
|
||||
Command::GenReqs {
|
||||
fingerprints_file,
|
||||
host,
|
||||
} => genreqs::run(&host, fingerprints_file),
|
||||
Command::Generate {
|
||||
cert_count,
|
||||
certs_output_file,
|
||||
fingerprints_output_file,
|
||||
} => generate::run(cert_count, certs_output_file, fingerprints_output_file),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use clap::CommandFactory;
|
||||
|
||||
#[test]
|
||||
fn test_cli() {
|
||||
Cli::command().debug_assert()
|
||||
}
|
||||
}
|
35
tester/src/generate.rs
Normal file
35
tester/src/generate.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
use std::{fs::File, io::Write, path::Path};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use sequoia_openpgp::{cert::CertBuilder, serialize::Serialize};
|
||||
|
||||
use crate::util;
|
||||
|
||||
pub fn run(
|
||||
count: u64,
|
||||
output_path: impl AsRef<Path>,
|
||||
fprs_path: impl AsRef<Path>,
|
||||
) -> anyhow::Result<()> {
|
||||
let progress_bar = ProgressBar::new(count);
|
||||
progress_bar.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
|
||||
.progress_chars("##-"),
|
||||
);
|
||||
progress_bar.set_draw_delta(count / 100);
|
||||
|
||||
let mut output = File::create(output_path)?;
|
||||
let mut output_fprs = File::create(fprs_path)?;
|
||||
|
||||
for i in 0..count {
|
||||
let (cert, _) = CertBuilder::general_purpose(None, Some(util::gen_email(i))).generate()?;
|
||||
cert.serialize(&mut output)?;
|
||||
|
||||
writeln!(output_fprs, "{}", cert)?;
|
||||
|
||||
progress_bar.inc(1);
|
||||
}
|
||||
progress_bar.finish();
|
||||
|
||||
Ok(())
|
||||
}
|
51
tester/src/genreqs.rs
Normal file
51
tester/src/genreqs.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use std::io::Write;
|
||||
use std::{fs::File, io, io::BufRead, path::Path};
|
||||
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::{Rng, thread_rng};
|
||||
|
||||
use crate::util;
|
||||
|
||||
pub fn run(host: &str, fprs_path: impl AsRef<Path>) -> anyhow::Result<()> {
|
||||
let file = File::open(fprs_path)?;
|
||||
let fingerprints: Vec<String> = io::BufReader::new(file)
|
||||
.lines()
|
||||
.map_while(io::Result::ok)
|
||||
.collect();
|
||||
|
||||
/* possible requests:
|
||||
* /vks/v1/by-fingerprint/
|
||||
* /vks/v1/by-keyid/
|
||||
* /vks/v1/by-email/
|
||||
*/
|
||||
|
||||
let mut rng = thread_rng();
|
||||
let mut stdout = io::LineWriter::new(io::stdout());
|
||||
loop {
|
||||
let result = match rng.gen_range(0, 3) {
|
||||
0 => {
|
||||
let email = util::gen_email(rng.gen_range(0, fingerprints.len() as u64));
|
||||
stdout.write_fmt(format_args!("GET {}/vks/v1/by-email/{}\n", host, email))
|
||||
}
|
||||
1 => {
|
||||
let random_fpr = fingerprints.choose(&mut rng).unwrap();
|
||||
stdout.write_fmt(format_args!(
|
||||
"GET {}/vks/v1/by-keyid/{}\n",
|
||||
host,
|
||||
&random_fpr[24..40]
|
||||
))
|
||||
}
|
||||
_ => {
|
||||
let random_fpr = fingerprints.choose(&mut rng).unwrap();
|
||||
stdout.write_fmt(format_args!(
|
||||
"GET {}/vks/v1/by-fingerprint/{}\n",
|
||||
host, random_fpr
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
if result.is_err() {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
10
tester/src/main.rs
Normal file
10
tester/src/main.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use clap::Parser;
|
||||
|
||||
mod cli;
|
||||
mod generate;
|
||||
mod genreqs;
|
||||
mod util;
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
cli::dispatch_cmd(cli::Cli::parse())
|
||||
}
|
3
tester/src/util.rs
Normal file
3
tester/src/util.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub fn gen_email(i: u64) -> String {
|
||||
format!("{:07}@hagrid.invalid", i)
|
||||
}
|
22
wkd-domain-checker/default.nix
Normal file
22
wkd-domain-checker/default.nix
Normal file
@@ -0,0 +1,22 @@
|
||||
{ lib, python3Packages }:
|
||||
|
||||
python3Packages.buildPythonApplication {
|
||||
pname = "wkd-domain-checker";
|
||||
version = "1.0";
|
||||
|
||||
propagatedBuildInputs = with python3Packages; [
|
||||
flask
|
||||
publicsuffix2
|
||||
requests
|
||||
];
|
||||
|
||||
src = ./.;
|
||||
|
||||
meta = with lib; {
|
||||
description = "WKD domain checker for hagrid wkd gateway";
|
||||
homepage = "https://gitlab.com/keys.openpgp.org/hagrid";
|
||||
license = with licenses; [ gpl3 ];
|
||||
maintainers = with maintainers; [ valodim ];
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user