cargo: apply cargo fmt --all

Vincent Breitmoser 2022-02-26 16:54:07 +01:00
parent 961559e154
commit b29845b893
32 changed files with 2684 additions and 1866 deletions
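For reference, everything below is the mechanical reflow produced by rustfmt across the whole workspace. A minimal sketch of the invocation, using the standard cargo-fmt tooling:

    cargo fmt --all              # reformat every crate in the workspace in place
    cargo fmt --all -- --check   # report differences without writing files, e.g. in CI

The check variant is the usual way to enforce this formatting going forward without modifying the tree.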


@ -4,6 +4,5 @@ use vergen::{generate_cargo_keys, ConstantsFlags};
fn main() {
// Generate the 'cargo:' key output
generate_cargo_keys(ConstantsFlags::all())
.expect("Unable to generate the cargo keys!");
generate_cargo_keys(ConstantsFlags::all()).expect("Unable to generate the cargo keys!");
}


@ -1,19 +1,21 @@
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fs::{OpenOptions, File, create_dir_all, read_link, remove_file, rename, set_permissions, Permissions};
use std::fs::{
create_dir_all, read_link, remove_file, rename, set_permissions, File, OpenOptions, Permissions,
};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use tempfile;
use url::form_urlencoded;
use pathdiff::diff_paths;
use std::time::SystemTime;
use tempfile;
use url::form_urlencoded;
use {Database, Query};
use types::{Email, Fingerprint, KeyID};
use sync::FlockMutexGuard;
use types::{Email, Fingerprint, KeyID};
use Result;
use {Database, Query};
use wkd;
@ -51,7 +53,6 @@ fn ensure_parent(path: &Path) -> Result<&Path> {
Ok(path)
}
impl Filesystem {
pub fn new_from_base(base_dir: impl Into<PathBuf>) -> Result<Self> {
let base_dir: PathBuf = base_dir.into();
@ -164,22 +165,23 @@ impl Filesystem {
/// Returns the path to the given Email.
fn link_by_email(&self, email: &Email) -> PathBuf {
let email = form_urlencoded::byte_serialize(email.as_str().as_bytes())
.collect::<String>();
let email = form_urlencoded::byte_serialize(email.as_str().as_bytes()).collect::<String>();
self.links_dir_by_email.join(path_split(&email))
}
/// Returns the WKD path to the given Email.
fn link_wkd_by_email(&self, email: &Email) -> PathBuf {
let (encoded_local_part, domain) = wkd::encode_wkd(email.as_str()).unwrap();
let encoded_domain = form_urlencoded::byte_serialize(domain.as_bytes())
.collect::<PathBuf>();
let encoded_domain =
form_urlencoded::byte_serialize(domain.as_bytes()).collect::<PathBuf>();
[
&self.links_dir_wkd_by_email,
&encoded_domain,
&path_split(&encoded_local_part)
].iter().collect()
&path_split(&encoded_local_part),
]
.iter()
.collect()
}
/// Returns the WKD path to the given url-encoded domain and wkd-encoded local part.
@ -187,16 +189,19 @@ impl Filesystem {
[
&self.links_dir_wkd_by_email,
Path::new(&domain),
&path_split(hash)
].iter().collect()
&path_split(hash),
]
.iter()
.collect()
}
#[allow(clippy::nonminimal_bool)]
fn read_from_path(&self, path: &Path, allow_internal: bool) -> Option<String> {
use std::fs;
if !path.starts_with(&self.keys_external_dir) &&
!(allow_internal && path.starts_with(&self.keys_internal_dir)) {
if !path.starts_with(&self.keys_external_dir)
&& !(allow_internal && path.starts_with(&self.keys_internal_dir))
{
panic!("Attempted to access file outside expected dirs!");
}
@ -211,8 +216,9 @@ impl Filesystem {
fn read_from_path_bytes(&self, path: &Path, allow_internal: bool) -> Option<Vec<u8>> {
use std::fs;
if !path.starts_with(&self.keys_external_dir) &&
!(allow_internal && path.starts_with(&self.keys_internal_dir)) {
if !path.starts_with(&self.keys_external_dir)
&& !(allow_internal && path.starts_with(&self.keys_internal_dir))
{
panic!("Attempted to access file outside expected dirs!");
}
@ -286,8 +292,9 @@ impl Filesystem {
let expected = diff_paths(
&self.fingerprint_to_path_published(fpr),
link.parent().unwrap()
).unwrap();
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
@ -297,8 +304,9 @@ impl Filesystem {
let expected = diff_paths(
&self.fingerprint_to_path_published_wkd(fpr),
link.parent().unwrap()
).unwrap();
link.parent().unwrap(),
)
.unwrap();
symlink_unlink_with_check(&link, &expected)
}
@ -317,8 +325,8 @@ impl Filesystem {
tpks: &mut HashMap<Fingerprint, Cert>,
check: impl Fn(&Path, &Cert, &Fingerprint) -> Result<()>,
) -> Result<()> {
use walkdir::WalkDir;
use std::fs;
use walkdir::WalkDir;
for entry in WalkDir::new(checks_dir) {
let entry = entry?;
@ -331,23 +339,19 @@ impl Filesystem {
// Compute the corresponding primary fingerprint just
// by looking at the paths.
let primary_fp = Filesystem::path_to_primary(path)
.ok_or_else(
|| format_err!("Malformed path: {:?}",
path.read_link().unwrap()))?;
.ok_or_else(|| format_err!("Malformed path: {:?}", path.read_link().unwrap()))?;
// Load into cache.
if ! tpks.contains_key(&primary_fp) {
if !tpks.contains_key(&primary_fp) {
tpks.insert(
primary_fp.clone(),
self.lookup(&Query::ByFingerprint(primary_fp.clone()))
?.ok_or_else(
|| format_err!("No Cert with fingerprint {:?}",
primary_fp))?);
self.lookup(&Query::ByFingerprint(primary_fp.clone()))?
.ok_or_else(|| format_err!("No Cert with fingerprint {:?}", primary_fp))?,
);
}
let tpk = tpks.get(&primary_fp)
.ok_or_else(
|| format_err!("Broken symlink {:?}: No such Key {}",
path, primary_fp))?;
let tpk = tpks.get(&primary_fp).ok_or_else(|| {
format_err!("Broken symlink {:?}: No such Key {}", path, primary_fp)
})?;
check(path, tpk, &primary_fp)?;
}
@ -433,7 +437,11 @@ impl Database for Filesystem {
Ok(())
}
fn move_tmp_to_published_wkd(&self, file: Option<Self::TempCert>, fpr: &Fingerprint) -> Result<()> {
fn move_tmp_to_published_wkd(
&self,
file: Option<Self::TempCert>,
fpr: &Fingerprint,
) -> Result<()> {
if self.dry_run {
return Ok(());
}
@ -461,24 +469,30 @@ impl Database for Filesystem {
Ok(())
}
fn check_link_fpr(&self, fpr: &Fingerprint, fpr_target: &Fingerprint) -> Result<Option<Fingerprint>> {
fn check_link_fpr(
&self,
fpr: &Fingerprint,
fpr_target: &Fingerprint,
) -> Result<Option<Fingerprint>> {
let link_keyid = self.link_by_keyid(&fpr.into());
let link_fpr = self.link_by_fingerprint(fpr);
let path_published = self.fingerprint_to_path_published(fpr_target);
if let Ok(link_fpr_target) = link_fpr.canonicalize() {
if !link_fpr_target.ends_with(&path_published) {
if !link_fpr_target.ends_with(&path_published) {
info!("Fingerprint points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_fpr_target);
return Err(anyhow!(format!("Fingerprint collision for key {}", fpr)));
}
}
}
if let Ok(link_keyid_target) = link_keyid.canonicalize() {
if !link_keyid_target.ends_with(&path_published) {
info!("KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_keyid_target);
info!(
"KeyID points to different key for {} (expected {:?} to be suffix of {:?})",
fpr, &path_published, &link_keyid_target
);
return Err(anyhow!(format!("KeyID collision for key {}", fpr)));
}
}
@ -496,7 +510,7 @@ impl Database for Filesystem {
ByFingerprint(ref fp) => self.link_by_fingerprint(fp),
ByKeyID(ref keyid) => self.link_by_keyid(keyid),
ByEmail(ref email) => self.link_by_email(email),
_ => return None
_ => return None,
};
path.read_link()
.ok()
@ -527,8 +541,11 @@ impl Database for Filesystem {
let link_fpr = self.link_by_fingerprint(from);
let link_keyid = self.link_by_keyid(&from.into());
let target = diff_paths(&self.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap()).unwrap();
let target = diff_paths(
&self.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap(),
)
.unwrap();
symlink(&target, ensure_parent(&link_fpr)?)?;
symlink(&target, ensure_parent(&link_keyid)?)
@ -537,10 +554,13 @@ impl Database for Filesystem {
fn unlink_fpr(&self, from: &Fingerprint, primary_fpr: &Fingerprint) -> Result<()> {
let link_fpr = self.link_by_fingerprint(from);
let link_keyid = self.link_by_keyid(&from.into());
let expected = diff_paths(&self.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap()).unwrap();
let expected = diff_paths(
&self.fingerprint_to_path_published(primary_fpr),
link_fpr.parent().unwrap(),
)
.unwrap();
if let Ok(target) = read_link(&link_fpr) {
if let Ok(target) = read_link(&link_fpr) {
if target == expected {
remove_file(&link_fpr)?;
}
@ -604,7 +624,9 @@ impl Database for Filesystem {
// A cache of all Certs, for quick lookups.
let mut tpks = HashMap::new();
self.perform_checks(&self.keys_dir_published, &mut tpks,
self.perform_checks(
&self.keys_dir_published,
&mut tpks,
|path, _, primary_fp| {
// The KeyID corresponding with this path.
let fp = Filesystem::path_to_fingerprint(path)
@ -614,131 +636,141 @@ impl Database for Filesystem {
return Err(format_err!(
"{:?} points to the wrong Cert, expected {} \
but found {}",
path, fp, primary_fp));
path,
fp,
primary_fp
));
}
Ok(())
}
},
)?;
self.perform_checks(&self.keys_dir_published, &mut tpks,
|_, tpk, primary_fp| {
// check that certificate exists in published wkd path
let path_wkd = self.fingerprint_to_path_published_wkd(primary_fp);
let should_wkd_exist = tpk.userids().next().is_some();
self.perform_checks(&self.keys_dir_published, &mut tpks, |_, tpk, primary_fp| {
// check that certificate exists in published wkd path
let path_wkd = self.fingerprint_to_path_published_wkd(primary_fp);
let should_wkd_exist = tpk.userids().next().is_some();
if should_wkd_exist && !path_wkd.exists() {
return Err(format_err!("Missing wkd for fp {}", primary_fp));
};
if !should_wkd_exist && path_wkd.exists() {
return Err(format_err!("Incorrectly present wkd for fp {}", primary_fp));
};
Ok(())
}
)?;
if should_wkd_exist && !path_wkd.exists() {
return Err(format_err!("Missing wkd for fp {}", primary_fp));
};
if !should_wkd_exist && path_wkd.exists() {
return Err(format_err!("Incorrectly present wkd for fp {}", primary_fp));
};
Ok(())
})?;
// check that all subkeys are linked
self.perform_checks(&self.keys_dir_published, &mut tpks,
|_, tpk, primary_fp| {
let policy = &POLICY;
let fingerprints = tpk
.keys()
.with_policy(policy, None)
.for_certification()
.for_signing()
.map(|amalgamation| amalgamation.key().fingerprint())
.map(Fingerprint::try_from)
.flatten();
self.perform_checks(&self.keys_dir_published, &mut tpks, |_, tpk, primary_fp| {
let policy = &POLICY;
let fingerprints = tpk
.keys()
.with_policy(policy, None)
.for_certification()
.for_signing()
.map(|amalgamation| amalgamation.key().fingerprint())
.map(Fingerprint::try_from)
.flatten();
for fpr in fingerprints {
if let Some(missing_fpr) = self.check_link_fpr(&fpr, primary_fp)? {
return Err(format_err!(
"Missing link to key {} for sub {}", primary_fp, missing_fpr));
}
for fpr in fingerprints {
if let Some(missing_fpr) = self.check_link_fpr(&fpr, primary_fp)? {
return Err(format_err!(
"Missing link to key {} for sub {}",
primary_fp,
missing_fpr
));
}
Ok(())
}
)?;
Ok(())
})?;
// check that all published uids are linked
self.perform_checks(&self.keys_dir_published, &mut tpks,
|_, tpk, primary_fp| {
let emails = tpk
.userids()
.map(|binding| binding.userid().clone())
.map(|userid| Email::try_from(&userid).unwrap());
self.perform_checks(&self.keys_dir_published, &mut tpks, |_, tpk, primary_fp| {
let emails = tpk
.userids()
.map(|binding| binding.userid().clone())
.map(|userid| Email::try_from(&userid).unwrap());
for email in emails {
let email_path = self.link_by_email(&email);
if !email_path.exists() {
return Err(format_err!(
"Missing link to key {} for email {}", primary_fp, email));
}
let email_wkd_path = self.link_wkd_by_email(&email);
if !email_wkd_path.exists() {
return Err(format_err!(
"Missing wkd link to key {} for email {}", primary_fp, email));
}
}
Ok(())
}
)?;
self.perform_checks(&self.links_dir_by_fingerprint, &mut tpks,
|path, tpk, _| {
// The KeyID corresponding with this path.
let id = Filesystem::path_to_keyid(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let found = tpk.keys()
.map(|amalgamation| KeyID::try_from(amalgamation.key().fingerprint()).unwrap())
.any(|key_fp| key_fp == id);
if ! found {
for email in emails {
let email_path = self.link_by_email(&email);
if !email_path.exists() {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the (sub)key {}", path, id));
"Missing link to key {} for email {}",
primary_fp,
email
));
}
Ok(())
}
)?;
self.perform_checks(&self.links_dir_by_keyid, &mut tpks,
|path, tpk, _| {
// The KeyID corresponding with this path.
let id = Filesystem::path_to_keyid(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let found = tpk.keys()
.map(|amalgamation| KeyID::try_from(amalgamation.key().fingerprint()).unwrap())
.any(|key_fp| key_fp == id);
if ! found {
let email_wkd_path = self.link_wkd_by_email(&email);
if !email_wkd_path.exists() {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the (sub)key {}", path, id));
"Missing wkd link to key {} for email {}",
primary_fp,
email
));
}
Ok(())
}
)?;
Ok(())
})?;
self.perform_checks(&self.links_dir_by_email, &mut tpks,
|path, tpk, _| {
// The Email corresponding with this path.
let email = Filesystem::path_to_email(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let mut found = false;
for uidb in tpk.userids() {
if Email::try_from(uidb.userid()).unwrap() == email
{
found = true;
break;
}
self.perform_checks(&self.links_dir_by_fingerprint, &mut tpks, |path, tpk, _| {
// The KeyID corresponding with this path.
let id = Filesystem::path_to_keyid(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let found = tpk
.keys()
.map(|amalgamation| KeyID::try_from(amalgamation.key().fingerprint()).unwrap())
.any(|key_fp| key_fp == id);
if !found {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the (sub)key {}",
path,
id
));
}
Ok(())
})?;
self.perform_checks(&self.links_dir_by_keyid, &mut tpks, |path, tpk, _| {
// The KeyID corresponding with this path.
let id = Filesystem::path_to_keyid(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let found = tpk
.keys()
.map(|amalgamation| KeyID::try_from(amalgamation.key().fingerprint()).unwrap())
.any(|key_fp| key_fp == id);
if !found {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the (sub)key {}",
path,
id
));
}
Ok(())
})?;
self.perform_checks(&self.links_dir_by_email, &mut tpks, |path, tpk, _| {
// The Email corresponding with this path.
let email = Filesystem::path_to_email(path)
.ok_or_else(|| format_err!("Malformed path: {:?}", path))?;
let mut found = false;
for uidb in tpk.userids() {
if Email::try_from(uidb.userid()).unwrap() == email {
found = true;
break;
}
if ! found {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the email {}", path, email));
}
Ok(())
}
if !found {
return Err(format_err!(
"{:?} points to the wrong Cert, the Cert does not \
contain the email {}",
path,
email
));
}
Ok(())
})?;
Ok(())
@ -754,7 +786,13 @@ fn path_split(path: &str) -> PathBuf {
}
fn path_merge(path: &Path) -> String {
let comps = path.iter().rev().take(3).collect::<Vec<_>>().into_iter().rev();
let comps = path
.iter()
.rev()
.take(3)
.collect::<Vec<_>>()
.into_iter()
.rev();
let comps: Vec<_> = comps.map(|os| os.to_string_lossy()).collect();
comps.join("")
}
@ -762,9 +800,9 @@ fn path_merge(path: &Path) -> String {
#[cfg(test)]
mod tests {
use super::*;
use test;
use openpgp::cert::CertBuilder;
use tempfile::TempDir;
use test;
#[test]
fn init() {
@ -783,18 +821,48 @@ mod tests {
#[test]
fn new() {
let (_tmp_dir, db, _log_path) = open_db();
let k1 = CertBuilder::new().add_userid("a@invalid.example.org")
.generate().unwrap().0;
let k2 = CertBuilder::new().add_userid("b@invalid.example.org")
.generate().unwrap().0;
let k3 = CertBuilder::new().add_userid("c@invalid.example.org")
.generate().unwrap().0;
let k1 = CertBuilder::new()
.add_userid("a@invalid.example.org")
.generate()
.unwrap()
.0;
let k2 = CertBuilder::new()
.add_userid("b@invalid.example.org")
.generate()
.unwrap()
.0;
let k3 = CertBuilder::new()
.add_userid("c@invalid.example.org")
.generate()
.unwrap()
.0;
assert!(db.merge(k1).unwrap().into_tpk_status().email_status.len() > 0);
assert!(db.merge(k2.clone()).unwrap().into_tpk_status().email_status.len() > 0);
assert!(
db.merge(k2.clone())
.unwrap()
.into_tpk_status()
.email_status
.len()
> 0
);
assert!(!db.merge(k2).unwrap().into_tpk_status().email_status.len() > 0);
assert!(db.merge(k3.clone()).unwrap().into_tpk_status().email_status.len() > 0);
assert!(!db.merge(k3.clone()).unwrap().into_tpk_status().email_status.len() > 0);
assert!(
db.merge(k3.clone())
.unwrap()
.into_tpk_status()
.email_status
.len()
> 0
);
assert!(
!db.merge(k3.clone())
.unwrap()
.into_tpk_status()
.email_status
.len()
> 0
);
assert!(!db.merge(k3).unwrap().into_tpk_status().email_status.len() > 0);
}
@ -915,11 +983,12 @@ mod tests {
let tmpdir = TempDir::new().unwrap();
let db = Filesystem::new_from_base(tmpdir.path()).unwrap();
let fp: Fingerprint =
"CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();
let fp: Fingerprint = "CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();
assert_eq!(Filesystem::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
Some(fp.clone()));
assert_eq!(
Filesystem::path_to_fingerprint(&db.link_by_fingerprint(&fp)),
Some(fp.clone())
);
db.check_consistency().expect("inconsistent database");
}


@ -14,6 +14,8 @@ extern crate fs2;
extern crate idna;
#[macro_use]
extern crate log;
extern crate chrono;
extern crate hex;
extern crate pathdiff;
extern crate rand;
extern crate serde;
@ -21,24 +23,17 @@ extern crate serde_json;
extern crate tempfile;
extern crate time;
extern crate url;
extern crate hex;
extern crate walkdir;
extern crate chrono;
extern crate zbase32;
extern crate sequoia_openpgp as openpgp;
use openpgp::{
Cert,
packet::UserID,
parse::Parse,
types::KeyFlags,
};
use openpgp::{packet::UserID, parse::Parse, types::KeyFlags, Cert};
pub mod types;
use types::{Email, Fingerprint, KeyID};
pub mod wkd;
pub mod sync;
pub mod wkd;
mod fs;
pub use self::fs::Filesystem as KeyDatabase;
@ -47,7 +42,7 @@ mod stateful_tokens;
pub use stateful_tokens::StatefulTokens;
mod openpgp_utils;
use openpgp_utils::{tpk_filter_alive_emails, tpk_to_string, tpk_clean, is_status_revoked, POLICY};
use openpgp_utils::{is_status_revoked, tpk_clean, tpk_filter_alive_emails, tpk_to_string, POLICY};
#[cfg(test)]
mod test;
@ -74,8 +69,8 @@ impl FromStr for Query {
fn from_str(term: &str) -> Result<Self> {
use self::Query::*;
let looks_like_short_key_id = !term.contains('@') &&
(term.starts_with("0x") && term.len() < 16 || term.len() == 8);
let looks_like_short_key_id =
!term.contains('@') && (term.starts_with("0x") && term.len() < 16 || term.len() == 8);
if looks_like_short_key_id {
Ok(InvalidShort())
} else if let Ok(fp) = Fingerprint::from_str(term) {
@ -90,7 +85,7 @@ impl FromStr for Query {
}
}
#[derive(Debug,PartialEq,Eq,PartialOrd,Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum EmailAddressStatus {
Published,
NotPublished,
@ -113,10 +108,10 @@ impl ImportResult {
}
}
#[derive(Debug,PartialEq)]
#[derive(Debug, PartialEq)]
pub struct TpkStatus {
pub is_revoked: bool,
pub email_status: Vec<(Email,EmailAddressStatus)>,
pub email_status: Vec<(Email, EmailAddressStatus)>,
pub unparsed_uids: usize,
}
@ -151,7 +146,11 @@ pub trait Database: Sync + Send {
fn by_email_wkd(&self, email: &Email) -> Option<Vec<u8>>;
fn by_domain_and_hash_wkd(&self, domain: &str, hash: &str) -> Option<Vec<u8>>;
fn check_link_fpr(&self, fpr: &Fingerprint, target: &Fingerprint) -> Result<Option<Fingerprint>>;
fn check_link_fpr(
&self,
fpr: &Fingerprint,
target: &Fingerprint,
) -> Result<Option<Fingerprint>>;
fn by_fpr_full(&self, fpr: &Fingerprint) -> Option<String>;
fn by_primary_fpr(&self, fpr: &Fingerprint) -> Option<String>;
@ -159,7 +158,11 @@ pub trait Database: Sync + Send {
fn write_to_temp(&self, content: &[u8]) -> Result<Self::TempCert>;
fn move_tmp_to_full(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
fn move_tmp_to_published(&self, content: Self::TempCert, fpr: &Fingerprint) -> Result<()>;
fn move_tmp_to_published_wkd(&self, content: Option<Self::TempCert>, fpr: &Fingerprint) -> Result<()>;
fn move_tmp_to_published_wkd(
&self,
content: Option<Self::TempCert>,
fpr: &Fingerprint,
) -> Result<()>;
fn write_to_quarantine(&self, fpr: &Fingerprint, content: &[u8]) -> Result<()>;
fn write_log_append(&self, filename: &str, fpr_primary: &Fingerprint) -> Result<()>;
@ -204,7 +207,8 @@ pub trait Database: Sync + Send {
.map(|binding| binding.userid().clone())
.collect();
let full_tpk_old = self.by_fpr_full(&fpr_primary)
let full_tpk_old = self
.by_fpr_full(&fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok());
let is_update = full_tpk_old.is_some();
let (full_tpk_new, full_tpk_unchanged) = if let Some(full_tpk_old) = full_tpk_old {
@ -217,9 +221,9 @@ pub trait Database: Sync + Send {
let is_revoked = is_status_revoked(full_tpk_new.revocation_status(&POLICY, None));
let is_ok = is_revoked ||
full_tpk_new.keys().subkeys().next().is_some() ||
full_tpk_new.userids().next().is_some();
let is_ok = is_revoked
|| full_tpk_new.keys().subkeys().next().is_some()
|| full_tpk_new.userids().next().is_some();
if !is_ok {
// self.write_to_quarantine(&fpr_primary, &tpk_to_string(&full_tpk_new)?)?;
return Err(anyhow!("Not a well-formed key!"));
@ -228,7 +232,10 @@ pub trait Database: Sync + Send {
let published_tpk_old = self
.by_fpr(&fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok());
let published_emails = published_tpk_old.as_ref().map(tpk_get_emails).unwrap_or_default();
let published_emails = published_tpk_old
.as_ref()
.map(tpk_get_emails)
.unwrap_or_default();
let unparsed_uids = full_tpk_new
.userids()
@ -246,7 +253,9 @@ pub trait Database: Sync + Send {
}
})
.flatten()
.filter(|(binding, email)| known_uids.contains(binding.userid()) || published_emails.contains(email))
.filter(|(binding, email)| {
known_uids.contains(binding.userid()) || published_emails.contains(email)
})
.flat_map(|(binding, email)| {
if is_status_revoked(binding.revocation_status(&POLICY, None)) {
Some((email, EmailAddressStatus::Revoked))
@ -264,7 +273,11 @@ pub trait Database: Sync + Send {
// Abort if no changes were made
if full_tpk_unchanged {
return Ok(ImportResult::Unchanged(TpkStatus { is_revoked, email_status, unparsed_uids }));
return Ok(ImportResult::Unchanged(TpkStatus {
is_revoked,
email_status,
unparsed_uids,
}));
}
let published_tpk_new = if is_revoked {
@ -284,7 +297,8 @@ pub trait Database: Sync + Send {
.flatten()
.any(|unrevoked_email| &unrevoked_email == *email);
!has_unrevoked_userid
}).collect();
})
.collect();
let fingerprints = tpk_get_linkable_fprs(&published_tpk_new);
@ -321,22 +335,31 @@ pub trait Database: Sync + Send {
for fpr in fpr_not_linked {
if let Err(e) = self.link_fpr(&fpr, &fpr_primary) {
info!("Error ensuring symlink! {} {} {:?}",
&fpr, &fpr_primary, e);
info!("Error ensuring symlink! {} {} {:?}", &fpr, &fpr_primary, e);
}
}
for revoked_email in newly_revoked_emails {
if let Err(e) = self.unlink_email(revoked_email, &fpr_primary) {
info!("Error ensuring symlink! {} {} {:?}",
&fpr_primary, &revoked_email, e);
info!(
"Error ensuring symlink! {} {} {:?}",
&fpr_primary, &revoked_email, e
);
}
}
if is_update {
Ok(ImportResult::Updated(TpkStatus { is_revoked, email_status, unparsed_uids }))
Ok(ImportResult::Updated(TpkStatus {
is_revoked,
email_status,
unparsed_uids,
}))
} else {
Ok(ImportResult::New(TpkStatus { is_revoked, email_status, unparsed_uids }))
Ok(ImportResult::New(TpkStatus {
is_revoked,
email_status,
unparsed_uids,
}))
}
}
@ -352,8 +375,13 @@ pub trait Database: Sync + Send {
Utc::now().format("%Y-%m-%d").to_string()
}
fn get_tpk_status(&self, fpr_primary: &Fingerprint, known_addresses: &[Email]) -> Result<TpkStatus> {
let tpk_full = self.by_fpr_full(fpr_primary)
fn get_tpk_status(
&self,
fpr_primary: &Fingerprint,
known_addresses: &[Email],
) -> Result<TpkStatus> {
let tpk_full = self
.by_fpr_full(fpr_primary)
.ok_or_else(|| anyhow!("Key not in database!"))
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()))?;
@ -368,10 +396,12 @@ pub trait Database: Sync + Send {
let published_uids: Vec<UserID> = self
.by_fpr(fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
.map(|tpk| tpk.userids()
.map(|binding| binding.userid().clone())
.collect()
).unwrap_or_default();
.map(|tpk| {
tpk.userids()
.map(|binding| binding.userid().clone())
.collect()
})
.unwrap_or_default();
let mut email_status: Vec<_> = tpk_full
.userids()
@ -397,7 +427,11 @@ pub trait Database: Sync + Send {
// the same address, we keep the first.
email_status.dedup_by(|(e1, _), (e2, _)| e1 == e2);
Ok(TpkStatus { is_revoked, email_status, unparsed_uids })
Ok(TpkStatus {
is_revoked,
email_status,
unparsed_uids,
})
}
/// Complex operation that publishes some user id for a Cert already in the database.
@ -418,18 +452,22 @@ pub trait Database: Sync + Send {
self.nolock_unlink_email_if_other(fpr_primary, email_new)?;
let full_tpk = self.by_fpr_full(fpr_primary)
let full_tpk = self
.by_fpr_full(fpr_primary)
.ok_or_else(|| anyhow!("Key not in database!"))
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()))?;
let published_uids_old: Vec<UserID> = self
.by_fpr(fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
.map(|tpk| tpk.userids()
.map(|binding| binding.userid().clone())
.collect()
).unwrap_or_default();
let published_emails_old: Vec<Email> = published_uids_old.iter()
.map(|tpk| {
tpk.userids()
.map(|binding| binding.userid().clone())
.collect()
})
.unwrap_or_default();
let published_emails_old: Vec<Email> = published_uids_old
.iter()
.map(|uid| Email::try_from(uid).ok())
.flatten()
.collect();
@ -449,8 +487,9 @@ pub trait Database: Sync + Send {
.userids()
.map(|binding| Email::try_from(binding.userid()))
.flatten()
.any(|email| email == *email_new) {
return Err(anyhow!("Requested UserID not found!"));
.any(|email| email == *email_new)
{
return Err(anyhow!("Requested UserID not found!"));
}
let published_tpk_clean = tpk_clean(&published_tpk_new)?;
@ -462,8 +501,10 @@ pub trait Database: Sync + Send {
self.update_write_log(fpr_primary);
if let Err(e) = self.link_email(email_new, fpr_primary) {
info!("Error ensuring email symlink! {} -> {} {:?}",
&email_new, &fpr_primary, e);
info!(
"Error ensuring email symlink! {} -> {} {:?}",
&email_new, &fpr_primary, e
);
}
Ok(())
@ -474,13 +515,15 @@ pub trait Database: Sync + Send {
fpr_primary: &Fingerprint,
unlink_email: &Email,
) -> Result<()> {
let current_link_fpr = self.lookup_primary_fingerprint(
&Query::ByEmail(unlink_email.clone()));
let current_link_fpr =
self.lookup_primary_fingerprint(&Query::ByEmail(unlink_email.clone()));
if let Some(current_fpr) = current_link_fpr {
if current_fpr != *fpr_primary {
self.nolock_set_email_unpublished_filter(&current_fpr,
|uid| Email::try_from(uid).map(|email| email != *unlink_email)
.unwrap_or(false))?;
self.nolock_set_email_unpublished_filter(&current_fpr, |uid| {
Email::try_from(uid)
.map(|email| email != *unlink_email)
.unwrap_or(false)
})?;
}
}
Ok(())
@ -513,7 +556,8 @@ pub trait Database: Sync + Send {
fpr_primary: &Fingerprint,
email_remove: impl Fn(&UserID) -> bool,
) -> Result<()> {
let published_tpk_old = self.by_fpr(fpr_primary)
let published_tpk_old = self
.by_fpr(fpr_primary)
.ok_or_else(|| anyhow!("Key not in database!"))
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()))?;
@ -523,8 +567,7 @@ pub trait Database: Sync + Send {
.flatten()
.collect();
let published_tpk_new = published_tpk_old.retain_userids(
|uid| email_remove(uid.userid()));
let published_tpk_new = published_tpk_old.retain_userids(|uid| email_remove(uid.userid()));
let published_emails_new: Vec<Email> = published_tpk_new
.userids()
@ -546,37 +589,31 @@ pub trait Database: Sync + Send {
for unpublished_email in unpublished_emails {
if let Err(e) = self.unlink_email(unpublished_email, fpr_primary) {
info!("Error deleting email symlink! {} -> {} {:?}",
&unpublished_email, &fpr_primary, e);
info!(
"Error deleting email symlink! {} -> {} {:?}",
&unpublished_email, &fpr_primary, e
);
}
}
Ok(())
}
fn set_email_unpublished(
&self,
fpr_primary: &Fingerprint,
email_remove: &Email,
) -> Result<()> {
self.set_email_unpublished_filter(fpr_primary, |uid|
fn set_email_unpublished(&self, fpr_primary: &Fingerprint, email_remove: &Email) -> Result<()> {
self.set_email_unpublished_filter(fpr_primary, |uid| {
Email::try_from(uid)
.map(|email| email != *email_remove)
.unwrap_or(false))
.unwrap_or(false)
})
}
fn set_email_unpublished_all(
&self,
fpr_primary: &Fingerprint,
) -> Result<()> {
fn set_email_unpublished_all(&self, fpr_primary: &Fingerprint) -> Result<()> {
self.set_email_unpublished_filter(fpr_primary, |_| false)
}
fn regenerate_links(
&self,
fpr_primary: &Fingerprint,
) -> Result<RegenerateResult> {
let tpk = self.by_primary_fpr(fpr_primary)
fn regenerate_links(&self, fpr_primary: &Fingerprint) -> Result<RegenerateResult> {
let tpk = self
.by_primary_fpr(fpr_primary)
.and_then(|bytes| Cert::from_bytes(bytes.as_bytes()).ok())
.ok_or_else(|| anyhow!("Key not in database!"))?;
@ -619,11 +656,7 @@ pub trait Database: Sync + Send {
}
}
fn regenerate_wkd(
&self,
fpr_primary: &Fingerprint,
published_tpk: &Cert
) -> Result<()> {
fn regenerate_wkd(&self, fpr_primary: &Fingerprint, published_tpk: &Cert) -> Result<()> {
let published_wkd_tpk_tmp = if published_tpk.userids().next().is_some() {
Some(self.write_to_temp(&published_tpk.export_to_vec()?)?)
} else {
@ -636,30 +669,33 @@ pub trait Database: Sync + Send {
}
fn tpk_get_emails(cert: &Cert) -> Vec<Email> {
cert
.userids()
cert.userids()
.map(|binding| Email::try_from(binding.userid()))
.flatten()
.collect()
}
pub fn tpk_get_linkable_fprs(tpk: &Cert) -> Vec<Fingerprint> {
let signing_capable = &KeyFlags::empty()
.set_signing()
.set_certification();
let signing_capable = &KeyFlags::empty().set_signing().set_certification();
let fpr_primary = &Fingerprint::try_from(tpk.fingerprint()).unwrap();
tpk
.keys()
.into_iter()
.flat_map(|bundle| {
Fingerprint::try_from(bundle.key().fingerprint())
.map(|fpr| (fpr, bundle.binding_signature(&POLICY, None).ok().and_then(|sig| sig.key_flags())))
tpk.keys()
.into_iter()
.flat_map(|bundle| {
Fingerprint::try_from(bundle.key().fingerprint()).map(|fpr| {
(
fpr,
bundle
.binding_signature(&POLICY, None)
.ok()
.and_then(|sig| sig.key_flags()),
)
})
.filter(|(fpr, flags)| {
fpr == fpr_primary ||
flags.is_none() ||
!(signing_capable & flags.as_ref().unwrap()).is_empty()
})
.map(|(fpr,_)| fpr)
.collect()
})
.filter(|(fpr, flags)| {
fpr == fpr_primary
|| flags.is_none()
|| !(signing_capable & flags.as_ref().unwrap()).is_empty()
})
.map(|(fpr, _)| fpr)
.collect()
}


@ -2,11 +2,8 @@ use openpgp::Result;
use std::convert::TryFrom;
use openpgp::{
Cert,
types::RevocationStatus,
cert::prelude::*,
serialize::SerializeInto as _,
policy::StandardPolicy,
cert::prelude::*, policy::StandardPolicy, serialize::SerializeInto as _,
types::RevocationStatus, Cert,
};
use Email;
@ -33,24 +30,42 @@ pub fn tpk_clean(tpk: &Cert) -> Result<Cert> {
// The primary key and related signatures.
let pk_bundle = tpk.primary_key().bundle();
acc.push(pk_bundle.key().clone().into());
for s in pk_bundle.self_signatures() { acc.push(s.clone().into()) }
for s in pk_bundle.self_revocations() { acc.push(s.clone().into()) }
for s in pk_bundle.other_revocations() { acc.push(s.clone().into()) }
for s in pk_bundle.self_signatures() {
acc.push(s.clone().into())
}
for s in pk_bundle.self_revocations() {
acc.push(s.clone().into())
}
for s in pk_bundle.other_revocations() {
acc.push(s.clone().into())
}
// The subkeys and related signatures.
for skb in tpk.keys().subkeys() {
acc.push(skb.key().clone().into());
for s in skb.self_signatures() { acc.push(s.clone().into()) }
for s in skb.self_revocations() { acc.push(s.clone().into()) }
for s in skb.other_revocations() { acc.push(s.clone().into()) }
for s in skb.self_signatures() {
acc.push(s.clone().into())
}
for s in skb.self_revocations() {
acc.push(s.clone().into())
}
for s in skb.other_revocations() {
acc.push(s.clone().into())
}
}
// The UserIDs.
for uidb in tpk.userids() {
acc.push(uidb.userid().clone().into());
for s in uidb.self_signatures() { acc.push(s.clone().into()) }
for s in uidb.self_revocations() { acc.push(s.clone().into()) }
for s in uidb.other_revocations() { acc.push(s.clone().into()) }
for s in uidb.self_signatures() {
acc.push(s.clone().into())
}
for s in uidb.self_revocations() {
acc.push(s.clone().into())
}
for s in uidb.other_revocations() {
acc.push(s.clone().into())
}
// Reasoning about the currently attested certifications
// requires a policy.


@ -1,6 +1,6 @@
use std::io::{Read,Write};
use std::path::PathBuf;
use std::fs::{create_dir_all, remove_file, File};
use std::io::{Read, Write};
use std::path::PathBuf;
use std::str;

File diff suppressed because it is too large.


@ -3,10 +3,10 @@ use std::fmt;
use std::result;
use std::str::FromStr;
use anyhow::Error;
use openpgp::packet::UserID;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use anyhow::Error;
use {Result};
use Result;
/// Holds a normalized email address.
///
@ -86,8 +86,7 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> Result<Self> {
match fpr {
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a)),
sequoia_openpgp::Fingerprint::Invalid(_) =>
Err(anyhow!("invalid fingerprint")),
sequoia_openpgp::Fingerprint::Invalid(_) => Err(anyhow!("invalid fingerprint")),
_ => Err(anyhow!("unknown fingerprint type")),
}
}
@ -95,7 +94,7 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for Fingerprint {
impl fmt::Display for Fingerprint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use ::hex::ToHex;
use hex::ToHex;
self.0.write_hex_upper(f)
}
}
@ -116,8 +115,7 @@ impl<'de> Deserialize<'de> for Fingerprint {
{
use serde::de::Error;
String::deserialize(deserializer).and_then(|string| {
Self::from_str(&string)
.map_err(|err| Error::custom(err.to_string()))
Self::from_str(&string).map_err(|err| Error::custom(err.to_string()))
})
}
}
@ -128,8 +126,9 @@ impl FromStr for Fingerprint {
fn from_str(s: &str) -> Result<Fingerprint> {
match sequoia_openpgp::Fingerprint::from_hex(s)? {
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a)),
sequoia_openpgp::Fingerprint::Invalid(_) =>
Err(anyhow!("'{}' is not a valid fingerprint", s)),
sequoia_openpgp::Fingerprint::Invalid(_) => {
Err(anyhow!("'{}' is not a valid fingerprint", s))
}
_ => Err(anyhow!("unknown fingerprint type")),
}
}
@ -144,9 +143,7 @@ impl TryFrom<sequoia_openpgp::Fingerprint> for KeyID {
fn try_from(fpr: sequoia_openpgp::Fingerprint) -> Result<Self> {
match fpr {
sequoia_openpgp::Fingerprint::V4(a) => Ok(Fingerprint(a).into()),
sequoia_openpgp::Fingerprint::Invalid(_) => {
Err(anyhow!("invalid fingerprint"))
},
sequoia_openpgp::Fingerprint::Invalid(_) => Err(anyhow!("invalid fingerprint")),
_ => Err(anyhow!("unknown fingerprint type")),
}
}
@ -172,7 +169,7 @@ impl From<Fingerprint> for KeyID {
impl fmt::Display for KeyID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use ::hex::ToHex;
use hex::ToHex;
self.0.write_hex_upper(f)
}
}
@ -183,8 +180,9 @@ impl FromStr for KeyID {
fn from_str(s: &str) -> Result<KeyID> {
match sequoia_openpgp::KeyID::from_hex(s)? {
sequoia_openpgp::KeyID::V4(a) => Ok(KeyID(a)),
sequoia_openpgp::KeyID::Invalid(_) =>
Err(anyhow!("'{}' is not a valid long key ID", s)),
sequoia_openpgp::KeyID::Invalid(_) => {
Err(anyhow!("'{}' is not a valid long key ID", s))
}
_ => Err(anyhow!("unknown keyid type")),
}
}
@ -203,10 +201,11 @@ mod tests {
assert_eq!(c("Foo Bar <foo@example.org>").as_str(), "foo@example.org");
// FIXME gotta fix this
// assert_eq!(c("foo@example.org <foo@example.org>").as_str(), "foo@example.org");
assert_eq!(c("\"Foo Bar\" <foo@example.org>").as_str(),
"foo@example.org");
assert_eq!(c("foo@👍.example.org").as_str(),
"foo@xn--yp8h.example.org");
assert_eq!(
c("\"Foo Bar\" <foo@example.org>").as_str(),
"foo@example.org"
);
assert_eq!(c("foo@👍.example.org").as_str(), "foo@xn--yp8h.example.org");
assert_eq!(c("Foo@example.org").as_str(), "foo@example.org");
assert_eq!(c("foo@EXAMPLE.ORG").as_str(), "foo@example.org");
}


@ -1,11 +1,11 @@
use super::Result;
use crate::openpgp::types::HashAlgorithm;
use zbase32;
use super::Result;
// cannibalized from
// https://gitlab.com/sequoia-pgp/sequoia/blob/master/net/src/wkd.rs
pub fn encode_wkd(address: impl AsRef<str>) -> Result<(String,String)> {
pub fn encode_wkd(address: impl AsRef<str>) -> Result<(String, String)> {
let (local_part, domain) = split_address(address)?;
let local_part_encoded = encode_local_part(local_part);
@ -13,7 +13,7 @@ pub fn encode_wkd(address: impl AsRef<str>) -> Result<(String,String)> {
Ok((local_part_encoded, domain))
}
fn split_address(email_address: impl AsRef<str>) -> Result<(String,String)> {
fn split_address(email_address: impl AsRef<str>) -> Result<(String, String)> {
let email_address = email_address.as_ref();
let v: Vec<&str> = email_address.split('@').collect();
if v.len() != 2 {


@ -1,22 +1,22 @@
use std::path::{Path,PathBuf};
use std::cmp::min;
use std::fs::File;
use std::io::Read;
use std::thread;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::thread;
use anyhow::Result;
extern crate tempfile;
extern crate sequoia_openpgp as openpgp;
use openpgp::Packet;
use openpgp::parse::{PacketParser, PacketParserResult, Parse};
use openpgp::Packet;
extern crate hagrid_database as database;
use database::{Database, KeyDatabase, ImportResult};
use database::{Database, ImportResult, KeyDatabase};
use indicatif::{MultiProgress,ProgressBar,ProgressStyle};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use HagridConfig;
@ -38,8 +38,7 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
let config = config.clone();
let multi_progress = multi_progress.clone();
thread::spawn(move || {
import_from_files(
&config, dry_run, input_file_chunk, multi_progress).unwrap();
import_from_files(&config, dry_run, input_file_chunk, multi_progress).unwrap();
})
})
.collect();
@ -53,15 +52,12 @@ pub fn do_import(config: &HagridConfig, dry_run: bool, input_files: Vec<PathBuf>
Ok(())
}
fn setup_chunks(
mut input_files: Vec<PathBuf>,
num_threads: usize,
) -> Vec<Vec<PathBuf>> {
fn setup_chunks(mut input_files: Vec<PathBuf>, num_threads: usize) -> Vec<Vec<PathBuf>> {
let chunk_size = (input_files.len() + (num_threads - 1)) / num_threads;
(0..num_threads)
.map(|_| {
let len = input_files.len();
input_files.drain(0..min(chunk_size,len)).collect()
input_files.drain(0..min(chunk_size, len)).collect()
})
.collect()
}
@ -76,7 +72,7 @@ struct ImportStats<'a> {
count_unchanged: u64,
}
impl <'a> ImportStats<'a> {
impl<'a> ImportStats<'a> {
fn new(progress: &'a ProgressBar, filename: String) -> Self {
ImportStats {
progress,
@ -106,9 +102,14 @@ impl <'a> ImportStats<'a> {
return;
}
self.progress.set_message(&format!(
"{}, imported {:5} keys, {:5} New {:5} Updated {:5} Unchanged {:5} Errors",
&self.filename, self.count_total, self.count_new, self.count_updated, self.count_unchanged, self.count_err));
"{}, imported {:5} keys, {:5} New {:5} Updated {:5} Unchanged {:5} Errors",
&self.filename,
self.count_total,
self.count_new,
self.count_updated,
self.count_unchanged,
self.count_err
));
}
}
@ -137,10 +138,11 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
let bytes_total = input_file.metadata()?.len();
let progress_bar = multi_progress.add(ProgressBar::new(bytes_total));
progress_bar
.set_style(ProgressStyle::default_bar()
progress_bar.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
.progress_chars("##-"));
.progress_chars("##-"),
);
progress_bar.set_message("Starting…");
let input_reader = &mut progress_bar.wrap_read(input_file);
@ -156,8 +158,13 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
Packet::SecretKey(key) => key.fingerprint().to_hex(),
_ => "Unknown".to_owned(),
};
let error = format!("{}:{:05}:{}: {}", filename, stats.count_total,
key_fpr, e.to_string());
let error = format!(
"{}:{:05}:{}: {}",
filename,
stats.count_total,
key_fpr,
e.to_string()
);
progress_bar.println(error);
}
stats.update(result);
@ -169,7 +176,7 @@ fn import_from_file(db: &KeyDatabase, input: &Path, multi_progress: &MultiProgre
fn read_file_to_tpks(
reader: impl Read + Send + Sync,
callback: &mut impl FnMut(Vec<Packet>) -> ()
callback: &mut impl FnMut(Vec<Packet>) -> (),
) -> Result<()> {
let mut ppr = PacketParser::from_reader(reader)?;
let mut acc = Vec::new();
@ -183,7 +190,7 @@ fn read_file_to_tpks(
if !acc.is_empty() {
if let Packet::PublicKey(_) | Packet::SecretKey(_) = packet {
callback(acc);
acc = vec!();
acc = vec![];
}
}
@ -194,10 +201,7 @@ fn read_file_to_tpks(
}
fn import_key(db: &KeyDatabase, packets: Vec<Packet>) -> Result<ImportResult> {
openpgp::Cert::from_packets(packets.into_iter())
.and_then(|tpk| {
db.merge(tpk)
})
openpgp::Cert::from_packets(packets.into_iter()).and_then(|tpk| db.merge(tpk))
}
/*


@ -1,12 +1,12 @@
extern crate anyhow;
extern crate clap;
extern crate tempfile;
extern crate sequoia_openpgp as openpgp;
extern crate hagrid_database as database;
extern crate sequoia_openpgp as openpgp;
extern crate tempfile;
#[macro_use]
extern crate serde_derive;
extern crate toml;
extern crate indicatif;
extern crate toml;
extern crate walkdir;
use std::fs;
@ -15,7 +15,7 @@ use std::str::FromStr;
use anyhow::Result;
use clap::{Arg, App, SubCommand};
use clap::{App, Arg, SubCommand};
mod import;
mod regenerate;
@ -30,7 +30,7 @@ pub struct HagridConfigs {
// this is not an exact match - Rocket config has more complicated semantics
// than a plain toml file.
// see also https://github.com/SergioBenitez/Rocket/issues/228
#[derive(Deserialize,Clone)]
#[derive(Deserialize, Clone)]
pub struct HagridConfig {
_template_dir: Option<PathBuf>,
keys_internal_dir: Option<PathBuf>,
@ -43,34 +43,42 @@ pub struct HagridConfig {
fn main() -> Result<()> {
let matches = App::new("Hagrid Control")
.version("0.1")
.about("Control hagrid database externally")
.arg(Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Sets a custom config file")
.takes_value(true))
.arg(Arg::with_name("env")
.short("e")
.long("env")
.value_name("ENVIRONMENT")
.takes_value(true)
.default_value("prod")
.possible_values(&["dev","stage","prod"]))
.subcommand(SubCommand::with_name("regenerate")
.about("Regenerate symlink directory"))
.subcommand(SubCommand::with_name("import")
.about("Import keys into Hagrid")
.arg(Arg::with_name("dry run")
.short("n")
.long("dry-run")
.help("don't actually keep imported keys")
)
.arg(Arg::with_name("keyring files")
.required(true)
.multiple(true)))
.get_matches();
.version("0.1")
.about("Control hagrid database externally")
.arg(
Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Sets a custom config file")
.takes_value(true),
)
.arg(
Arg::with_name("env")
.short("e")
.long("env")
.value_name("ENVIRONMENT")
.takes_value(true)
.default_value("prod")
.possible_values(&["dev", "stage", "prod"]),
)
.subcommand(SubCommand::with_name("regenerate").about("Regenerate symlink directory"))
.subcommand(
SubCommand::with_name("import")
.about("Import keys into Hagrid")
.arg(
Arg::with_name("dry run")
.short("n")
.long("dry-run")
.help("don't actually keep imported keys"),
)
.arg(
Arg::with_name("keyring files")
.required(true)
.multiple(true),
),
)
.get_matches();
let config_file = matches.value_of("config").unwrap_or("Rocket.toml");
let config_data = fs::read_to_string(config_file).unwrap();


@ -3,12 +3,12 @@ use anyhow::Result;
use std::path::Path;
use std::time::Instant;
use indicatif::{ProgressBar, ProgressStyle};
use walkdir::WalkDir;
use indicatif::{ProgressBar,ProgressStyle};
use HagridConfig;
use database::{Database,KeyDatabase,RegenerateResult};
use database::types::Fingerprint;
use database::{Database, KeyDatabase, RegenerateResult};
use HagridConfig;
struct RegenerateStats<'a> {
progress: &'a ProgressBar,
@ -22,7 +22,7 @@ struct RegenerateStats<'a> {
kps_partial: u64,
}
impl <'a> RegenerateStats<'a> {
impl<'a> RegenerateStats<'a> {
fn new(progress: &'a ProgressBar) -> Self {
Self {
progress,
@ -48,7 +48,7 @@ impl <'a> RegenerateStats<'a> {
Err(e) => {
self.progress.println(format!("{}: {}", fpr, e.to_string()));
self.count_err += 1;
},
}
Ok(RegenerateResult::Updated) => self.count_updated += 1,
Ok(RegenerateResult::Unchanged) => self.count_unchanged += 1,
}
@ -79,21 +79,27 @@ pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
false,
)?;
let published_dir = config.keys_external_dir.as_ref().unwrap().join("links").join("by-email");
let published_dir = config
.keys_external_dir
.as_ref()
.unwrap()
.join("links")
.join("by-email");
let dirs: Vec<_> = WalkDir::new(published_dir)
.min_depth(1)
.max_depth(1)
.sort_by(|a,b| a.file_name().cmp(b.file_name()))
.sort_by(|a, b| a.file_name().cmp(b.file_name()))
.into_iter()
.flatten()
.map(|entry| entry.into_path())
.collect();
let progress_bar = ProgressBar::new(dirs.len() as u64);
progress_bar
.set_style(ProgressStyle::default_bar()
progress_bar.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40.cyan/blue} {msg}")
.progress_chars("##-"));
.progress_chars("##-"),
);
let mut stats = RegenerateStats::new(&progress_bar);
@ -106,14 +112,18 @@ pub fn do_regenerate(config: &HagridConfig) -> Result<()> {
Ok(())
}
fn regenerate_dir_recursively(db: &KeyDatabase, stats: &mut RegenerateStats, dir: &Path) -> Result<()> {
fn regenerate_dir_recursively(
db: &KeyDatabase,
stats: &mut RegenerateStats,
dir: &Path,
) -> Result<()> {
for path in WalkDir::new(dir)
.follow_links(true)
.into_iter()
.flatten()
.filter(|e| e.file_type().is_file())
.map(|entry| entry.into_path()) {
.map(|entry| entry.into_path())
{
let fpr = KeyDatabase::path_to_primary(&path).unwrap();
let result = db.regenerate_links(&fpr);
stats.update(result, fpr);


@ -55,17 +55,18 @@ lazy_static! {
}
pub fn anonymize_address(email: &Email) -> Option<String> {
email.as_str()
.rsplit('@')
.next()
.map(|domain| domain.to_lowercase())
.and_then(|domain| {
if POPULAR_DOMAINS.contains(&domain.as_str()) {
Some(domain)
} else {
domain.rsplit('.').next().map(|tld| tld.to_owned())
}
})
email
.as_str()
.rsplit('@')
.next()
.map(|domain| domain.to_lowercase())
.and_then(|domain| {
if POPULAR_DOMAINS.contains(&domain.as_str()) {
Some(domain)
} else {
domain.rsplit('.').next().map(|tld| tld.to_owned())
}
})
}
pub fn anonymize_address_fallback(email: &Email) -> String {


@ -8,14 +8,21 @@ use crate::database::types::Email;
lazy_static! {
static ref KEY_UPLOAD: LabelCounter =
LabelCounter::new("hagrid_key_upload", "Uploaded keys", &["result"]);
static ref MAIL_SENT: LabelCounter =
LabelCounter::new("hagrid_mail_sent", "Sent verification mails", &["type", "domain"]);
static ref KEY_ADDRESS_PUBLISHED: LabelCounter =
LabelCounter::new("hagrid_key_address_published", "Verified email addresses", &["domain"]);
static ref KEY_ADDRESS_UNPUBLISHED: LabelCounter =
LabelCounter::new("hagrid_key_address_unpublished", "Unpublished email addresses", &["domain"]);
static ref MAIL_SENT: LabelCounter = LabelCounter::new(
"hagrid_mail_sent",
"Sent verification mails",
&["type", "domain"]
);
static ref KEY_ADDRESS_PUBLISHED: LabelCounter = LabelCounter::new(
"hagrid_key_address_published",
"Verified email addresses",
&["domain"]
);
static ref KEY_ADDRESS_UNPUBLISHED: LabelCounter = LabelCounter::new(
"hagrid_key_address_unpublished",
"Unpublished email addresses",
&["domain"]
);
}
pub fn register_counters(registry: &prometheus::Registry) {
@ -58,7 +65,9 @@ impl LabelCounter {
}
fn register(&self, registry: &prometheus::Registry) {
registry.register(Box::new(self.prometheus_counter.clone())).unwrap();
registry
.register(Box::new(self.prometheus_counter.clone()))
.unwrap();
}
fn inc(&self, values: &[&str]) {


@ -4,13 +4,13 @@ use std::convert::TryInto;
use std::path::PathBuf;
extern crate anyhow;
use anyhow::Result as Result;
use anyhow::Result;
extern crate structopt;
use structopt::StructOpt;
extern crate hagrid_database as database;
use crate::database::{Query, Database, KeyDatabase};
use crate::database::{Database, KeyDatabase, Query};
#[derive(Debug, StructOpt)]
#[structopt(
@ -54,32 +54,30 @@ fn real_main() -> Result<()> {
delete(&db, &opt.query.parse()?, opt.all_bindings, opt.all)
}
fn delete(db: &KeyDatabase, query: &Query, all_bindings: bool, mut all: bool)
-> Result<()> {
fn delete(db: &KeyDatabase, query: &Query, all_bindings: bool, mut all: bool) -> Result<()> {
match query {
Query::ByFingerprint(_) | Query::ByKeyID(_) => {
eprintln!("Fingerprint or KeyID given, deleting key and all \
bindings.");
eprintln!(
"Fingerprint or KeyID given, deleting key and all \
bindings."
);
all = true;
},
}
_ => (),
}
let tpk = db.lookup(query)?.ok_or_else(
|| anyhow::format_err!("No TPK matching {:?}", query))?;
let tpk = db
.lookup(query)?
.ok_or_else(|| anyhow::format_err!("No TPK matching {:?}", query))?;
let fp: database::types::Fingerprint = tpk.fingerprint().try_into()?;
let mut results = Vec::new();
// First, delete the bindings.
if all_bindings || all {
results.push(
("all bindings".into(),
db.set_email_unpublished_all(&fp)));
results.push(("all bindings".into(), db.set_email_unpublished_all(&fp)));
} else if let Query::ByEmail(ref email) = query {
results.push(
(email.to_string(),
db.set_email_unpublished(&fp, email)));
results.push((email.to_string(), db.set_email_unpublished(&fp, email)));
} else {
unreachable!()
}
@ -110,12 +108,15 @@ fn delete(db: &KeyDatabase, query: &Query, all_bindings: bool, mut all: bool)
let mut err = Ok(());
for (slug, result) in results {
eprintln!("{}: {}", slug,
if let Err(ref e) = result {
e.to_string()
} else {
"Deleted".into()
});
eprintln!(
"{}: {}",
slug,
if let Err(ref e) = result {
e.to_string()
} else {
"Deleted".into()
}
);
if err.is_ok() {
if let Err(e) = result {
err = Err(e);

File diff suppressed because it is too large.


@ -1,10 +1,9 @@
use handlebars::{
Context, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext, RenderError
Context, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext, RenderError,
};
use std::io;
pub struct I18NHelper {
catalogs: Vec<(&'static str, gettext::Catalog)>,
}
@ -14,11 +13,9 @@ impl I18NHelper {
Self { catalogs }
}
pub fn get_catalog(
&self,
lang: &str,
) -> &gettext::Catalog {
let (_, ref catalog) = self.catalogs
pub fn get_catalog(&self, lang: &str) -> &gettext::Catalog {
let (_, ref catalog) = self
.catalogs
.iter()
.find(|(candidate, _)| *candidate == lang)
.unwrap_or_else(|| self.catalogs.get(0).unwrap());
@ -75,10 +72,8 @@ impl HelperDef for I18NHelper {
let rerender = h
.param(1)
.and_then(|p| p
.relative_path()
.map(|v| v == "rerender")
).unwrap_or(false);
.and_then(|p| p.relative_path().map(|v| v == "rerender"))
.unwrap_or(false);
let lang = context
.data()
@ -89,14 +84,15 @@ impl HelperDef for I18NHelper {
fn render_error_with<E>(e: E) -> RenderError
where
E: std::error::Error + Send + Sync + 'static
E: std::error::Error + Send + Sync + 'static,
{
RenderError::from_error("Failed to render", e)
}
let response = self.lookup(lang, id);
if rerender {
let data = rcx.evaluate(context, "this").unwrap();
let response = reg.render_template(response, data.as_json())
let response = reg
.render_template(response, data.as_json())
.map_err(render_error_with)?;
out.write(&response).map_err(render_error_with)?;
} else {


@ -1,16 +1,21 @@
use rocket_i18n::I18n;
use crate::database::Query;
use gettext_macros::i18n;
use rocket_i18n::I18n;
pub fn describe_query_error(i18n: &I18n, q: &Query) -> String {
match q {
Query::ByFingerprint(fpr) =>
i18n!(i18n.catalog, "No key found for fingerprint {}"; fpr),
Query::ByKeyID(key_id) =>
i18n!(i18n.catalog, "No key found for key id {}"; key_id),
Query::ByEmail(email) =>
i18n!(i18n.catalog, "No key found for email address {}"; email),
Query::InvalidShort() => i18n!(i18n.catalog, "Search by Short Key ID is not supported."),
Query::ByFingerprint(fpr) => {
i18n!(i18n.catalog, "No key found for fingerprint {}"; fpr)
}
Query::ByKeyID(key_id) => {
i18n!(i18n.catalog, "No key found for key id {}"; key_id)
}
Query::ByEmail(email) => {
i18n!(i18n.catalog, "No key found for email address {}"; email)
}
Query::InvalidShort() => {
i18n!(i18n.catalog, "Search by Short Key ID is not supported.")
}
Query::Invalid() => i18n!(i18n.catalog, "Invalid search query."),
}
}


@ -1,14 +1,14 @@
use std::path::{PathBuf, Path};
use std::path::{Path, PathBuf};
use crate::counters;
use handlebars::Handlebars;
use lettre::{Transport as LettreTransport, SendmailTransport, file::FileTransport};
use lettre::builder::{EmailBuilder, PartBuilder, Mailbox, MimeMultipartType};
use lettre::builder::{EmailBuilder, Mailbox, MimeMultipartType, PartBuilder};
use lettre::{file::FileTransport, SendmailTransport, Transport as LettreTransport};
use serde::Serialize;
use uuid::Uuid;
use crate::counters;
use rocket_i18n::I18n;
use gettext_macros::i18n;
use rocket_i18n::I18n;
use rfc2047::rfc2047_encode;
@ -67,17 +67,26 @@ impl Service {
/// Sends mail by storing it in the given directory.
pub fn filemail(from: &str, base_uri: &str, template_dir: &Path, path: &Path) -> Result<Self> {
Self::new(from, base_uri, template_dir, Transport::Filemail(path.to_owned()))
Self::new(
from,
base_uri,
template_dir,
Transport::Filemail(path.to_owned()),
)
}
fn new(from: &str, base_uri: &str, template_dir: &Path, transport: Transport)
-> Result<Self> {
fn new(from: &str, base_uri: &str, template_dir: &Path, transport: Transport) -> Result<Self> {
let templates = template_helpers::load_handlebars(template_dir)?;
let domain =
url::Url::parse(base_uri)
?.host_str().ok_or_else(|| anyhow!("No host in base-URI"))
?.to_string();
Ok(Self { from: from.into(), domain, templates, transport })
let domain = url::Url::parse(base_uri)?
.host_str()
.ok_or_else(|| anyhow!("No host in base-URI"))?
.to_string();
Ok(Self {
from: from.into(),
domain,
templates,
transport,
})
}
pub fn send_verification(
@ -86,7 +95,7 @@ impl Service {
base_uri: &str,
tpk_name: String,
userid: &Email,
token: &str
token: &str,
) -> Result<()> {
let ctx = context::Verification {
lang: i18n.lang.to_string(),
@ -151,7 +160,7 @@ impl Service {
base_uri: &str,
tpk_name: String,
userid: &Email,
token: &str
token: &str,
) -> Result<()> {
let ctx = context::Welcome {
lang: "en".to_owned(),
@ -176,12 +185,16 @@ impl Service {
&self,
template: &str,
locale: &str,
ctx: impl Serialize
ctx: impl Serialize,
) -> Result<(String, String)> {
let html = self.templates.render(&format!("{}/{}.htm", locale, template), &ctx)
let html = self
.templates
.render(&format!("{}/{}.htm", locale, template), &ctx)
.or_else(|_| self.templates.render(&format!("{}.htm", template), &ctx))
.map_err(|_| anyhow!("Email template failed to render"))?;
let txt = self.templates.render(&format!("{}/{}.txt", locale, template), &ctx)
let txt = self
.templates
.render(&format!("{}/{}.txt", locale, template), &ctx)
.or_else(|_| self.templates.render(&format!("{}.txt", template), &ctx))
.map_err(|_| anyhow!("Email template failed to render"))?;
@ -194,7 +207,7 @@ impl Service {
subject: &str,
template: &str,
locale: &str,
ctx: impl Serialize
ctx: impl Serialize,
) -> Result<()> {
let (html, txt) = self.render_template(template, locale, ctx)?;
@ -235,18 +248,17 @@ impl Service {
Transport::Sendmail => {
let mut transport = SendmailTransport::new();
transport.send(email)?;
},
}
Transport::Filemail(ref path) => {
let mut transport = FileTransport::new(path);
transport.send(email)?;
},
}
}
Ok(())
}
}
// for some reason, this is no longer public in lettre itself
// FIXME replace with builtin struct on lettre update
// see https://github.com/lettre/lettre/blob/master/lettre/src/file/mod.rs#L41
@ -281,9 +293,9 @@ pub fn pop_mail(dir: &Path) -> Result<Option<String>> {
#[cfg(test)]
mod test {
use super::*;
use tempfile::{tempdir, TempDir};
use gettext_macros::{include_i18n};
use gettext_macros::include_i18n;
use std::str::FromStr;
use tempfile::{tempdir, TempDir};
const BASEDIR: &str = "http://localhost/";
const FROM: &str = "test@localhost";
@ -291,12 +303,22 @@ mod test {
fn configure_i18n(lang: &'static str) -> I18n {
let langs = include_i18n!();
let catalog = langs.clone().into_iter().find(|(l, _)| *l == lang).unwrap().1;
let catalog = langs
.clone()
.into_iter()
.find(|(l, _)| *l == lang)
.unwrap()
.1;
rocket_i18n::I18n { catalog, lang }
}
fn configure_mail() -> (Service, TempDir) {
let template_dir: PathBuf = ::std::env::current_dir().unwrap().join("dist/email-templates").to_str().unwrap().into();
let template_dir: PathBuf = ::std::env::current_dir()
.unwrap()
.join("dist/email-templates")
.to_str()
.unwrap()
.into();
let tempdir = tempdir().unwrap();
let service = Service::filemail(FROM, BASEDIR, &template_dir, tempdir.path()).unwrap();
(service, tempdir)
@ -328,7 +350,9 @@ mod test {
assert!(headers.contains(&("Content-Type", "text/html; charset=utf-8")));
assert!(headers.contains(&("From", "<test@localhost>")));
assert!(headers.contains(&("To", "<recipient@example.org>")));
assert_header(&headers, "Content-Type", |v| v.starts_with("multipart/alternative"));
assert_header(&headers, "Content-Type", |v| {
v.starts_with("multipart/alternative")
});
assert_header(&headers, "Date", |v| v.contains("+0000"));
assert_header(&headers, "Message-ID", |v| v.contains("@localhost>"));
}
@ -345,7 +369,14 @@ mod test {
let i18n = configure_i18n("en");
let recipient = Email::from_str(TO).unwrap();
mail.send_verification(&i18n, "test", "fingerprintoo".to_owned(), &recipient, "token").unwrap();
mail.send_verification(
&i18n,
"test",
"fingerprintoo".to_owned(),
&recipient,
"token",
)
.unwrap();
let mail_content = pop_mail(tempdir.path()).unwrap().unwrap();
check_headers(&mail_content);
@ -363,7 +394,14 @@ mod test {
let i18n = configure_i18n("ja");
let recipient = Email::from_str(TO).unwrap();
mail.send_verification(&i18n, "test", "fingerprintoo".to_owned(), &recipient, "token").unwrap();
mail.send_verification(
&i18n,
"test",
"fingerprintoo".to_owned(),
&recipient,
"token",
)
.unwrap();
let mail_content = pop_mail(tempdir.path()).unwrap().unwrap();
check_headers(&mail_content);
@ -373,8 +411,9 @@ mod test {
assert!(mail_content.contains("test/verify/token"));
assert!(mail_content.contains("test/about"));
assert!(mail_content.contains("あなたのメールアド"));
assert!(mail_content.contains("Subject: =?utf-8?q?localhost=E3=81=AE=E3=81=82=E3=81=AA=E3=81=9F=E3=81=AE?="));
assert!(mail_content.contains(
"Subject: =?utf-8?q?localhost=E3=81=AE=E3=81=82=E3=81=AA=E3=81=9F=E3=81=AE?="
));
}
#[test]
@ -383,7 +422,14 @@ mod test {
let i18n = configure_i18n("en");
let recipient = Email::from_str(TO).unwrap();
mail.send_manage_token(&i18n, "test", "fingerprintoo".to_owned(), &recipient, "token").unwrap();
mail.send_manage_token(
&i18n,
"test",
"fingerprintoo".to_owned(),
&recipient,
"token",
)
.unwrap();
let mail_content = pop_mail(tempdir.path()).unwrap().unwrap();
check_headers(&mail_content);
@ -401,7 +447,14 @@ mod test {
let i18n = configure_i18n("ja");
let recipient = Email::from_str(TO).unwrap();
mail.send_manage_token(&i18n, "test", "fingerprintoo".to_owned(), &recipient, "token").unwrap();
mail.send_manage_token(
&i18n,
"test",
"fingerprintoo".to_owned(),
&recipient,
"token",
)
.unwrap();
let mail_content = pop_mail(tempdir.path()).unwrap().unwrap();
check_headers(&mail_content);
@ -412,7 +465,9 @@ mod test {
assert!(mail_content.contains("testtoken"));
assert!(mail_content.contains("test/about"));
assert!(mail_content.contains("この鍵の掲示されたア"));
assert!(mail_content.contains("Subject: =?utf-8?q?localhost=E3=81=AE=E9=8D=B5=E3=82=92=E7=AE=A1=E7=90=86?="));
assert!(mail_content.contains(
"Subject: =?utf-8?q?localhost=E3=81=AE=E9=8D=B5=E3=82=92=E7=AE=A1=E7=90=86?="
));
}
#[test]
@ -420,7 +475,8 @@ mod test {
let (mail, tempdir) = configure_mail();
let recipient = Email::from_str(TO).unwrap();
mail.send_welcome("test", "fingerprintoo".to_owned(), &recipient, "token").unwrap();
mail.send_welcome("test", "fingerprintoo".to_owned(), &recipient, "token")
.unwrap();
let mail_content = pop_mail(tempdir.path()).unwrap().unwrap();
check_headers(&mail_content);
@ -432,4 +488,3 @@ mod test {
assert!(mail_content.contains("first time"));
}
}

View File

@ -2,7 +2,7 @@
#[macro_use]
extern crate anyhow;
use anyhow::Result as Result;
use anyhow::Result;
#[macro_use]
extern crate serde_derive;
@ -23,18 +23,18 @@ init_i18n!("hagrid", en, de, ja);
#[cfg(not(debug_assertions))]
init_i18n!("hagrid", en, de, fr, it, ja, nb, pl, tr, zh_Hans, ko, nl, ru, ar, sv, es, ro);
mod mail;
mod anonymize_utils;
mod tokens;
mod sealed_state;
mod rate_limiter;
mod dump;
mod counters;
mod dump;
mod gettext_strings;
mod i18n;
mod i18n_helpers;
mod gettext_strings;
mod web;
mod mail;
mod rate_limiter;
mod sealed_state;
mod template_helpers;
mod tokens;
mod web;
#[launch]
fn rocket() -> _ {

View File

@ -1,6 +1,6 @@
use std::sync::Mutex;
use std::collections::HashMap;
use std::time::{Instant,Duration};
use std::sync::Mutex;
use std::time::{Duration, Instant};
pub struct RateLimiter {
locked_map: Mutex<HashMap<String, Instant>>,
@ -23,11 +23,12 @@ impl RateLimiter {
self.maybe_cleanup();
let mut locked_map = self.locked_map.lock().unwrap();
let action_ok = locked_map.get(&identifier)
let action_ok = locked_map
.get(&identifier)
.map(|instant| instant.elapsed())
.map(|duration| duration >= self.timeout)
.unwrap_or(true);
if action_ok {
if action_ok {
locked_map.insert(identifier, Instant::now());
}
action_ok
@ -35,7 +36,8 @@ impl RateLimiter {
pub fn action_check(&self, identifier: String) -> bool {
let locked_map = self.locked_map.lock().unwrap();
locked_map.get(&identifier)
locked_map
.get(&identifier)
.map(|instant| instant.elapsed())
.map(|duration| duration >= self.timeout)
.unwrap_or(true)
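
Note (not part of the diff): the rate_limiter.rs hunks above are formatting-only. As a rough, self-contained sketch of the behaviour visible here — the `new` constructor is assumed, since only the `timeout` field and the two `action_*` methods appear in this diff — an identifier may perform an action once per timeout window:

use std::collections::HashMap;
use std::sync::Mutex;
use std::time::{Duration, Instant};

// Minimal stand-in mirroring the logic shown above; the real module also
// has a maybe_cleanup() pass and an action_check() that does not record.
struct RateLimiter {
    locked_map: Mutex<HashMap<String, Instant>>,
    timeout: Duration,
}

impl RateLimiter {
    // Assumed constructor shape; not shown in this diff.
    fn new(timeout: Duration) -> Self {
        Self {
            locked_map: Mutex::new(HashMap::new()),
            timeout,
        }
    }

    // Returns true (and records the attempt) only if the identifier has
    // not acted within the timeout window.
    fn action_perform(&self, identifier: String) -> bool {
        let mut locked_map = self.locked_map.lock().unwrap();
        let action_ok = locked_map
            .get(&identifier)
            .map(|instant| instant.elapsed() >= self.timeout)
            .unwrap_or(true);
        if action_ok {
            locked_map.insert(identifier, Instant::now());
        }
        action_ok
    }
}

fn main() {
    let limiter = RateLimiter::new(Duration::from_secs(60));
    assert!(limiter.action_perform("manage-alice@example.org".into()));
    // A second attempt within the same window is rejected.
    assert!(!limiter.action_perform("manage-alice@example.org".into()));
}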

View File

@ -1,8 +1,8 @@
use ring::aead::{seal_in_place, open_in_place, Algorithm, AES_256_GCM};
use ring::aead::{open_in_place, seal_in_place, Algorithm, AES_256_GCM};
use ring::aead::{OpeningKey, SealingKey};
use ring::rand::{SecureRandom, SystemRandom};
use ring::hmac;
use ring::digest;
use ring::hmac;
use ring::rand::{SecureRandom, SystemRandom};
// Keep these in sync, and keep the key len synced with the `private` docs as
// well as the `KEYS_INFO` const in secure::Key.
@ -14,7 +14,7 @@ pub struct SealedState {
opening_key: OpeningKey,
}
impl SealedState {
impl SealedState {
pub fn new(secret: &str) -> Self {
let salt = hmac::SigningKey::new(&digest::SHA256, b"hagrid");
let mut key = vec![0; 32];
@ -23,7 +23,10 @@ impl SealedState {
let sealing_key = SealingKey::new(ALGO, key.as_ref()).expect("sealing key creation");
let opening_key = OpeningKey::new(ALGO, key.as_ref()).expect("sealing key creation");
SealedState { sealing_key, opening_key }
SealedState {
sealing_key,
opening_key,
}
}
pub fn unseal(&self, mut data: Vec<u8>) -> Result<String, &'static str> {
@ -43,7 +46,9 @@ impl SealedState {
data = vec![0; NONCE_LEN + input.len() + overhead];
let (nonce, in_out) = data.split_at_mut(NONCE_LEN);
SystemRandom::new().fill(nonce).expect("couldn't random fill nonce");
SystemRandom::new()
.fill(nonce)
.expect("couldn't random fill nonce");
in_out[..input.len()].copy_from_slice(input.as_bytes());
seal_in_place(&self.sealing_key, nonce, &[], in_out, overhead).expect("in-place seal")
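
Note (not part of the diff): formatting-only changes again. The API sketched by these hunks is a symmetric pair keyed from a secret string, with the sealed buffer laid out as nonce followed by the in-place sealed payload. A hedged usage sketch — only `new` and `unseal` are visible by name here, the `seal` counterpart is assumed from the body above:

use crate::sealed_state::SealedState;

fn roundtrip() -> Result<(), &'static str> {
    let state = SealedState::new("some secret");
    // seal() is the assumed public counterpart of unseal(); the body shown
    // above builds data = nonce || sealed plaintext.
    let sealed: Vec<u8> = state.seal("hello");
    let plain = state.unseal(sealed)?;
    assert_eq!(plain, "hello");
    Ok(())
}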

View File

@ -1,12 +1,12 @@
use std::path::{Path, PathBuf};
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use handlebars::Handlebars;
use gettext_macros::include_i18n;
use crate::Result;
use crate::i18n::I18NHelper;
use crate::Result;
#[derive(Debug)]
pub struct TemplateOverrides(String, HashSet<String>);
@ -17,7 +17,7 @@ impl TemplateOverrides {
.map(|vec| Self(localized_dir.to_owned(), vec))
}
pub fn get_template_override(&self, lang: &str, tmpl: &str) -> Option<String> {
pub fn get_template_override(&self, lang: &str, tmpl: &str) -> Option<String> {
let template_name = format!("{}/{}/{}", self.0, lang, tmpl);
if self.1.contains(&template_name) {
println!("{}", &template_name);
@ -28,7 +28,10 @@ impl TemplateOverrides {
}
}
fn load_localized_template_names(template_path: &Path, localized_dir: &str) -> Result<HashSet<String>> {
fn load_localized_template_names(
template_path: &Path,
localized_dir: &str,
) -> Result<HashSet<String>> {
let language_glob = template_path.join(localized_dir).join("*");
glob::glob(language_glob.to_str().expect("valid glob path string"))
.unwrap()
@ -41,11 +44,12 @@ fn load_localized_template_names(template_path: &Path, localized_dir: &str) -> R
.flatten()
.map(move |path| {
// TODO this is a hack
let template_name = remove_extension(remove_extension(path.strip_prefix(&template_path)?));
let template_name =
remove_extension(remove_extension(path.strip_prefix(&template_path)?));
Ok(template_name.to_string_lossy().into_owned())
})
})
.collect()
})
.collect()
}
pub fn load_handlebars(template_dir: &Path) -> Result<Handlebars<'static>> {
@ -71,12 +75,11 @@ fn remove_extension<P: AsRef<Path>>(path: P) -> PathBuf {
let path = path.as_ref();
let stem = match path.file_stem() {
Some(stem) => stem,
None => return path.to_path_buf()
None => return path.to_path_buf(),
};
match path.parent() {
Some(parent) => parent.join(stem),
None => PathBuf::from(stem)
None => PathBuf::from(stem),
}
}
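
Note (not part of the diff): the helper reformatted above drops exactly one extension level, which is why load_localized_template_names applies it twice to turn a template file into its lookup name. A self-contained restatement, using an illustrative file name rather than one from the repository:

use std::path::{Path, PathBuf};

// Same logic as the remove_extension helper above.
fn remove_extension<P: AsRef<Path>>(path: P) -> PathBuf {
    let path = path.as_ref();
    let stem = match path.file_stem() {
        Some(stem) => stem,
        None => return path.to_path_buf(),
    };
    match path.parent() {
        Some(parent) => parent.join(stem),
        None => PathBuf::from(stem),
    }
}

fn main() {
    // Applied twice, "localized/de/manage.htm.hbs" becomes "localized/de/manage".
    let p = Path::new("localized/de/manage.htm.hbs");
    assert_eq!(
        remove_extension(remove_extension(p)),
        PathBuf::from("localized/de/manage")
    );
}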

View File

@ -1,17 +1,16 @@
use crate::sealed_state::SealedState;
use serde::{Serialize,de::DeserializeOwned};
use crate::Result;
use serde::{de::DeserializeOwned, Serialize};
pub trait StatelessSerializable : Serialize + DeserializeOwned {
}
pub trait StatelessSerializable: Serialize + DeserializeOwned {}
pub struct Service {
sealed_state: SealedState,
validity: u64,
}
#[derive(Serialize,Deserialize)]
#[derive(Serialize, Deserialize)]
struct Token {
#[serde(rename = "c")]
creation: u64,
@ -22,7 +21,10 @@ struct Token {
impl Service {
pub fn init(secret: &str, validity: u64) -> Self {
let sealed_state = SealedState::new(secret);
Service { sealed_state, validity }
Service {
sealed_state,
validity,
}
}
pub fn create(&self, payload_content: &impl StatelessSerializable) -> String {
@ -37,13 +39,17 @@ impl Service {
}
pub fn check<T>(&self, token_encoded: &str) -> Result<T>
where T: StatelessSerializable {
where
T: StatelessSerializable,
{
let token_sealed = base64::decode_config(&token_encoded, base64::URL_SAFE_NO_PAD)
.map_err(|_| anyhow!("invalid b64"))?;
let token_str = self.sealed_state.unseal(token_sealed)
let token_str = self
.sealed_state
.unseal(token_sealed)
.map_err(|_| anyhow!("failed to validate"))?;
let token: Token = serde_json::from_str(&token_str)
.map_err(|_| anyhow!("failed to deserialize"))?;
let token: Token =
serde_json::from_str(&token_str).map_err(|_| anyhow!("failed to deserialize"))?;
let elapsed = current_time() - token.creation;
if elapsed > self.validity {
@ -55,13 +61,15 @@ impl Service {
Ok(payload)
}
}
#[cfg(not(test))]
fn current_time() -> u64 {
use std::time::SystemTime;
SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs()
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs()
}
#[cfg(test)]
@ -73,23 +81,23 @@ fn current_time() -> u64 {
mod tests {
use super::*;
#[derive(Debug,Serialize,Deserialize,Clone,PartialEq)]
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
struct TestStruct1 {
payload: String,
}
impl StatelessSerializable for TestStruct1 {
}
impl StatelessSerializable for TestStruct1 {}
#[derive(Debug,Serialize,Deserialize,Clone,PartialEq)]
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
struct TestStruct2 {
something: String,
}
impl StatelessSerializable for TestStruct2 {
}
impl StatelessSerializable for TestStruct2 {}
#[test]
fn test_create_check() {
let payload = TestStruct1 { payload: "hello".to_owned() };
let payload = TestStruct1 {
payload: "hello".to_owned(),
};
let mt = Service::init("secret", 60);
let token = mt.create(&payload);
// println!("{}", &token);
@ -102,7 +110,9 @@ mod tests {
#[test]
fn test_ok() {
let payload = TestStruct1 { payload: "hello".to_owned() };
let payload = TestStruct1 {
payload: "hello".to_owned(),
};
let token = "rwM_S9gZaRQaf6DLvmWtZSipQhH_G5ronSIJv2FrMdwGBPSYYQ-1jaP58dTHU5WuC14vb8jxmz2Xf_b3pqzpCGTEJj9drm4t";
let mt = Service::init("secret", 60);
@ -113,7 +123,9 @@ mod tests {
#[test]
fn test_bad_type() {
let payload = TestStruct1 { payload: "hello".to_owned() };
let payload = TestStruct1 {
payload: "hello".to_owned(),
};
let mt = Service::init("secret", 60);
let token = mt.create(&payload);

View File

@ -3,17 +3,13 @@ use std::io;
use rocket_i18n::I18n;
use crate::dump::{self, Kind};
use crate::web::MyResponse;
use crate::i18n_helpers::describe_query_error;
use crate::web::MyResponse;
use crate::database::{Database, KeyDatabase, Query};
#[get("/debug?<q>")]
pub fn debug_info(
db: &rocket::State<KeyDatabase>,
i18n: I18n,
q: String,
) -> MyResponse {
pub fn debug_info(db: &rocket::State<KeyDatabase>, i18n: I18n, q: String) -> MyResponse {
let query = match q.parse::<Query>() {
Ok(query) => query,
Err(_) => return MyResponse::bad_request_plain("bad request"),
@ -38,11 +34,9 @@ pub fn debug_info(
32 * 4 + 80,
);
match dump_result {
Ok(Kind::Cert) => {
match String::from_utf8(result) {
Ok(dump_text) => MyResponse::plain(dump_text),
Err(e) => MyResponse::ise(e.into()),
}
Ok(Kind::Cert) => match String::from_utf8(result) {
Ok(dump_text) => MyResponse::plain(dump_text),
Err(e) => MyResponse::ise(e.into()),
},
Ok(_) => MyResponse::ise(anyhow!("Internal parsing error!")),
Err(e) => MyResponse::ise(e),

View File

@ -1,29 +1,29 @@
use std::fmt;
use std::time::SystemTime;
use std::collections::HashMap;
use std::time::SystemTime;
use rocket::Data;
use rocket::form::{Form, ValueField};
use rocket::outcome::Outcome;
use rocket::http::{ContentType, Status};
use rocket::request::{self, Request, FromRequest};
use rocket::outcome::Outcome;
use rocket::request::{self, FromRequest, Request};
use rocket::Data;
use rocket_i18n::I18n;
use url::percent_encoding::{DEFAULT_ENCODE_SET, utf8_percent_encode};
use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
use crate::database::{Database, Query, KeyDatabase};
use crate::database::types::{Email, Fingerprint, KeyID};
use crate::database::{Database, KeyDatabase, Query};
use crate::rate_limiter::RateLimiter;
use crate::i18n_helpers::describe_query_error;
use crate::rate_limiter::RateLimiter;
use crate::tokens;
use crate::web;
use crate::mail;
use crate::web::{RequestOrigin, MyResponse, vks_web};
use crate::web::vks::response::UploadResponse;
use crate::web;
use crate::web::vks::response::EmailStatus;
use crate::web::vks::response::UploadResponse;
use crate::web::{vks_web, MyResponse, RequestOrigin};
#[derive(Debug)]
pub enum Hkp {
@ -31,17 +31,17 @@ pub enum Hkp {
KeyID { keyid: KeyID, index: bool },
ShortKeyID { query: String, index: bool },
Email { email: Email, index: bool },
Invalid { query: String, },
Invalid { query: String },
}
impl fmt::Display for Hkp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Hkp::Fingerprint{ ref fpr,.. } => write!(f, "{}", fpr),
Hkp::KeyID{ ref keyid,.. } => write!(f, "{}", keyid),
Hkp::Email{ ref email,.. } => write!(f, "{}", email),
Hkp::ShortKeyID{ ref query,.. } => write!(f, "{}", query),
Hkp::Invalid{ ref query } => write!(f, "{}", query),
Hkp::Fingerprint { ref fpr, .. } => write!(f, "{}", fpr),
Hkp::KeyID { ref keyid, .. } => write!(f, "{}", keyid),
Hkp::Email { ref email, .. } => write!(f, "{}", email),
Hkp::ShortKeyID { ref query, .. } => write!(f, "{}", query),
Hkp::Invalid { ref query } => write!(f, "{}", query),
}
}
}
@ -53,58 +53,48 @@ impl<'r> FromRequest<'r> for Hkp {
async fn from_request(request: &'r Request<'_>) -> request::Outcome<Hkp, ()> {
use std::str::FromStr;
let query = request.uri().query().map(|q| q.as_str()).unwrap_or_default();
let query = request
.uri()
.query()
.map(|q| q.as_str())
.unwrap_or_default();
let fields = Form::values(query)
.map(|ValueField { name, value }| {
(name.to_string(), value.to_string())
})
.map(|ValueField { name, value }| (name.to_string(), value.to_string()))
.collect::<HashMap<_, _>>();
if fields.contains_key("search")
&& fields
.get("op")
.map(|x| x == "get" || x == "index")
.unwrap_or(false)
.get("op")
.map(|x| x == "get" || x == "index")
.unwrap_or(false)
{
let index = fields.get("op").map(|x| x == "index").unwrap_or(false);
let search = fields.get("search").cloned().unwrap_or_default();
let maybe_fpr = Fingerprint::from_str(&search);
let maybe_keyid = KeyID::from_str(&search);
let looks_like_short_key_id = !search.contains('@') &&
(search.starts_with("0x") && search.len() < 16 || search.len() == 8);
let looks_like_short_key_id = !search.contains('@')
&& (search.starts_with("0x") && search.len() < 16 || search.len() == 8);
if looks_like_short_key_id {
Outcome::Success(Hkp::ShortKeyID {
query: search,
index,
})
} else if let Ok(fpr) = maybe_fpr {
Outcome::Success(Hkp::Fingerprint {
fpr,
index,
})
Outcome::Success(Hkp::Fingerprint { fpr, index })
} else if let Ok(keyid) = maybe_keyid {
Outcome::Success(Hkp::KeyID {
keyid,
index,
})
Outcome::Success(Hkp::KeyID { keyid, index })
} else {
match Email::from_str(&search) {
Ok(email) => {
Outcome::Success(Hkp::Email {
email,
index,
})
}
Err(_) => {
Outcome::Success(Hkp::Invalid{
query: search.to_string(),
})
}
Ok(email) => Outcome::Success(Hkp::Email { email, index }),
Err(_) => Outcome::Success(Hkp::Invalid {
query: search.to_string(),
}),
}
}
} else if fields.get("op").map(|x| x == "vindex"
|| x.starts_with("x-"))
} else if fields
.get("op")
.map(|x| x == "vindex" || x.starts_with("x-"))
.unwrap_or(false)
{
Outcome::Failure((Status::NotImplemented, ()))
@ -131,7 +121,11 @@ pub async fn pks_add_form_data(
}
}
#[post("/pks/add", format = "application/x-www-form-urlencoded", data = "<data>")]
#[post(
"/pks/add",
format = "application/x-www-form-urlencoded",
data = "<data>"
)]
pub async fn pks_add_form(
origin: RequestOrigin,
db: &rocket::State<KeyDatabase>,
@ -142,8 +136,24 @@ pub async fn pks_add_form(
data: Data<'_>,
) -> MyResponse {
match vks_web::process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
Ok(UploadResponse::Ok { is_new_key, key_fpr, primary_uid, token, status, .. }) => {
let msg = pks_add_ok(&origin, mail_service, rate_limiter, token, status, is_new_key, key_fpr, primary_uid);
Ok(UploadResponse::Ok {
is_new_key,
key_fpr,
primary_uid,
token,
status,
..
}) => {
let msg = pks_add_ok(
&origin,
mail_service,
rate_limiter,
token,
status,
is_new_key,
key_fpr,
primary_uid,
);
MyResponse::plain(msg)
}
Ok(_) => {
@ -165,16 +175,17 @@ fn pks_add_ok(
primary_uid: Option<Email>,
) -> String {
if primary_uid.is_none() {
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri())
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
}
let primary_uid = primary_uid.unwrap();
if is_new_key {
if send_welcome_mail(origin, mail_service, key_fpr, &primary_uid, token) {
rate_limiter.action_perform(format!("hkp-sent-{}", &primary_uid));
return "Upload successful. This is a new key, a welcome email has been sent.".to_string();
return "Upload successful. This is a new key, a welcome email has been sent."
.to_string();
}
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri())
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
}
let has_unverified = status.iter().any(|(_, v)| *v == EmailStatus::Unpublished);
@ -182,7 +193,7 @@ fn pks_add_ok(
return "Upload successful.".to_string();
}
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri())
return format!("Upload successful. Please note that identity information will only be published after verification. See {baseuri}/about/usage#gnupg-upload", baseuri = origin.get_base_uri());
}
fn send_welcome_mail(
@ -192,25 +203,21 @@ fn send_welcome_mail(
primary_uid: &Email,
token: String,
) -> bool {
mail_service.send_welcome(origin.get_base_uri(), fpr, primary_uid, &token).is_ok()
mail_service
.send_welcome(origin.get_base_uri(), fpr, primary_uid, &token)
.is_ok()
}
#[get("/pks/lookup")]
pub fn pks_lookup(
db: &rocket::State<KeyDatabase>,
i18n: I18n,
key: Hkp
) -> MyResponse {
pub fn pks_lookup(db: &rocket::State<KeyDatabase>, i18n: I18n, key: Hkp) -> MyResponse {
let (query, index) = match key {
Hkp::Fingerprint { fpr, index } =>
(Query::ByFingerprint(fpr), index),
Hkp::KeyID { keyid, index } =>
(Query::ByKeyID(keyid), index),
Hkp::Email { email, index } => {
(Query::ByEmail(email), index)
}
Hkp::Fingerprint { fpr, index } => (Query::ByFingerprint(fpr), index),
Hkp::KeyID { keyid, index } => (Query::ByKeyID(keyid), index),
Hkp::Email { email, index } => (Query::ByEmail(email), index),
Hkp::ShortKeyID { query: _, .. } => {
return MyResponse::bad_request_plain("Search by short key ids is not supported, sorry!");
return MyResponse::bad_request_plain(
"Search by short key ids is not supported, sorry!",
);
}
Hkp::Invalid { query: _ } => {
return MyResponse::bad_request_plain("Invalid search query!");
@ -232,47 +239,50 @@ pub fn pks_internal_index(
) -> MyResponse {
match query_string.parse() {
Ok(query) => key_to_hkp_index(db, i18n, query),
Err(_) => MyResponse::bad_request_plain("Invalid search query!")
Err(_) => MyResponse::bad_request_plain("Invalid search query!"),
}
}
fn key_to_hkp_index(
db: &rocket::State<KeyDatabase>,
i18n: I18n,
query: Query,
) -> MyResponse {
use sequoia_openpgp::types::RevocationStatus;
fn key_to_hkp_index(db: &rocket::State<KeyDatabase>, i18n: I18n, query: Query) -> MyResponse {
use sequoia_openpgp::policy::StandardPolicy;
use sequoia_openpgp::types::RevocationStatus;
let tpk = match db.lookup(&query) {
Ok(Some(tpk)) => tpk,
Ok(None) => return MyResponse::not_found_plain(describe_query_error(&i18n, &query)),
Err(err) => { return MyResponse::ise(err); }
Err(err) => {
return MyResponse::ise(err);
}
};
let mut out = String::default();
let p = tpk.primary_key();
let policy = &StandardPolicy::new();
let ctime = format!("{}", p.creation_time().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs());
let is_rev =
if tpk.revocation_status(policy, None) != RevocationStatus::NotAsFarAsWeKnow {
"r"
} else {
""
};
let ctime = format!(
"{}",
p.creation_time()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs()
);
let is_rev = if tpk.revocation_status(policy, None) != RevocationStatus::NotAsFarAsWeKnow {
"r"
} else {
""
};
let algo: u8 = p.pk_algo().into();
out.push_str("info:1:1\r\n");
out.push_str(&format!(
"pub:{}:{}:{}:{}:{}:{}{}\r\n",
p.fingerprint().to_string().replace(" ", ""),
algo,
p.mpis().bits().unwrap_or(0),
ctime,
"",
"",
is_rev
"pub:{}:{}:{}:{}:{}:{}{}\r\n",
p.fingerprint().to_string().replace(" ", ""),
algo,
p.mpis().bits().unwrap_or(0),
ctime,
"",
"",
is_rev
));
for uid in tpk.userids() {
@ -285,18 +295,13 @@ fn key_to_hkp_index(
.and_then(|time| time.duration_since(SystemTime::UNIX_EPOCH).ok())
.map(|x| format!("{}", x.as_secs()))
.unwrap_or_default();
let is_rev = if uid.revocation_status(policy, None)
!= RevocationStatus::NotAsFarAsWeKnow
{
"r"
} else {
""
};
let is_rev = if uid.revocation_status(policy, None) != RevocationStatus::NotAsFarAsWeKnow {
"r"
} else {
""
};
out.push_str(&format!(
"uid:{}:{}:{}:{}{}\r\n",
u, ctime, "", "", is_rev
));
out.push_str(&format!("uid:{}:{}:{}:{}{}\r\n", u, ctime, "", "", is_rev));
}
MyResponse::plain(out)
@ -304,13 +309,13 @@ fn key_to_hkp_index(
#[cfg(test)]
mod tests {
use rocket::http::Status;
use rocket::http::ContentType;
use rocket::http::Status;
use sequoia_openpgp::serialize::Serialize;
use crate::web::tests::*;
use crate::mail::pop_mail;
use crate::web::tests::*;
#[test]
fn hkp() {
@ -326,9 +331,8 @@ mod tests {
// Prepare to /pks/add
let mut armored = Vec::new();
{
use sequoia_openpgp::armor::{Writer, Kind};
let mut w = Writer::new(&mut armored, Kind::PublicKey)
.unwrap();
use sequoia_openpgp::armor::{Kind, Writer};
let mut w = Writer::new(&mut armored, Kind::PublicKey).unwrap();
tpk.serialize(&mut w).unwrap();
w.finalize().unwrap();
}
@ -338,7 +342,8 @@ mod tests {
}
// Add!
let response = client.post("/pks/add")
let response = client
.post("/pks/add")
.body(post_data.as_bytes())
.header(ContentType::Form)
.dispatch();
@ -351,7 +356,8 @@ mod tests {
assert!(welcome_mail.is_some());
// Add!
let response = client.post("/pks/add")
let response = client
.post("/pks/add")
.body(post_data.as_bytes())
.header(ContentType::Form)
.dispatch();
@ -374,7 +380,8 @@ mod tests {
check_hr_responses_by_fingerprint(&client, &tpk, 0);
// Upload the same key again, make sure the welcome mail is not sent again
let response = client.post("/pks/add")
let response = client
.post("/pks/add")
.body(post_data.as_bytes())
.header(ContentType::Form)
.dispatch();
@ -399,14 +406,14 @@ mod tests {
let mut armored_first = Vec::new();
let mut armored_both = Vec::new();
{
use sequoia_openpgp::armor::{Writer, Kind};
use sequoia_openpgp::armor::{Kind, Writer};
let mut w = Writer::new(&mut armored_both, Kind::PublicKey).unwrap();
tpk_0.serialize(&mut w).unwrap();
tpk_1.serialize(&mut w).unwrap();
w.finalize().unwrap();
}
{
use sequoia_openpgp::armor::{Writer, Kind};
use sequoia_openpgp::armor::{Kind, Writer};
let mut w = Writer::new(&mut armored_first, Kind::PublicKey).unwrap();
tpk_0.serialize(&mut w).unwrap();
w.finalize().unwrap();
@ -421,7 +428,8 @@ mod tests {
}
// Add!
let response = client.post("/pks/add")
let response = client
.post("/pks/add")
.body(post_data_both.as_bytes())
.header(ContentType::Form)
.dispatch();
@ -432,7 +440,8 @@ mod tests {
assert!(welcome_mail.is_none());
// Add the first again
let response = client.post("/pks/add")
let response = client
.post("/pks/add")
.body(post_data_first.as_bytes())
.header(ContentType::Form)
.dispatch();
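
Note (not part of the diff): the reformatted from_request() above classifies an HKP search term as fingerprint, key ID, email, or short key ID, and pks_lookup later rejects short key IDs with a 400 ("not supported"). The heuristic, restated as a standalone sketch:

// Same condition as in from_request() above.
fn looks_like_short_key_id(search: &str) -> bool {
    !search.contains('@')
        && (search.starts_with("0x") && search.len() < 16 || search.len() == 8)
}

fn main() {
    assert!(looks_like_short_key_id("0x1234abcd"));
    assert!(looks_like_short_key_id("1234abcd"));
    assert!(!looks_like_short_key_id("alice@example.org"));
    // A full 40-character fingerprint is not treated as a short key id.
    assert!(!looks_like_short_key_id("0123456789ABCDEF0123456789ABCDEF01234567"));
}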

View File

@ -1,6 +1,6 @@
use rocket::{Request, Data};
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::Method;
use rocket::{Data, Request};
use rocket_dyn_templates::Template;
use rocket_i18n::I18n;
use serde_json::json;
@ -29,7 +29,7 @@ impl Fairing for MaintenanceMode {
fn info(&self) -> Info {
Info {
name: "Maintenance Mode",
kind: Kind::Request
kind: Kind::Request,
}
}
@ -59,8 +59,7 @@ impl MaintenanceMode {
}
fn is_request_json(&self, path: &str) -> bool {
path.starts_with("/vks/v1/upload") ||
path.starts_with("/vks/v1/request-verify")
path.starts_with("/vks/v1/upload") || path.starts_with("/vks/v1/request-verify")
}
fn is_request_plain(&self, path: &str, method: Method) -> bool {
@ -68,9 +67,7 @@ impl MaintenanceMode {
}
fn is_request_web(&self, path: &str) -> bool {
path.starts_with("/upload") ||
path.starts_with("/manage") ||
path.starts_with("/verify")
path.starts_with("/upload") || path.starts_with("/manage") || path.starts_with("/verify")
}
fn get_maintenance_message(&self) -> Option<String> {
@ -93,15 +90,12 @@ struct JsonErrorMessage {
#[get("/maintenance/json/<message>")]
pub fn maintenance_error_json(message: String) -> MyResponse {
MyResponse::MaintenanceJson(json!(JsonErrorMessage{ message }))
MyResponse::MaintenanceJson(json!(JsonErrorMessage { message }))
}
#[get("/maintenance/web/<message>")]
pub fn maintenance_error_web(
message: String,
i18n: I18n,
) -> MyResponse {
let ctx = templates::MaintenanceMode{
pub fn maintenance_error_web(message: String, i18n: I18n) -> MyResponse {
let ctx = templates::MaintenanceMode {
message,
version: env!("VERGEN_SEMVER").to_string(),
commit: env!("VERGEN_SHA_SHORT").to_string(),

View File

@ -5,20 +5,19 @@ use crate::Result;
use gettext_macros::i18n;
use crate::web::{RequestOrigin, MyResponse};
use crate::web::vks_web;
use crate::database::{Database, KeyDatabase, types::Email, types::Fingerprint};
use crate::mail;
use crate::counters;
use crate::database::{types::Email, types::Fingerprint, Database, KeyDatabase};
use crate::mail;
use crate::rate_limiter::RateLimiter;
use crate::tokens::{self, StatelessSerializable};
use crate::web::vks_web;
use crate::web::{MyResponse, RequestOrigin};
#[derive(Debug,Serialize,Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
struct StatelessVerifyToken {
fpr: Fingerprint,
}
impl StatelessSerializable for StatelessVerifyToken {
fpr: Fingerprint,
}
impl StatelessSerializable for StatelessVerifyToken {}
mod templates {
#[derive(Serialize)]
@ -37,8 +36,8 @@ mod templates {
#[derive(Serialize)]
pub struct ManageKeyUidStatus {
pub address: String,
pub published: bool,
pub address: String,
pub published: bool,
}
}
@ -62,11 +61,11 @@ pub fn vks_manage(origin: RequestOrigin, i18n: I18n) -> MyResponse {
#[get("/manage/<token>")]
pub fn vks_manage_key(
origin: RequestOrigin,
db: &rocket::State<KeyDatabase>,
i18n: I18n,
token: String,
token_service: &rocket::State<tokens::Service>,
origin: RequestOrigin,
db: &rocket::State<KeyDatabase>,
i18n: I18n,
token: String,
token_service: &rocket::State<tokens::Service>,
) -> MyResponse {
use crate::database::types::Fingerprint;
use std::convert::TryFrom;
@ -74,19 +73,21 @@ pub fn vks_manage_key(
match db.lookup(&database::Query::ByFingerprint(fpr)) {
Ok(Some(tpk)) => {
let fp = Fingerprint::try_from(tpk.fingerprint()).unwrap();
let mut emails: Vec<Email> = tpk.userids()
let mut emails: Vec<Email> = tpk
.userids()
.map(|u| u.userid().to_string().parse::<Email>())
.flatten()
.collect();
emails.sort_unstable();
emails.dedup();
let uid_status = emails.into_iter().map(|email|
templates::ManageKeyUidStatus {
let uid_status = emails
.into_iter()
.map(|email| templates::ManageKeyUidStatus {
address: email.to_string(),
published: true,
}
).collect();
let key_link = uri!(vks_web::search(q = fp.to_string())).to_string();
})
.collect();
let key_link = uri!(vks_web::search(q = fp.to_string())).to_string();
let context = templates::ManageKey {
key_fpr: fp.to_string(),
key_link,
@ -95,11 +96,12 @@ pub fn vks_manage_key(
base_uri: origin.get_base_uri().to_owned(),
};
MyResponse::ok("manage/manage_key", context, i18n, origin)
},
}
Ok(None) => MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "This link is invalid or expired")),
i18n, origin,
i18n,
origin,
),
Err(e) => MyResponse::ise(e),
}
@ -107,11 +109,13 @@ pub fn vks_manage_key(
MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "This link is invalid or expired")),
i18n, origin)
i18n,
origin,
)
}
}
#[post("/manage", data="<request>")]
#[post("/manage", data = "<request>")]
pub fn vks_manage_post(
db: &rocket::State<KeyDatabase>,
origin: RequestOrigin,
@ -125,35 +129,48 @@ pub fn vks_manage_post(
let email = match request.search_term.parse::<Email>() {
Ok(email) => email,
Err(_) => return MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "Malformed address: {}"; &request.search_term)),
i18n, origin)
Err(_) => {
return MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "Malformed address: {}"; &request.search_term)),
i18n,
origin,
)
}
};
let tpk = match db.lookup(&database::Query::ByEmail(email.clone())) {
Ok(Some(tpk)) => tpk,
Ok(None) => return MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "No key for address: {}"; &request.search_term)),
i18n, origin),
Ok(None) => {
return MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "No key for address: {}"; &request.search_term)),
i18n,
origin,
)
}
Err(e) => return MyResponse::ise(e),
};
let email_exists = tpk.userids()
let email_exists = tpk
.userids()
.flat_map(|binding| binding.userid().to_string().parse::<Email>())
.any(|candidate| candidate == email);
if !email_exists {
return MyResponse::ise(
anyhow!("Internal error: address check failed!"));
return MyResponse::ise(anyhow!("Internal error: address check failed!"));
}
if !rate_limiter.action_perform(format!("manage-{}", &email)) {
return MyResponse::not_found(
Some("manage/manage"),
Some(i18n!(i18n.catalog, "A request has already been sent for this address recently.")),
i18n, origin);
Some(i18n!(
i18n.catalog,
"A request has already been sent for this address recently."
)),
i18n,
origin,
);
}
let fpr: Fingerprint = tpk.fingerprint().try_into().unwrap();
@ -172,7 +189,7 @@ pub fn vks_manage_post(
MyResponse::ok("manage/manage_link_sent", ctx, i18n, origin)
}
#[post("/manage/unpublish", data="<request>")]
#[post("/manage/unpublish", data = "<request>")]
pub fn vks_manage_unpublish(
origin: RequestOrigin,
db: &rocket::State<KeyDatabase>,
@ -199,5 +216,11 @@ pub fn vks_manage_unpublish_or_fail(
db.set_email_unpublished(&verify_token.fpr, &email)?;
counters::inc_address_unpublished(&email);
Ok(vks_manage_key(origin, db, i18n, request.token.to_owned(), token_service))
Ok(vks_manage_key(
origin,
db,
i18n,
request.token.to_owned(),
token_service,
))
}

View File

@ -1,14 +1,14 @@
use hyperx::header::{Charset, ContentDisposition, DispositionParam, DispositionType};
use rocket::figment::Figment;
use rocket::fs::NamedFile;
use rocket::http::{Header, Status};
use rocket::request;
use rocket::outcome::Outcome;
use rocket::response::{Responder, Response};
use rocket::request;
use rocket::response::status::Custom;
use rocket::response::{Responder, Response};
use rocket_dyn_templates::{Engines, Template};
use rocket_i18n::I18n;
use rocket_prometheus::PrometheusMetrics;
use hyperx::header::{ContentDisposition, DispositionType, DispositionParam, Charset};
use gettext_macros::{compile_i18n, include_i18n};
@ -16,27 +16,27 @@ use serde::Serialize;
use std::path::PathBuf;
use crate::mail;
use crate::tokens;
use crate::counters;
use crate::i18n_helpers::describe_query_error;
use crate::template_helpers::TemplateOverrides;
use crate::i18n::I18NHelper;
use crate::i18n_helpers::describe_query_error;
use crate::mail;
use crate::rate_limiter::RateLimiter;
use crate::template_helpers::TemplateOverrides;
use crate::tokens;
use crate::database::{Database, KeyDatabase, Query};
use crate::database::types::Fingerprint;
use crate::database::{Database, KeyDatabase, Query};
use crate::Result;
use std::convert::TryInto;
mod hkp;
mod manage;
mod maintenance;
mod vks;
mod vks_web;
mod vks_api;
mod debug_web;
mod hkp;
mod maintenance;
mod manage;
mod vks;
mod vks_api;
mod vks_web;
mod wkd;
use crate::web::maintenance::MaintenanceMode;
@ -44,10 +44,16 @@ use crate::web::maintenance::MaintenanceMode;
pub struct HagridTemplate(&'static str, serde_json::Value, I18n, RequestOrigin);
impl<'r> Responder<'r, 'static> for HagridTemplate {
fn respond_to(self, req: &'r rocket::Request) -> std::result::Result<Response<'static>, Status> {
fn respond_to(
self,
req: &'r rocket::Request,
) -> std::result::Result<Response<'static>, Status> {
let HagridTemplate(tmpl, ctx, i18n, origin) = self;
let template_overrides: &TemplateOverrides = req.rocket().state().expect("TemplateOverrides must be in managed state");
let template_overrides: &TemplateOverrides = req
.rocket()
.state()
.expect("TemplateOverrides must be in managed state");
let template_override = template_overrides.get_template_override(i18n.lang, tmpl);
let layout_context = templates::HagridLayout::new(ctx, i18n, origin);
@ -55,7 +61,8 @@ impl<'r> Responder<'r, 'static> for HagridTemplate {
Template::render(template_override, layout_context)
} else {
Template::render(tmpl, layout_context)
}.respond_to(req)
}
.respond_to(req)
}
}
@ -114,12 +121,14 @@ impl MyResponse {
rocket::http::hyper::header::CONTENT_DISPOSITION.as_str(),
ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![
DispositionParam::Filename(
Charset::Us_Ascii, None,
(fp.to_string() + ".asc").into_bytes()),
],
}.to_string());
parameters: vec![DispositionParam::Filename(
Charset::Us_Ascii,
None,
(fp.to_string() + ".asc").into_bytes(),
)],
}
.to_string(),
);
MyResponse::Key(armored_key, content_disposition)
}
@ -128,12 +137,14 @@ impl MyResponse {
rocket::http::hyper::header::CONTENT_DISPOSITION.as_str(),
ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![
DispositionParam::Filename(
Charset::Us_Ascii, None,
(wkd_hash.to_string() + ".pgp").into_bytes()),
],
}.to_string());
parameters: vec![DispositionParam::Filename(
Charset::Us_Ascii,
None,
(wkd_hash.to_string() + ".pgp").into_bytes(),
)],
}
.to_string(),
);
MyResponse::WkdKey(binary_key, content_disposition)
}
@ -148,8 +159,15 @@ impl MyResponse {
MyResponse::ServerError(Template::render("500", ctx))
}
pub fn bad_request(template: &'static str, e: anyhow::Error, i18n: I18n, origin: RequestOrigin) -> Self {
let ctx = templates::Error { error: format!("{}", e) };
pub fn bad_request(
template: &'static str,
e: anyhow::Error,
i18n: I18n,
origin: RequestOrigin,
) -> Self {
let ctx = templates::Error {
error: format!("{}", e),
};
let context_json = serde_json::to_value(ctx).unwrap();
MyResponse::BadRequest(HagridTemplate(template, context_json, i18n, origin))
}
@ -168,10 +186,16 @@ impl MyResponse {
i18n: I18n,
origin: RequestOrigin,
) -> Self {
let ctx = templates::Error { error: message.into()
.unwrap_or_else(|| "Key not found".to_owned()) };
let ctx = templates::Error {
error: message.into().unwrap_or_else(|| "Key not found".to_owned()),
};
let context_json = serde_json::to_value(ctx).unwrap();
MyResponse::NotFound(HagridTemplate(tmpl.unwrap_or("index"), context_json, i18n, origin))
MyResponse::NotFound(HagridTemplate(
tmpl.unwrap_or("index"),
context_json,
i18n,
origin,
))
}
}
@ -219,8 +243,16 @@ mod templates {
base_uri: origin.get_base_uri().to_string(),
page,
lang: i18n.lang.to_string(),
htmldir: if is_rtl { "rtl".to_owned() } else { "ltr".to_owned() },
htmlclass: if is_rtl { "rtl".to_owned() } else { "".to_owned() },
htmldir: if is_rtl {
"rtl".to_owned()
} else {
"ltr".to_owned()
},
htmlclass: if is_rtl {
"rtl".to_owned()
} else {
"".to_owned()
},
}
}
}
@ -245,7 +277,9 @@ pub enum RequestOrigin {
impl<'r> request::FromRequest<'r> for RequestOrigin {
type Error = ();
async fn from_request(request: &'r request::Request<'_>) -> request::Outcome<Self, Self::Error> {
async fn from_request(
request: &'r request::Request<'_>,
) -> request::Outcome<Self, Self::Error> {
let hagrid_state = request.rocket().state::<HagridState>().unwrap();
let result = match request.headers().get("x-is-onion").next() {
Some(_) => RequestOrigin::OnionService(hagrid_state.base_uri_onion.clone()),
@ -342,14 +376,16 @@ fn errors(
code: u16,
template: String,
) -> std::result::Result<Custom<Template>, &'static str> {
if !template.chars().all(|x| x == '-' || char::is_ascii_alphabetic(&x)) {
if !template
.chars()
.all(|x| x == '-' || char::is_ascii_alphabetic(&x))
{
return Err("bad request");
}
let status_code = Status::from_code(code)
.ok_or("bad request")?;
let status_code = Status::from_code(code).ok_or("bad request")?;
let response_body = Template::render(
format!("errors/{}-{}", code, template),
templates::HagridLayout::new(templates::Bare{dummy: ()}, i18n, origin)
templates::HagridLayout::new(templates::Bare { dummy: () }, i18n, origin),
);
Ok(Custom(status_code, response_body))
}
@ -360,7 +396,9 @@ pub fn serve() -> Result<rocket::Rocket<rocket::Build>> {
compile_i18n!();
fn rocket_factory(mut rocket: rocket::Rocket<rocket::Build>) -> Result<rocket::Rocket<rocket::Build>> {
fn rocket_factory(
mut rocket: rocket::Rocket<rocket::Build>,
) -> Result<rocket::Rocket<rocket::Build>> {
let routes = routes![
// infra
root,
@ -428,21 +466,23 @@ fn rocket_factory(mut rocket: rocket::Rocket<rocket::Build>) -> Result<rocket::R
let prometheus = configure_prometheus(figment);
rocket = rocket
.attach(Template::custom(|engines: &mut Engines| {
let i18ns = include_i18n!();
let i18n_helper = I18NHelper::new(i18ns);
engines.handlebars.register_helper("text", Box::new(i18n_helper));
}))
.attach(maintenance_mode)
.manage(include_i18n!())
.manage(hagrid_state)
.manage(stateless_token_service)
.manage(stateful_token_service)
.manage(mail_service)
.manage(db_service)
.manage(rate_limiter)
.manage(localized_template_list)
.mount("/", routes);
.attach(Template::custom(|engines: &mut Engines| {
let i18ns = include_i18n!();
let i18n_helper = I18NHelper::new(i18ns);
engines
.handlebars
.register_helper("text", Box::new(i18n_helper));
}))
.attach(maintenance_mode)
.manage(include_i18n!())
.manage(hagrid_state)
.manage(stateless_token_service)
.manage(stateful_token_service)
.manage(mail_service)
.manage(db_service)
.manage(rate_limiter)
.manage(localized_template_list)
.mount("/", routes);
if let Some(prometheus) = prometheus {
rocket = rocket
@ -476,7 +516,8 @@ fn configure_hagrid_state(config: &Figment) -> Result<HagridState> {
// State
let base_uri: String = config.extract_inner("base-URI")?;
let base_uri_onion = config.extract_inner::<String>("base-URI-Onion")
let base_uri_onion = config
.extract_inner::<String>("base-URI-Onion")
.unwrap_or_else(|_| base_uri.clone());
Ok(HagridState {
assets_dir,
@ -524,7 +565,8 @@ fn configure_localized_template_list(config: &Figment) -> Result<TemplateOverrid
}
fn configure_maintenance_mode(config: &Figment) -> Result<MaintenanceMode> {
let maintenance_file: PathBuf = config.extract_inner("maintenance_file")
let maintenance_file: PathBuf = config
.extract_inner("maintenance_file")
.unwrap_or_else(|_| PathBuf::from("maintenance"));
Ok(MaintenanceMode::new(maintenance_file))
}
@ -532,27 +574,27 @@ fn configure_maintenance_mode(config: &Figment) -> Result<MaintenanceMode> {
#[cfg(test)]
pub mod tests {
use regex;
use rocket::http::ContentType;
use rocket::http::Header;
use rocket::http::Status;
use rocket::local::blocking::{Client, LocalResponse};
use std::fs;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use tempfile::{tempdir, TempDir};
use rocket::http::Status;
use rocket::http::ContentType;
use rocket::http::Header;
use sequoia_openpgp::Cert;
use sequoia_openpgp::cert::CertBuilder;
use sequoia_openpgp::parse::Parse;
use sequoia_openpgp::serialize::Serialize;
use sequoia_openpgp::Cert;
use std::time::SystemTime;
use mail::pop_mail;
use crate::database::*;
use super::*;
use crate::database::*;
/// Fake base URI to use in tests.
const BASE_URI: &str = "http://local.connection";
@ -583,27 +625,56 @@ pub mod tests {
let config = rocket::Config::figment()
.select("staging")
.merge(("root", root.path()))
.merge(("template_dir",
::std::env::current_dir().unwrap().join("dist/templates")
.to_str().unwrap()))
.merge(("email_template_dir",
::std::env::current_dir().unwrap().join("dist/email-templates")
.to_str().unwrap()))
.merge(("assets_dir",
::std::env::current_dir().unwrap().join("dist/assets")
.to_str().unwrap()))
.merge(("keys_internal_dir", base_dir.join("keys_internal").to_str().unwrap()))
.merge(("keys_external_dir", base_dir.join("keys_external").to_str().unwrap()))
.merge((
"template_dir",
::std::env::current_dir()
.unwrap()
.join("dist/templates")
.to_str()
.unwrap(),
))
.merge((
"email_template_dir",
::std::env::current_dir()
.unwrap()
.join("dist/email-templates")
.to_str()
.unwrap(),
))
.merge((
"assets_dir",
::std::env::current_dir()
.unwrap()
.join("dist/assets")
.to_str()
.unwrap(),
))
.merge((
"keys_internal_dir",
base_dir.join("keys_internal").to_str().unwrap(),
))
.merge((
"keys_external_dir",
base_dir.join("keys_external").to_str().unwrap(),
))
.merge(("tmp_dir", base_dir.join("tmp").to_str().unwrap()))
.merge(("token_dir", base_dir.join("tokens").to_str().unwrap()))
.merge(("maintenance_file", base_dir.join("maintenance").to_str().unwrap()))
.merge((
"maintenance_file",
base_dir.join("maintenance").to_str().unwrap(),
))
.merge(("base-URI", BASE_URI))
.merge(("base-URI-Onion", BASE_URI_ONION))
.merge(("from", "from@example.com"))
.merge(("token_secret", "hagrid"))
.merge(("token_validity", 3600u64))
.merge(("filemail_into", filemail.into_os_string().into_string()
.expect("path is valid UTF8")));
.merge((
"filemail_into",
filemail
.into_os_string()
.into_string()
.expect("path is valid UTF8"),
));
Ok((root, config))
}
@ -625,7 +696,8 @@ pub mod tests {
let client = Client::untracked(rocket).expect("valid rocket instance");
// Check that we see the landing page.
let response = client.get("/about")
let response = client
.get("/about")
.header(Header::new("Accept-Language", "de"))
.dispatch();
assert_eq!(response.status(), Status::Ok);
@ -650,7 +722,10 @@ pub mod tests {
let response = client.get("/about").dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(), Some(ContentType::HTML));
assert!(response.into_string().unwrap().contains("distribution and discovery"));
assert!(response
.into_string()
.unwrap()
.contains("distribution and discovery"));
// Check that we see the privacy policy.
let response = client.get("/about/privacy").dispatch();
@ -674,7 +749,10 @@ pub mod tests {
let response = client.get("/manage").dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(), Some(ContentType::HTML));
assert!(response.into_string().unwrap().contains("any verified email address"));
assert!(response
.into_string()
.unwrap()
.contains("any verified email address"));
assert_consistency(client.rocket());
}
@ -699,21 +777,30 @@ pub mod tests {
let response = client.put("/").dispatch();
assert_eq!(response.status(), Status::ServiceUnavailable);
assert_eq!(response.content_type(), Some(ContentType::Plain));
assert!(response.into_string().unwrap().contains("maintenance-message"));
assert!(response
.into_string()
.unwrap()
.contains("maintenance-message"));
fs::remove_file(&maintenance_path).unwrap();
// Check that we see the upload form.
let response = client.get("/upload").dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(), Some(ContentType::HTML));
assert!(!response.into_string().unwrap().contains("maintenance-message"));
assert!(!response
.into_string()
.unwrap()
.contains("maintenance-message"));
}
fn check_maintenance(client: &Client, uri: &str, content_type: ContentType) {
let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::ServiceUnavailable);
assert_eq!(response.content_type(), Some(content_type));
assert!(response.into_string().unwrap().contains("maintenance-message"));
assert!(response
.into_string()
.unwrap()
.contains("maintenance-message"));
}
#[test]
@ -755,7 +842,11 @@ pub mod tests {
vks_manage(&client, "foo@invalid.example.com");
// Confirm deletion.
check_mails_and_confirm_deletion(&client, filemail_into.as_path(), "foo@invalid.example.com");
check_mails_and_confirm_deletion(
&client,
filemail_into.as_path(),
"foo@invalid.example.com",
);
// Now, we should no longer be able to look it up by email
// address.
@ -912,7 +1003,8 @@ pub mod tests {
.append_pair("address", "foo@invalid.example.com")
.finish();
let response = client.post("/upload/request-verify")
let response = client
.post("/upload/request-verify")
.header(ContentType::Form)
.header(Header::new("X-Is-Onion", "true"))
.body(encoded.as_bytes())
@ -929,7 +1021,6 @@ pub mod tests {
assert_consistency(client.rocket());
}
#[test]
fn upload_curl_shortcut() {
let (_tmpdir, client) = client().unwrap();
@ -948,21 +1039,40 @@ pub mod tests {
#[test]
fn search_invalid() {
let (_tmpdir, client) = client().unwrap();
check_response(&client, "/search?q=0x1234abcd",
Status::BadRequest, "not supported");
check_response(&client, "/search?q=1234abcd",
Status::BadRequest, "not supported");
check_response(&client, "/pks/lookup?op=get&search=0x1234abcd",
Status::BadRequest, "not supported");
check_response(&client, "/pks/lookup?op=get&search=1234abcd",
Status::BadRequest, "not supported");
check_response(
&client,
"/search?q=0x1234abcd",
Status::BadRequest,
"not supported",
);
check_response(
&client,
"/search?q=1234abcd",
Status::BadRequest,
"not supported",
);
check_response(
&client,
"/pks/lookup?op=get&search=0x1234abcd",
Status::BadRequest,
"not supported",
);
check_response(
&client,
"/pks/lookup?op=get&search=1234abcd",
Status::BadRequest,
"not supported",
);
}
#[test]
fn wkd_policy() {
let (_tmpdir, client) = client().unwrap();
check_response(&client, "/.well-known/openpgpkey/example.org/policy",
Status::Ok, "");
check_response(
&client,
"/.well-known/openpgpkey/example.org/policy",
Status::Ok,
"",
);
}
/// Asserts that the given URI 404s.
@ -973,78 +1083,81 @@ pub mod tests {
/// Asserts that lookups by the given email 404.
pub fn check_null_responses_by_email(client: &Client, addr: &str) {
check_null_response(client, &format!("/vks/v1/by-email/{}", addr));
check_null_response(client, &format!("/pks/lookup?op=get&search={}", addr));
check_null_response(
client, &format!("/vks/v1/by-email/{}", addr));
check_null_response(
client, &format!("/pks/lookup?op=get&search={}", addr));
check_null_response(
client, &format!("/pks/lookup?op=get&options=mr&search={}",
addr));
client,
&format!("/pks/lookup?op=get&options=mr&search={}", addr),
);
let (wkd_hash, domain) = crate::database::wkd::encode_wkd(addr).unwrap();
check_null_response(
&client,
&format!("/.well-known/openpgpkey/{}/hu/{}", domain, wkd_hash));
&format!("/.well-known/openpgpkey/{}/hu/{}", domain, wkd_hash),
);
}
/// Asserts that lookups by the given email are successful.
pub fn check_responses_by_email(client: &Client, addr: &str, tpk: &Cert,
nr_uids: usize) {
check_mr_response(
client,
&format!("/vks/v1/by-email/{}", addr),
tpk, nr_uids);
pub fn check_responses_by_email(client: &Client, addr: &str, tpk: &Cert, nr_uids: usize) {
check_mr_response(client, &format!("/vks/v1/by-email/{}", addr), tpk, nr_uids);
check_mr_response(
client,
&format!("/vks/v1/by-email/{}", addr.replace("@", "%40")),
tpk, nr_uids);
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&options=mr&search={}", addr),
tpk, nr_uids);
check_hr_response(
client,
&format!("/search?q={}", addr),
tpk, nr_uids);
check_hr_response_onion(
client,
&format!("/search?q={}", addr),
tpk, nr_uids);
tpk,
nr_uids,
);
check_hr_response(client, &format!("/search?q={}", addr), tpk, nr_uids);
check_hr_response_onion(client, &format!("/search?q={}", addr), tpk, nr_uids);
let (wkd_hash, domain) = crate::database::wkd::encode_wkd(addr).unwrap();
check_wkd_response(
&client,
&format!("/.well-known/openpgpkey/{}/hu/{}", domain, wkd_hash),
&tpk, nr_uids);
&tpk,
nr_uids,
);
}
/// Asserts that the given URI returns a Cert matching the given
/// one, with the given number of userids.
pub fn check_mr_response(client: &Client, uri: &str, tpk: &Cert,
nr_uids: usize) {
pub fn check_mr_response(client: &Client, uri: &str, tpk: &Cert, nr_uids: usize) {
let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(),
Some(ContentType::new("application", "pgp-keys")));
assert_eq!(
response.content_type(),
Some(ContentType::new("application", "pgp-keys"))
);
let body = response.into_string().unwrap();
assert!(body.contains("END PGP PUBLIC KEY BLOCK"));
let tpk_ = Cert::from_bytes(body.as_bytes()).unwrap();
assert_eq!(tpk.fingerprint(), tpk_.fingerprint());
assert_eq!(tpk.keys().map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>(),
tpk_.keys().map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>());
assert_eq!(
tpk.keys()
.map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>(),
tpk_.keys()
.map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>()
);
assert_eq!(tpk_.userids().count(), nr_uids);
}
// it's a rather "reverse implementation" style test.. can we do better?
// it's a rather "reverse implementation" style test.. can we do better?
/// Asserts that the given URI returns a correct hkp "index"
/// response for the given Cert.
pub fn check_index_response(client: &Client, uri: &str, tpk: &Cert) {
let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(),
Some(ContentType::new("text", "plain")));
assert_eq!(
response.content_type(),
Some(ContentType::new("text", "plain"))
);
let body = response.into_string().unwrap();
assert!(body.contains("info:1:1"));
@ -1052,46 +1165,60 @@ pub mod tests {
let algo: u8 = tpk.primary_key().pk_algo().into();
assert!(body.contains(&format!("pub:{}:{}:", primary_fpr, algo)));
let creation_time = tpk.primary_key().creation_time().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs();
let creation_time = tpk
.primary_key()
.creation_time()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
assert!(body.contains(&format!(":{}:", creation_time)));
}
/// Asserts that we can get the given Cert back using the various
/// by-fingerprint or by-keyid lookup mechanisms.
pub fn check_mr_responses_by_fingerprint(client: &Client, tpk: &Cert,
nr_uids: usize) {
pub fn check_mr_responses_by_fingerprint(client: &Client, tpk: &Cert, nr_uids: usize) {
let fp = tpk.fingerprint().to_hex();
let keyid = sequoia_openpgp::KeyID::from(tpk.fingerprint()).to_hex();
check_mr_response(client, &format!("/vks/v1/by-keyid/{}", keyid), tpk, nr_uids);
check_mr_response(
client, &format!("/vks/v1/by-keyid/{}", keyid), tpk, nr_uids);
check_mr_response(
client, &format!("/vks/v1/by-fingerprint/{}", fp), tpk, nr_uids);
client,
&format!("/vks/v1/by-fingerprint/{}", fp),
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&options=mr&search={}", fp),
tpk, nr_uids);
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&options=mr&search=0x{}", fp),
tpk, nr_uids);
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&options=mr&search={}", keyid),
tpk, nr_uids);
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&options=mr&search=0x{}", keyid),
tpk, nr_uids);
tpk,
nr_uids,
);
check_mr_response(
client,
&format!("/pks/lookup?op=get&search=0x{}", keyid),
tpk, nr_uids);
tpk,
nr_uids,
);
check_index_response(
client,
&format!("/pks/lookup?op=index&search={}", fp),
tpk);
check_index_response(client, &format!("/pks/lookup?op=index&search={}", fp), tpk);
}
/// Asserts that the given URI contains the search string.
@ -1105,8 +1232,7 @@ pub mod tests {
/// Asserts that the given URI returns human readable response
/// page that contains a URI pointing to the Cert.
pub fn check_hr_response(client: &Client, uri: &str, tpk: &Cert,
nr_uids: usize) {
pub fn check_hr_response(client: &Client, uri: &str, tpk: &Cert, nr_uids: usize) {
let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(), Some(ContentType::HTML));
@ -1115,12 +1241,10 @@ pub mod tests {
assert!(body.contains(&tpk.fingerprint().to_hex()));
// Extract the links.
let link_re = regex::Regex::new(
&format!("{}(/vks/[^ \t\n\"<]*)", BASE_URI)).unwrap();
let link_re = regex::Regex::new(&format!("{}(/vks/[^ \t\n\"<]*)", BASE_URI)).unwrap();
let mut n = 0;
for link in link_re.captures_iter(&body) {
check_mr_response(client, link.get(1).unwrap().as_str(), tpk,
nr_uids);
check_mr_response(client, link.get(1).unwrap().as_str(), tpk, nr_uids);
n += 1;
}
assert!(n > 0);
@ -1128,8 +1252,7 @@ pub mod tests {
/// Asserts that the given URI returns human readable response
/// page that contains an onion URI pointing to the Cert.
pub fn check_hr_response_onion(client: &Client, uri: &str, tpk: &Cert,
_nr_uids: usize) {
pub fn check_hr_response_onion(client: &Client, uri: &str, tpk: &Cert, _nr_uids: usize) {
let response = client
.get(uri)
.header(Header::new("X-Is-Onion", "true"))
@ -1140,63 +1263,53 @@ pub mod tests {
assert!(body.contains(&tpk.fingerprint().to_hex()));
// Extract the links.
let link_re = regex::Regex::new(
&format!("{}(/vks/[^ \t\n\"<]*)", BASE_URI_ONION)).unwrap();
let link_re = regex::Regex::new(&format!("{}(/vks/[^ \t\n\"<]*)", BASE_URI_ONION)).unwrap();
assert!(link_re.is_match(&body));
}
/// Asserts that we can get the given Cert back using the various
/// by-fingerprint or by-keyid lookup mechanisms.
pub fn check_hr_responses_by_fingerprint(client: &Client, tpk: &Cert,
nr_uids: usize) {
pub fn check_hr_responses_by_fingerprint(client: &Client, tpk: &Cert, nr_uids: usize) {
let fp = tpk.fingerprint().to_hex();
let keyid = sequoia_openpgp::KeyID::from(tpk.fingerprint()).to_hex();
check_hr_response(
client,
&format!("/search?q={}", fp),
tpk, nr_uids);
check_hr_response(
client,
&format!("/search?q=0x{}", fp),
tpk, nr_uids);
check_hr_response(
client,
&format!("/search?q={}", keyid),
tpk, nr_uids);
check_hr_response(
client,
&format!("/search?q=0x{}", keyid),
tpk, nr_uids);
check_hr_response(client, &format!("/search?q={}", fp), tpk, nr_uids);
check_hr_response(client, &format!("/search?q=0x{}", fp), tpk, nr_uids);
check_hr_response(client, &format!("/search?q={}", keyid), tpk, nr_uids);
check_hr_response(client, &format!("/search?q=0x{}", keyid), tpk, nr_uids);
}
/// Asserts that the given URI returns correct WKD response with a Cert
/// matching the given one, with the given number of userids.
pub fn check_wkd_response(client: &Client, uri: &str, tpk: &Cert,
nr_uids: usize) {
pub fn check_wkd_response(client: &Client, uri: &str, tpk: &Cert, nr_uids: usize) {
let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(),
Some(ContentType::new("application", "octet-stream")));
assert_eq!(
response.content_type(),
Some(ContentType::new("application", "octet-stream"))
);
let body = response.into_bytes().unwrap();
let tpk_ = Cert::from_bytes(&body).unwrap();
assert_eq!(tpk.fingerprint(), tpk_.fingerprint());
assert_eq!(tpk.keys().map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>(),
tpk_.keys().map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>());
assert_eq!(
tpk.keys()
.map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>(),
tpk_.keys()
.map(|skb| skb.key().fingerprint())
.collect::<Vec<_>>()
);
assert_eq!(tpk_.userids().count(), nr_uids);
}
fn check_verify_link(client: &Client, token: &str, address: &str, lang: &'static str) {
let encoded = ::url::form_urlencoded::Serializer::new(String::new())
.append_pair("token", token)
.append_pair("address", address)
.finish();
let response = client.post("/upload/request-verify")
let response = client
.post("/upload/request-verify")
.header(ContentType::Form)
.header(Header::new("Accept-Language", lang))
.body(encoded.as_bytes())
@ -1207,7 +1320,8 @@ pub mod tests {
fn check_verify_link_json(client: &Client, token: &str, address: &str) {
let json = format!(r#"{{"token":"{}","addresses":["{}"]}}"#, token, address);
let response = client.post("/vks/v1/request-verify")
let response = client
.post("/vks/v1/request-verify")
.header(ContentType::JSON)
.body(json.as_bytes())
.dispatch();
@ -1224,7 +1338,10 @@ pub mod tests {
let response_second = client.post(&confirm_uri).dispatch();
assert_eq!(response_second.status(), Status::BadRequest);
assert!(response_second.into_string().unwrap().contains("already been verified"));
assert!(response_second
.into_string()
.unwrap()
.contains("already been verified"));
}
fn check_mails_and_confirm_deletion(client: &Client, filemail_path: &Path, address: &str) {
@ -1237,8 +1354,12 @@ pub mod tests {
let mail_content = pop_mail(filemail_path).unwrap().unwrap();
let capture_re = regex::bytes::Regex::new(pattern).unwrap();
let capture_content = capture_re.captures(mail_content.as_ref()).unwrap()
.get(1).unwrap().as_bytes();
let capture_content = capture_re
.captures(mail_content.as_ref())
.unwrap()
.get(1)
.unwrap()
.as_bytes();
String::from_utf8_lossy(capture_content).to_string()
}
@ -1258,22 +1379,29 @@ pub mod tests {
let pattern = "name=\"token\" value=\"([^\"]*)\"";
let capture_re = regex::bytes::Regex::new(pattern).unwrap();
let capture_content = capture_re .captures(response_body.as_bytes()).unwrap()
.get(1).unwrap().as_bytes();
let capture_content = capture_re
.captures(response_body.as_bytes())
.unwrap()
.get(1)
.unwrap()
.as_bytes();
let token = String::from_utf8_lossy(capture_content).to_string();
assert_eq!(status, Status::Ok);
token
}
fn vks_publish_submit_response<'a>(client: &'a Client, data: &[u8]) ->
LocalResponse<'a> {
fn vks_publish_submit_response<'a>(client: &'a Client, data: &[u8]) -> LocalResponse<'a> {
let ct = ContentType::with_params(
"multipart", "form-data",
("boundary", "---------------------------14733842173518794281682249499"));
"multipart",
"form-data",
(
"boundary",
"---------------------------14733842173518794281682249499",
),
);
let header =
b"-----------------------------14733842173518794281682249499\r\n\
let header = b"-----------------------------14733842173518794281682249499\r\n\
Content-Disposition: form-data; name=\"csrf\"\r\n\
\r\n\
\r\n\
@ -1287,35 +1415,39 @@ pub mod tests {
body.extend_from_slice(header);
body.extend_from_slice(data);
body.extend_from_slice(footer);
client.post("/upload/submit")
client
.post("/upload/submit")
.header(ct)
.body(&body[..])
.dispatch()
}
fn vks_publish_shortcut_get_token(client: &Client, data: &[u8]) -> String {
let response = client.put("/")
.body(data)
.dispatch();
let response = client.put("/").body(data).dispatch();
assert_eq!(response.status(), Status::Ok);
let response_body = response.into_string().unwrap();
assert!(response_body.contains("Key successfully uploaded"));
let pattern = format!("{}/upload/([^ \t\n]*)", BASE_URI);
let capture_re = regex::bytes::Regex::new(&pattern).unwrap();
let capture_content = capture_re .captures(response_body.as_bytes()).unwrap()
.get(1).unwrap().as_bytes();
let capture_content = capture_re
.captures(response_body.as_bytes())
.unwrap()
.get(1)
.unwrap()
.as_bytes();
String::from_utf8_lossy(capture_content).to_string()
}
fn vks_publish_json_get_token(client: &Client, data: &[u8]) -> String {
let response = client.post("/vks/v1/upload")
let response = client
.post("/vks/v1/upload")
.header(ContentType::JSON)
.body(format!(r#"{{ "keytext": "{}" }}"#, base64::encode(data)))
.dispatch();
let status = response.status();
let response_body = response.into_string().unwrap();
let result: vks_api::json::UploadResult = serde_json::from_str(&response_body).unwrap();
let result: vks_api::json::UploadResult = serde_json::from_str(&response_body).unwrap();
assert_eq!(status, Status::Ok);
result.token
@ -1325,7 +1457,8 @@ pub mod tests {
let encoded = ::url::form_urlencoded::Serializer::new(String::new())
.append_pair("search_term", search_term)
.finish();
let response = client.post("/manage")
let response = client
.post("/manage")
.header(ContentType::Form)
.body(encoded.as_bytes())
.dispatch();
@ -1337,7 +1470,8 @@ pub mod tests {
.append_pair("token", token)
.append_pair("address", address)
.finish();
let response = client.post("/manage/unpublish")
let response = client
.post("/manage/unpublish")
.header(ContentType::Form)
.body(encoded.as_bytes())
.dispatch();

View File

@ -1,24 +1,26 @@
use crate::Result;
use crate::database::{Database, KeyDatabase, StatefulTokens, EmailAddressStatus, TpkStatus, ImportResult};
use crate::database::types::{Fingerprint,Email};
use crate::mail;
use crate::counters;
use crate::tokens::{self, StatelessSerializable};
use crate::database::types::{Email, Fingerprint};
use crate::database::{
Database, EmailAddressStatus, ImportResult, KeyDatabase, StatefulTokens, TpkStatus,
};
use crate::mail;
use crate::rate_limiter::RateLimiter;
use crate::tokens::{self, StatelessSerializable};
use crate::web::RequestOrigin;
use rocket_i18n::I18n;
use gettext_macros::i18n;
use rocket_i18n::I18n;
use sequoia_openpgp::Cert;
use sequoia_openpgp::parse::{Parse, PacketParserBuilder, Dearmor};
use sequoia_openpgp::cert::CertParser;
use sequoia_openpgp::armor::ReaderMode;
use sequoia_openpgp::cert::CertParser;
use sequoia_openpgp::parse::{Dearmor, PacketParserBuilder, Parse};
use sequoia_openpgp::Cert;
use std::io::Read;
use std::convert::TryFrom;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::io::Read;
use self::response::*;
@ -38,7 +40,7 @@ pub mod request {
pub mod response {
use crate::database::types::Email;
#[derive(Debug,Serialize,Deserialize,PartialEq,Eq)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum EmailStatus {
#[serde(rename = "unpublished")]
Unpublished,
@ -57,12 +59,14 @@ pub mod response {
token: String,
key_fpr: String,
is_revoked: bool,
status: HashMap<String,EmailStatus>,
status: HashMap<String, EmailStatus>,
count_unparsed: usize,
is_new_key: bool,
primary_uid: Option<Email>,
},
OkMulti { key_fprs: Vec<String> },
OkMulti {
key_fprs: Vec<String>,
},
Error(String),
}
@ -84,15 +88,14 @@ pub mod response {
}
}
#[derive(Serialize,Deserialize)]
#[derive(Serialize, Deserialize)]
struct VerifyTpkState {
fpr: Fingerprint,
addresses: Vec<Email>,
requested: Vec<Email>,
}
impl StatelessSerializable for VerifyTpkState {
}
impl StatelessSerializable for VerifyTpkState {}
pub fn process_key(
db: &KeyDatabase,
@ -103,9 +106,7 @@ pub fn process_key(
) -> response::UploadResponse {
// First, parse all Certs and error out if one fails.
let parser = match PacketParserBuilder::from_reader(reader)
.and_then(|ppb| {
ppb.dearmor(Dearmor::Auto(ReaderMode::VeryTolerant)).build()
})
.and_then(|ppb| ppb.dearmor(Dearmor::Auto(ReaderMode::VeryTolerant)).build())
{
Ok(ppr) => CertParser::from(ppr),
Err(_) => return UploadResponse::err(i18n!(i18n.catalog, "Parsing of key data failed.")),
@ -122,7 +123,7 @@ pub fn process_key(
));
}
t
},
}
Err(_) => {
return UploadResponse::err(i18n!(i18n.catalog, "Parsing of key data failed."));
}
@ -131,7 +132,13 @@ pub fn process_key(
match tpks.len() {
0 => UploadResponse::err(i18n!(i18n.catalog, "No key uploaded.")),
1 => process_key_single(db, i18n, tokens_stateless, rate_limiter, tpks.into_iter().next().unwrap()),
1 => process_key_single(
db,
i18n,
tokens_stateless,
rate_limiter,
tpks.into_iter().next().unwrap(),
),
_ => process_key_multiple(db, tpks),
}
}
@ -147,14 +154,10 @@ fn log_db_merge(import_result: Result<ImportResult>) -> Result<ImportResult> {
import_result
}
fn process_key_multiple(
db: &KeyDatabase,
tpks: Vec<Cert>,
) -> response::UploadResponse {
fn process_key_multiple(db: &KeyDatabase, tpks: Vec<Cert>) -> response::UploadResponse {
let key_fprs: Vec<_> = tpks
.into_iter()
.flat_map(|tpk| Fingerprint::try_from(tpk.fingerprint())
.map(|fpr| (fpr, tpk)))
.flat_map(|tpk| Fingerprint::try_from(tpk.fingerprint()).map(|fpr| (fpr, tpk)))
.flat_map(|(fpr, tpk)| log_db_merge(db.merge(tpk)).map(|_| fpr.to_string()))
.collect();
@ -174,17 +177,21 @@ fn process_key_single(
Ok(ImportResult::New(tpk_status)) => (tpk_status, true),
Ok(ImportResult::Updated(tpk_status)) => (tpk_status, false),
Ok(ImportResult::Unchanged(tpk_status)) => (tpk_status, false),
Err(_) => return UploadResponse::err(i18n!(i18n.catalog, "Error processing uploaded key.")),
Err(_) => {
return UploadResponse::err(i18n!(i18n.catalog, "Error processing uploaded key."))
}
};
let verify_state = {
let emails = tpk_status.email_status.iter()
.map(|(email,_)| email.clone())
let emails = tpk_status
.email_status
.iter()
.map(|(email, _)| email.clone())
.collect();
VerifyTpkState {
fpr: fp,
addresses: emails,
requested: vec!(),
requested: vec![],
}
};
@ -210,18 +217,19 @@ pub fn request_verify(
};
if tpk_status.is_revoked {
return show_upload_verify(
rate_limiter, token, tpk_status, verify_state, false);
return show_upload_verify(rate_limiter, token, tpk_status, verify_state, false);
}
let emails_requested: Vec<_> = addresses.into_iter()
let emails_requested: Vec<_> = addresses
.into_iter()
.map(|address| address.parse::<Email>())
.flatten()
.filter(|email| verify_state.addresses.contains(email))
.filter(|email| tpk_status.email_status.iter()
.any(|(uid_email, status)|
.filter(|email| {
tpk_status.email_status.iter().any(|(uid_email, status)| {
uid_email == email && *status == EmailAddressStatus::NotPublished
))
})
})
.collect();
for email in emails_requested {
@ -237,10 +245,7 @@ pub fn request_verify(
)
.is_err()
{
return UploadResponse::err(&format!(
"error sending email to {}",
&email
));
return UploadResponse::err(&format!("error sending email to {}", &email));
}
}
@ -252,12 +257,15 @@ fn check_tpk_state(
token_stateless: &tokens::Service,
i18n: &I18n,
token: &str,
) -> Result<(VerifyTpkState,TpkStatus)> {
let verify_state = token_stateless.check::<VerifyTpkState>(token)
.map_err(|_| anyhow!(i18n!(
i18n.catalog,
"Upload session expired. Please try again."
)))?;
) -> Result<(VerifyTpkState, TpkStatus)> {
let verify_state = token_stateless
.check::<VerifyTpkState>(token)
.map_err(|_| {
anyhow!(i18n!(
i18n.catalog,
"Upload session expired. Please try again."
))
})?;
let tpk_status = db.get_tpk_status(&verify_state.fpr, &verify_state.addresses)?;
Ok((verify_state, tpk_status))
}
@ -291,13 +299,12 @@ pub fn verify_confirm(
) -> response::PublishResponse {
let (fingerprint, email) = match check_publish_token(db, token_service, token) {
Ok(x) => x,
Err(_) => return PublishResponse::err(
i18n!(i18n.catalog, "Invalid verification link.")),
Err(_) => return PublishResponse::err(i18n!(i18n.catalog, "Invalid verification link.")),
};
response::PublishResponse::Ok {
fingerprint: fingerprint.to_string(),
email: email.to_string()
email: email.to_string(),
}
}
@ -305,7 +312,7 @@ fn check_publish_token(
db: &KeyDatabase,
token_service: &StatefulTokens,
token: String,
) -> Result<(Fingerprint,Email)> {
) -> Result<(Fingerprint, Email)> {
let payload = token_service.pop_token("verify", &token)?;
let (fingerprint, email) = serde_json::from_str(&payload)?;
@ -335,28 +342,41 @@ fn show_upload_verify(
};
}
let status: HashMap<_,_> = tpk_status.email_status
let status: HashMap<_, _> = tpk_status
.email_status
.iter()
.map(|(email,status)| {
let is_pending = (*status == EmailAddressStatus::NotPublished) &&
!rate_limiter.action_check(format!("verify-{}", &email));
.map(|(email, status)| {
let is_pending = (*status == EmailAddressStatus::NotPublished)
&& !rate_limiter.action_check(format!("verify-{}", &email));
if is_pending {
(email.to_string(), EmailStatus::Pending)
} else {
(email.to_string(), match status {
EmailAddressStatus::NotPublished => EmailStatus::Unpublished,
EmailAddressStatus::Published => EmailStatus::Published,
EmailAddressStatus::Revoked => EmailStatus::Revoked,
})
(
email.to_string(),
match status {
EmailAddressStatus::NotPublished => EmailStatus::Unpublished,
EmailAddressStatus::Published => EmailStatus::Published,
EmailAddressStatus::Revoked => EmailStatus::Revoked,
},
)
}
})
.collect();
let primary_uid = tpk_status.email_status
let primary_uid = tpk_status
.email_status
.get(0)
.map(|(email, _)| email)
.cloned();
let count_unparsed = tpk_status.unparsed_uids;
response::UploadResponse::Ok { token, key_fpr, count_unparsed, is_revoked: false, status, is_new_key, primary_uid }
response::UploadResponse::Ok {
token,
key_fpr,
count_unparsed,
is_revoked: false,
status,
is_new_key,
primary_uid,
}
}

View File

@ -1,20 +1,21 @@
use rocket::request::Request; use rocket::response::{self, Response, Responder};
use rocket::http::{ContentType,Status};
use rocket::http::{ContentType, Status};
use rocket::request::Request;
use rocket::response::{self, Responder, Response};
use rocket::serde::json::Json;
use rocket_i18n::{I18n, Translations};
use serde_json::json;
use std::io::Cursor;
use crate::database::{KeyDatabase, StatefulTokens, Query};
use crate::database::types::{Email, Fingerprint, KeyID};
use crate::database::{KeyDatabase, Query, StatefulTokens};
use crate::mail;
use crate::tokens;
use crate::rate_limiter::RateLimiter;
use crate::tokens;
use crate::web;
use crate::web::{RequestOrigin, MyResponse};
use crate::web::vks;
use crate::web::vks::response::*;
use crate::web::{MyResponse, RequestOrigin};
use rocket::serde::json::Error as JsonError;
@ -34,18 +35,18 @@ pub mod json {
pub keytext: String,
}
#[derive(Serialize,Deserialize)]
#[derive(Serialize, Deserialize)]
pub struct UploadResult {
pub token: String,
pub key_fpr: String,
pub status: HashMap<String,EmailStatus>,
pub status: HashMap<String, EmailStatus>,
}
}
type JsonResult = Result<serde_json::Value, JsonErrorResponse>;
#[derive(Debug)]
pub struct JsonErrorResponse(Status,String);
pub struct JsonErrorResponse(Status, String);
impl<'r> Responder<'r, 'static> for JsonErrorResponse {
fn respond_to(self, _: &'r Request<'_>) -> response::Result<'static> {
@ -61,15 +62,26 @@ impl<'r> Responder<'r, 'static> for JsonErrorResponse {
fn json_or_error<T>(data: Result<Json<T>, JsonError>) -> Result<Json<T>, JsonErrorResponse> {
match data {
Ok(data) => Ok(data),
Err(JsonError::Io(_)) => Err(JsonErrorResponse(Status::InternalServerError, "i/o error!".to_owned())),
Err(JsonError::Io(_)) => Err(JsonErrorResponse(
Status::InternalServerError,
"i/o error!".to_owned(),
)),
Err(JsonError::Parse(_, e)) => Err(JsonErrorResponse(Status::BadRequest, e.to_string())),
}
}
fn upload_ok_json(response: UploadResponse) -> Result<serde_json::Value, JsonErrorResponse> {
match response {
UploadResponse::Ok { token, key_fpr, status, .. } =>
Ok(json!(json::UploadResult { token, key_fpr, status })),
UploadResponse::Ok {
token,
key_fpr,
status,
..
} => Ok(json!(json::UploadResult {
token,
key_fpr,
status
})),
UploadResponse::OkMulti { key_fprs } => Ok(json!(key_fprs)),
UploadResponse::Error(error) => Err(JsonErrorResponse(Status::BadRequest, error)),
}
@ -91,28 +103,29 @@ pub fn upload_json(
}
#[post("/vks/v1/upload", rank = 2)]
pub fn upload_fallback(
origin: RequestOrigin,
) -> JsonErrorResponse {
let error_msg = format!("expected application/json data. see {}/about/api for api docs.", origin.get_base_uri());
pub fn upload_fallback(origin: RequestOrigin) -> JsonErrorResponse {
let error_msg = format!(
"expected application/json data. see {}/about/api for api docs.",
origin.get_base_uri()
);
JsonErrorResponse(Status::BadRequest, error_msg)
}
fn get_locale(
langs: &rocket::State<Translations>,
locales: Vec<String>,
) -> I18n {
fn get_locale(langs: &rocket::State<Translations>, locales: Vec<String>) -> I18n {
locales
.iter()
.flat_map(|lang| lang.split(|c| c == '-' || c == ';' || c == '_').next())
.flat_map(|lang| langs.iter().find(|(trans, _)| trans == &lang))
.next()
.or_else(|| langs.iter().find(|(trans, _)| trans == &"en"))
.map(|(lang, catalog)| I18n { catalog: catalog.clone(), lang })
.map(|(lang, catalog)| I18n {
catalog: catalog.clone(),
lang,
})
.expect("Expected to have an english translation!")
}
#[post("/vks/v1/request-verify", format = "json", data="<data>")]
#[post("/vks/v1/request-verify", format = "json", data = "<data>")]
pub fn request_verify_json(
db: &rocket::State<KeyDatabase>,
langs: &rocket::State<Translations>,
@ -124,19 +137,32 @@ pub fn request_verify_json(
data: Result<Json<json::VerifyRequest>, JsonError>,
) -> JsonResult {
let data = json_or_error(data)?;
let json::VerifyRequest { token, addresses, locale } = data.into_inner();
let json::VerifyRequest {
token,
addresses,
locale,
} = data.into_inner();
let i18n = get_locale(langs, locale.unwrap_or_default());
let result = vks::request_verify(
db, &origin, token_stateful, token_stateless, mail_service,
rate_limiter, &i18n, token, addresses);
db,
&origin,
token_stateful,
token_stateless,
mail_service,
rate_limiter,
&i18n,
token,
addresses,
);
upload_ok_json(result)
}
#[post("/vks/v1/request-verify", rank = 2)]
pub fn request_verify_fallback(
origin: RequestOrigin,
) -> JsonErrorResponse {
let error_msg = format!("expected application/json data. see {}/about/api for api docs.", origin.get_base_uri());
pub fn request_verify_fallback(origin: RequestOrigin) -> JsonErrorResponse {
let error_msg = format!(
"expected application/json data. see {}/about/api for api docs.",
origin.get_base_uri()
);
JsonErrorResponse(Status::BadRequest, error_msg)
}
@ -155,11 +181,7 @@ pub fn vks_v1_by_fingerprint(
}
#[get("/vks/v1/by-email/<email>")]
pub fn vks_v1_by_email(
db: &rocket::State<KeyDatabase>,
i18n: I18n,
email: String,
) -> MyResponse {
pub fn vks_v1_by_email(db: &rocket::State<KeyDatabase>, i18n: I18n, email: String) -> MyResponse {
let email = email.replace("%40", "@");
let query = match email.parse::<Email>() {
Ok(email) => Query::ByEmail(email),
@ -170,11 +192,7 @@ pub fn vks_v1_by_email(
}
#[get("/vks/v1/by-keyid/<kid>")]
pub fn vks_v1_by_keyid(
db: &rocket::State<KeyDatabase>,
i18n: I18n,
kid: String,
) -> MyResponse {
pub fn vks_v1_by_keyid(db: &rocket::State<KeyDatabase>, i18n: I18n, kid: String) -> MyResponse {
let query = match kid.parse::<KeyID>() {
Ok(keyid) => Query::ByKeyID(keyid),
Err(_) => return MyResponse::bad_request_plain("malformed key id"),

View File

@ -4,21 +4,21 @@ use multipart::server::save::Entries;
use multipart::server::save::SaveResult::*;
use multipart::server::Multipart;
use gettext_macros::i18n;
use rocket::data::ByteUnit;
use rocket::form::Form;
use rocket::form::ValueField;
use rocket::http::ContentType;
use rocket::Data;
use rocket_i18n::I18n;
use gettext_macros::i18n;
use url::percent_encoding::percent_decode;
use crate::database::{KeyDatabase, StatefulTokens, Query, Database};
use crate::mail;
use crate::tokens;
use crate::web::{RequestOrigin, MyResponse};
use crate::rate_limiter::RateLimiter;
use crate::database::{Database, KeyDatabase, Query, StatefulTokens};
use crate::i18n_helpers::describe_query_error;
use crate::mail;
use crate::rate_limiter::RateLimiter;
use crate::tokens;
use crate::web::{MyResponse, RequestOrigin};
use std::collections::HashMap;
use std::io::Cursor;
@ -29,7 +29,7 @@ use crate::web::vks::response::*;
const UPLOAD_LIMIT: ByteUnit = ByteUnit::Mebibyte(1);
mod forms {
#[derive(FromForm,Deserialize)]
#[derive(FromForm, Deserialize)]
pub struct VerifyRequest {
pub token: String,
pub address: String,
@ -90,12 +90,10 @@ mod template {
pub address: String,
pub requested: bool,
}
}
impl MyResponse {
fn upload_response_quick(response: UploadResponse,
i18n: I18n, origin: RequestOrigin) -> Self {
fn upload_response_quick(response: UploadResponse, i18n: I18n, origin: RequestOrigin) -> Self {
match response {
UploadResponse::Ok { token, .. } => {
let uri = uri!(quick_upload_proceed(token));
@ -105,23 +103,39 @@ impl MyResponse {
uri
);
MyResponse::plain(text)
},
UploadResponse::OkMulti { key_fprs } =>
MyResponse::plain(format!("Uploaded {} keys. For verification, please upload keys individually.\n", key_fprs.len())),
UploadResponse::Error(error) => MyResponse::bad_request(
"400-plain", anyhow!(error), i18n, origin),
}
UploadResponse::OkMulti { key_fprs } => MyResponse::plain(format!(
"Uploaded {} keys. For verification, please upload keys individually.\n",
key_fprs.len()
)),
UploadResponse::Error(error) => {
MyResponse::bad_request("400-plain", anyhow!(error), i18n, origin)
}
}
}
fn upload_response(response: UploadResponse,
i18n: I18n, origin: RequestOrigin) -> Self {
fn upload_response(response: UploadResponse, i18n: I18n, origin: RequestOrigin) -> Self {
match response {
UploadResponse::Ok { token, key_fpr, is_revoked, count_unparsed, status, .. } =>
Self::upload_ok(token, key_fpr, is_revoked, count_unparsed, status, i18n, origin),
UploadResponse::OkMulti { key_fprs } =>
Self::upload_ok_multi(key_fprs, i18n, origin),
UploadResponse::Error(error) => MyResponse::bad_request(
"upload/upload", anyhow!(error), i18n, origin),
UploadResponse::Ok {
token,
key_fpr,
is_revoked,
count_unparsed,
status,
..
} => Self::upload_ok(
token,
key_fpr,
is_revoked,
count_unparsed,
status,
i18n,
origin,
),
UploadResponse::OkMulti { key_fprs } => Self::upload_ok_multi(key_fprs, i18n, origin),
UploadResponse::Error(error) => {
MyResponse::bad_request("upload/upload", anyhow!(error), i18n, origin)
}
}
}
@ -130,33 +144,35 @@ impl MyResponse {
key_fpr: String,
is_revoked: bool,
count_unparsed: usize,
uid_status: HashMap<String,EmailStatus>,
uid_status: HashMap<String, EmailStatus>,
i18n: I18n,
origin: RequestOrigin,
) -> Self {
let key_link = uri!(search(q = &key_fpr)).to_string();
let count_revoked = uid_status.iter()
.filter(|(_,status)| **status == EmailStatus::Revoked)
let count_revoked = uid_status
.iter()
.filter(|(_, status)| **status == EmailStatus::Revoked)
.count();
let mut email_published: Vec<_> = uid_status.iter()
.filter(|(_,status)| **status == EmailStatus::Published)
.map(|(email,_)| email.to_string())
let mut email_published: Vec<_> = uid_status
.iter()
.filter(|(_, status)| **status == EmailStatus::Published)
.map(|(email, _)| email.to_string())
.collect();
email_published.sort_unstable();
let mut email_unpublished: Vec<_> = uid_status.into_iter()
.filter(|(_,status)| *status == EmailStatus::Unpublished ||
*status == EmailStatus::Pending)
.map(|(email,status)|
template::UploadUidStatus {
address: email,
requested: status == EmailStatus::Pending,
})
let mut email_unpublished: Vec<_> = uid_status
.into_iter()
.filter(|(_, status)| {
*status == EmailStatus::Unpublished || *status == EmailStatus::Pending
})
.map(|(email, status)| template::UploadUidStatus {
address: email,
requested: status == EmailStatus::Pending,
})
.collect();
email_unpublished
.sort_unstable_by(|fst,snd| fst.address.cmp(&snd.address));
email_unpublished.sort_unstable_by(|fst, snd| fst.address.cmp(&snd.address));
let context = template::VerificationSent {
is_revoked,
@ -173,9 +189,9 @@ impl MyResponse {
MyResponse::ok("upload/upload-ok", context, i18n, origin)
}
fn upload_ok_multi(key_fprs: Vec<String>,
i18n: I18n, origin: RequestOrigin) -> Self {
let keys = key_fprs.into_iter()
fn upload_ok_multi(key_fprs: Vec<String>, i18n: I18n, origin: RequestOrigin) -> Self {
let keys = key_fprs
.into_iter()
.map(|fpr| {
let key_link = uri!(search(q = &fpr)).to_string();
template::UploadOkKey {
@ -185,9 +201,7 @@ impl MyResponse {
})
.collect();
let context = template::UploadOkMultiple {
keys,
};
let context = template::UploadOkMultiple { keys };
MyResponse::ok("upload/upload-ok-multiple", context, i18n, origin)
}
@ -208,9 +222,7 @@ pub async fn upload_post_form_data(
cont_type: &ContentType,
data: Data<'_>,
) -> MyResponse {
match process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type)
.await
{
match process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type).await {
Ok(response) => MyResponse::upload_response(response, i18n, origin),
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
}
@ -224,8 +236,7 @@ pub async fn process_post_form_data(
cont_type: &ContentType,
data: Data<'_>,
) -> Result<UploadResponse> {
process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type)
.await
process_upload(db, tokens_stateless, rate_limiter, &i18n, data, cont_type).await
}
#[get("/search?<q>")]
@ -251,14 +262,17 @@ fn key_to_response(
let fp = if let Some(fp) = db.lookup_primary_fingerprint(&query) {
fp
} else if query.is_invalid() {
return MyResponse::bad_request("index", anyhow!(describe_query_error(&i18n, &query)),
i18n, origin);
return MyResponse::bad_request(
"index",
anyhow!(describe_query_error(&i18n, &query)),
i18n,
origin,
);
} else {
return MyResponse::not_found(None, describe_query_error(&i18n, &query),
i18n, origin);
return MyResponse::not_found(None, describe_query_error(&i18n, &query), i18n, origin);
};
let context = template::Search{
let context = template::Search {
query: query_string,
fpr: fp.to_string(),
};
@ -266,7 +280,6 @@ fn key_to_response(
MyResponse::ok("found", context, i18n, origin)
}
#[put("/", data = "<data>")]
pub async fn quick_upload(
db: &rocket::State<KeyDatabase>,
@ -278,20 +291,14 @@ pub async fn quick_upload(
) -> MyResponse {
let buf = match data.open(UPLOAD_LIMIT).into_bytes().await {
Ok(buf) => buf.into_inner(),
Err(error) =>
return MyResponse::bad_request("400-plain", anyhow!(error),
i18n, origin),
Err(error) => return MyResponse::bad_request("400-plain", anyhow!(error), i18n, origin),
};
MyResponse::upload_response_quick(
vks::process_key(
db,
&i18n,
tokens_stateless,
rate_limiter,
Cursor::new(buf)
),
i18n, origin)
vks::process_key(db, &i18n, tokens_stateless, rate_limiter, Cursor::new(buf)),
i18n,
origin,
)
}
#[get("/upload/<token>", rank = 2)]
@ -306,13 +313,24 @@ pub fn quick_upload_proceed(
token: String,
) -> MyResponse {
let result = vks::request_verify(
db, &origin, token_stateful, token_stateless, mail_service,
rate_limiter, &i18n, token, vec!());
db,
&origin,
token_stateful,
token_stateless,
mail_service,
rate_limiter,
&i18n,
token,
vec![],
);
MyResponse::upload_response(result, i18n, origin)
}
#[post("/upload/submit", format = "application/x-www-form-urlencoded", data = "<data>")]
#[post(
"/upload/submit",
format = "application/x-www-form-urlencoded",
data = "<data>"
)]
pub async fn upload_post_form(
db: &rocket::State<KeyDatabase>,
origin: RequestOrigin,
@ -322,10 +340,8 @@ pub async fn upload_post_form(
data: Data<'_>,
) -> MyResponse {
match process_post_form(db, tokens_stateless, rate_limiter, &i18n, data).await {
Ok(response) => MyResponse::upload_response(response,
i18n, origin),
Err(err) => MyResponse::bad_request("upload/upload", err,
i18n, origin),
Ok(response) => MyResponse::upload_response(response, i18n, origin),
Err(err) => MyResponse::bad_request("upload/upload", err, i18n, origin),
}
}
@ -340,9 +356,9 @@ pub async fn process_post_form(
let buf = data.open(UPLOAD_LIMIT).into_bytes().await?;
for ValueField { name, value } in Form::values(&*String::from_utf8_lossy(&buf)) {
let decoded_value = percent_decode(value.as_bytes()).decode_utf8().map_err(|_|
anyhow!("`Content-Type: application/x-www-form-urlencoded` not valid")
)?;
let decoded_value = percent_decode(value.as_bytes())
.decode_utf8()
.map_err(|_| anyhow!("`Content-Type: application/x-www-form-urlencoded` not valid"))?;
if name.to_string().as_str() == "keytext" {
return Ok(vks::process_key(
@ -350,7 +366,7 @@ pub async fn process_post_form(
i18n,
tokens_stateless,
rate_limiter,
Cursor::new(decoded_value.as_bytes())
Cursor::new(decoded_value.as_bytes()),
));
}
}
@ -358,7 +374,6 @@ pub async fn process_post_form(
Err(anyhow!("No keytext found"))
}
async fn process_upload(
db: &KeyDatabase,
tokens_stateless: &tokens::Service,
@ -371,21 +386,23 @@ async fn process_upload(
let (_, boundary) = cont_type
.params()
.find(|&(k, _)| k == "boundary")
.ok_or_else(|| anyhow!("`Content-Type: multipart/form-data` \
boundary param not provided"))?;
.ok_or_else(|| {
anyhow!(
"`Content-Type: multipart/form-data` \
boundary param not provided"
)
})?;
// saves all fields, any field longer than 10kB goes to a temporary directory
// Entries could implement FromData though that would give zero control over
// how the files are saved; Multipart would be a good impl candidate though
let data = Cursor::new(data.open(UPLOAD_LIMIT).into_bytes().await?.value);
match Multipart::with_body(data, boundary).save().temp() {
Full(entries) => {
process_multipart(db, tokens_stateless, rate_limiter, i18n, entries)
}
Full(entries) => process_multipart(db, tokens_stateless, rate_limiter, i18n, entries),
Partial(partial, _) => {
process_multipart(db, tokens_stateless, rate_limiter, i18n, partial.entries)
}
Error(err) => Err(err.into())
Error(err) => Err(err.into()),
}
}
@ -399,14 +416,24 @@ fn process_multipart(
match entries.fields.get("keytext") {
Some(ent) if ent.len() == 1 => {
let reader = ent[0].data.readable()?;
Ok(vks::process_key(db, i18n, tokens_stateless, rate_limiter, reader))
Ok(vks::process_key(
db,
i18n,
tokens_stateless,
rate_limiter,
reader,
))
}
Some(_) => Err(anyhow!("Multiple keytexts found")),
None => Err(anyhow!("No keytext found")),
}
}
#[post("/upload/request-verify", format = "application/x-www-form-urlencoded", data="<request>")]
#[post(
"/upload/request-verify",
format = "application/x-www-form-urlencoded",
data = "<request>"
)]
pub fn request_verify_form(
db: &rocket::State<KeyDatabase>,
origin: RequestOrigin,
@ -419,12 +446,24 @@ pub fn request_verify_form(
) -> MyResponse {
let forms::VerifyRequest { token, address } = request.into_inner();
let result = vks::request_verify(
db, &origin, token_stateful, token_stateless, mail_service,
rate_limiter, &i18n, token, vec!(address));
db,
&origin,
token_stateful,
token_stateless,
mail_service,
rate_limiter,
&i18n,
token,
vec![address],
);
MyResponse::upload_response(result, i18n, origin)
}
#[post("/upload/request-verify", format = "multipart/form-data", data="<request>")]
#[post(
"/upload/request-verify",
format = "multipart/form-data",
data = "<request>"
)]
pub fn request_verify_form_data(
db: &rocket::State<KeyDatabase>,
origin: RequestOrigin,
@ -437,8 +476,16 @@ pub fn request_verify_form_data(
) -> MyResponse {
let forms::VerifyRequest { token, address } = request.into_inner();
let result = vks::request_verify(
db, &origin, token_stateful, token_stateless, mail_service,
rate_limiter, &i18n, token, vec!(address));
db,
&origin,
token_stateful,
token_stateless,
mail_service,
rate_limiter,
&i18n,
token,
vec![address],
);
MyResponse::upload_response(result, i18n, origin)
}
@ -463,12 +510,15 @@ pub fn verify_confirm(
};
MyResponse::ok("upload/publish-result", context, i18n, origin)
},
}
PublishResponse::Error(error) => {
let error_msg = if rate_limiter.action_check(rate_limit_id) {
anyhow!(error)
} else {
anyhow!(i18n!(i18n.catalog, "This address has already been verified."))
anyhow!(i18n!(
i18n.catalog,
"This address has already been verified."
))
};
MyResponse::bad_request("400", error_msg, i18n, origin)
}
@ -476,12 +526,11 @@ pub fn verify_confirm(
}
#[get("/verify/<token>")]
pub fn verify_confirm_form(
origin: RequestOrigin,
i18n: I18n,
token: String,
) -> MyResponse {
MyResponse::ok("upload/verification-form", template::VerifyForm {
token
}, i18n, origin)
pub fn verify_confirm_form(origin: RequestOrigin, i18n: I18n, token: String) -> MyResponse {
MyResponse::ok(
"upload/verification-form",
template::VerifyForm { token },
i18n,
origin,
)
}

View File

@ -3,24 +3,16 @@ use crate::web::MyResponse;
// WKD queries
#[get("/.well-known/openpgpkey/<domain>/hu/<wkd_hash>")]
pub fn wkd_query(
db: &rocket::State<KeyDatabase>,
domain: String,
wkd_hash: String,
) -> MyResponse {
pub fn wkd_query(db: &rocket::State<KeyDatabase>, domain: String, wkd_hash: String) -> MyResponse {
match db.by_domain_and_hash_wkd(&domain, &wkd_hash) {
Some(key) => MyResponse::wkd(key, &wkd_hash),
None => MyResponse::not_found_plain(
"No key found for this email address.",
),
None => MyResponse::not_found_plain("No key found for this email address."),
}
}
// Policy requests.
// 200 response with an empty body.
#[get("/.well-known/openpgpkey/<_domain>/policy")]
pub fn wkd_policy(
_domain: String,
) -> MyResponse {
pub fn wkd_policy(_domain: String) -> MyResponse {
MyResponse::plain("".to_string())
}