
extract stateful token functionality from database

Vincent Breitmoser 2019-04-26 22:14:57 +02:00
parent 385efb37b3
commit 960c0d89f7
9 changed files with 202 additions and 206 deletions
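In short: the Database trait no longer stores verification tokens itself. merge_or_publish now returns serialized Verify payloads, the new StatefulTokens store turns each payload into an on-disk token, and verify_token later consumes the payload that pop_token returns. Below is a minimal sketch of that round trip using the types from this diff; the helper functions and variable names are illustrative, not part of the commit.

use database::{Database, Polymorphic, StatefulTokens};
use database::types::Email;
use Result;

// Upload side: one "verify" token per unverified email (illustrative helper).
fn issue_verify_tokens(
    token_store: &StatefulTokens,
    verification_strings: Vec<(Email, String)>, // as returned by db.merge_or_publish
) -> Result<Vec<(Email, String)>> {
    let mut tokens = Vec::new();
    for (email, payload) in verification_strings {
        // The payload is the serde_json-encoded Verify struct; the store only
        // sees opaque bytes and hands back a random 43-character token name.
        tokens.push((email, token_store.new_token("verify", payload.as_bytes())?));
    }
    Ok(tokens)
}

// Verification side: pop the payload back out and let the database apply it.
fn redeem_verify_token(
    db: &Polymorphic,
    token_store: &StatefulTokens,
    token: &str,
) -> Result<bool> {
    let payload = token_store.pop_token("verify", token)?;
    Ok(db.verify_token(&payload)?.is_some())
}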

View file

@ -1,17 +1,15 @@
use std::convert::{TryInto, TryFrom};
use std::fs::{create_dir_all, read_link, remove_file, rename, File};
use std::io::{Read, Write};
use std::fs::{create_dir_all, read_link, remove_file, rename};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::str;
use serde_json;
use tempfile;
use url;
use pathdiff::diff_paths;
//use sequoia_openpgp::armor::{Writer, Kind};
use {Database, Verify, Query};
use {Database, Query};
use types::{Email, Fingerprint, KeyID};
use sync::{MutexGuard, FlockMutex};
use Result;
@ -19,9 +17,9 @@ use Result;
pub struct Filesystem {
update_lock: FlockMutex,
state_dir: PathBuf,
tmp_dir: PathBuf,
keys_dir: PathBuf,
keys_dir_by_keyid: PathBuf,
keys_dir_by_fingerprint: PathBuf,
keys_dir_by_email: PathBuf,
@ -43,21 +41,18 @@ impl Filesystem {
let base_dir: PathBuf = base_dir.into();
let keys_dir = base_dir.join("keys");
let state_dir = base_dir.join("hagrid_state");
let tmp_dir = base_dir.join("tmp");
Self::new(keys_dir, state_dir, tmp_dir)
Self::new(keys_dir, tmp_dir)
}
pub fn new(
keys_dir: impl Into<PathBuf>,
state_dir: impl Into<PathBuf>,
tmp_dir: impl Into<PathBuf>,
) -> Result<Self> {
/*
use std::fs;
let state_dir = state_dir.into();
if fs::create_dir(&state_dir).is_err() {
let meta = fs::metadata(&state_dir);
@ -82,14 +77,11 @@ impl Filesystem {
state_dir.display(), e));
}
}
}
}*/
let tmp_dir = tmp_dir.into();
create_dir_all(&tmp_dir)?;
let token_dir = state_dir.join("verification_tokens");
create_dir_all(token_dir)?;
let keys_dir: PathBuf = keys_dir.into();
let keys_dir_by_keyid = keys_dir.join("by-keyid");
let keys_dir_by_fingerprint = keys_dir.join("by-fpr");
@ -100,11 +92,10 @@ impl Filesystem {
info!("Opened filesystem database.");
info!("keys_dir: '{}'", keys_dir.display());
info!("state_dir: '{}'", state_dir.display());
info!("tmp_dir: '{}'", tmp_dir.display());
Ok(Filesystem {
update_lock: FlockMutex::new(&state_dir)?,
state_dir: state_dir,
update_lock: FlockMutex::new(&keys_dir)?,
keys_dir: keys_dir,
tmp_dir: tmp_dir,
keys_dir_by_keyid: keys_dir_by_keyid,
keys_dir_by_fingerprint: keys_dir_by_fingerprint,
@ -201,36 +192,6 @@ impl Filesystem {
Email::from_str(&decoded).ok()
}
fn new_token<'a>(&self, token_type: &'a str) -> Result<(File, String)> {
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
let mut rng = thread_rng();
// samples from [a-zA-Z0-9]
// 43 chars ~ 256 bit
let name: String = rng.sample_iter(&Alphanumeric).take(43).collect();
let dir = self.state_dir.join(token_type);
let fd = File::create(dir.join(&name))?;
Ok((fd, name))
}
fn pop_token<'a>(
&self, token_type: &'a str, token: &'a str,
) -> Result<Box<[u8]>> {
let path = self.state_dir.join(token_type).join(token);
let buf = {
let mut fd = File::open(&path)?;
let mut buf = Vec::default();
fd.read_to_end(&mut buf)?;
buf.into_boxed_slice()
};
remove_file(path)?;
Ok(buf)
}
/// Checks the database for consistency.
///
/// Note that this operation may take a long time, and is
@ -443,13 +404,6 @@ impl Database for Filesystem {
self.update_lock.lock().into()
}
fn new_verify_token(&self, payload: Verify) -> Result<String> {
let (mut fd, name) = self.new_token("verification_tokens")?;
fd.write_all(serde_json::to_string(&payload)?.as_bytes())?;
Ok(name)
}
fn update(
&self, fpr: &Fingerprint, new: Option<String>,
) -> Result<()> {
@ -527,8 +481,11 @@ impl Database for Filesystem {
};
if path.exists() {
Some(diff_paths(&path, &self.state_dir).expect("related paths"))
let x = diff_paths(&path, &self.keys_dir).expect("related paths");
println!("YEAP: {:?}", &x);
Some(x)
} else {
println!("NOPE");
None
}
}
@ -631,16 +588,6 @@ impl Database for Filesystem {
Ok(())
}
fn pop_verify_token(&self, token: &str) -> Option<Verify> {
self.pop_token("verification_tokens", token)
.ok()
.and_then(|raw| str::from_utf8(&raw).ok().map(|s| s.to_string()))
.and_then(|s| {
let s = serde_json::from_str(&s);
s.ok()
})
}
// XXX: slow
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
let path = self.fingerprint_to_path(fpr);
@ -677,13 +624,13 @@ mod tests {
#[test]
fn init() {
let tmpdir = TempDir::new().unwrap();
let _ = Filesystem::new(tmpdir.path()).unwrap();
let _ = Filesystem::new_from_base(tmpdir.path()).unwrap();
}
#[test]
fn new() {
let tmpdir = TempDir::new().unwrap();
let db = Filesystem::new(tmpdir.path()).unwrap();
let db = Filesystem::new_from_base(tmpdir.path()).unwrap();
let k1 = TPKBuilder::default().add_userid("a@invalid.example.org")
.generate().unwrap().0;
let k2 = TPKBuilder::default().add_userid("b@invalid.example.org")
@ -702,7 +649,7 @@ mod tests {
#[test]
fn uid_verification() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_uid_verification(&mut db);
}
@ -710,7 +657,7 @@ mod tests {
#[test]
fn uid_deletion() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_uid_deletion(&mut db);
}
@ -718,7 +665,7 @@ mod tests {
#[test]
fn subkey_lookup() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_subkey_lookup(&mut db);
}
@ -726,7 +673,7 @@ mod tests {
#[test]
fn kid_lookup() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_kid_lookup(&mut db);
}
@ -734,14 +681,14 @@ mod tests {
#[test]
fn upload_revoked_tpk() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_upload_revoked_tpk(&mut db);
}
#[test]
fn uid_revocation() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_uid_revocation(&mut db);
}
@ -749,7 +696,7 @@ mod tests {
#[test]
fn key_reupload() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_reupload(&mut db);
}
@ -757,7 +704,7 @@ mod tests {
#[test]
fn uid_replacement() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_uid_replacement(&mut db);
}
@ -765,7 +712,7 @@ mod tests {
#[test]
fn uid_stealing() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_steal_uid(&mut db);
}
@ -773,14 +720,14 @@ mod tests {
#[test]
fn uid_unlinking() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_unlink_uid(&mut db);
}
#[test]
fn same_email_1() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_same_email_1(&mut db);
}
@ -788,7 +735,7 @@ mod tests {
#[test]
fn same_email_2() {
let tmpdir = TempDir::new().unwrap();
let mut db = Filesystem::new(tmpdir.path()).unwrap();
let mut db = Filesystem::new_from_base(tmpdir.path()).unwrap();
test::test_same_email_2(&mut db);
}
@ -796,7 +743,7 @@ mod tests {
#[test]
fn reverse_fingerprint_to_path() {
let tmpdir = TempDir::new().unwrap();
let db = Filesystem::new(tmpdir.path()).unwrap();
let db = Filesystem::new_from_base(tmpdir.path()).unwrap();
let fp: Fingerprint =
"CBCD8F030588653EEDD7E2659B7DD433F254904A".parse().unwrap();

View file

@ -51,6 +51,9 @@ pub use self::memory::Memory;
mod poly;
pub use self::poly::Polymorphic;
mod stateful_tokens;
pub use stateful_tokens::StatefulTokens;
#[cfg(test)]
mod test;
@ -149,8 +152,6 @@ pub trait Database: Sync + Send {
/// read operations to ensure that we return something sane.
fn lock(&self) -> MutexGuard<()>;
fn new_verify_token(&self, payload: Verify) -> Result<String>;
/// Update the data associated with `fpr` with the data in new.
///
/// If new is None, this removes any associated data.
@ -224,9 +225,6 @@ pub trait Database: Sync + Send {
fn link_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
fn unlink_fpr(&self, from: &Fingerprint, to: &Fingerprint) -> Result<()>;
// (verified uid, fpr)
fn pop_verify_token(&self, token: &str) -> Option<Verify>;
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String>;
fn by_kid(&self, kid: &KeyID) -> Option<String>;
fn by_email(&self, email: &Email) -> Option<String>;
@ -397,8 +395,8 @@ pub trait Database: Sync + Send {
self.link_subkeys(&fpr, subkeys)?;
let mut tokens = Vec::new();
for (fp, verify) in unverified_uids.into_iter() {
tokens.push((fp, self.new_verify_token(verify)?));
for (email, verify) in unverified_uids.into_iter() {
tokens.push((email, serde_json::to_string(&verify)?));
}
Ok(tokens)
}
@ -411,12 +409,12 @@ pub trait Database: Sync + Send {
// }
// }
fn verify_token(
&self, token: &str,
&self, token_str: &str,
) -> Result<Option<(Email, Fingerprint)>> {
let _ = self.lock();
match self.pop_verify_token(token) {
Some(Verify { created, packets, fpr, email }) => {
let Verify { created, packets, fpr, email } = serde_json::from_str(&token_str)?;
let now = time::now().to_timespec().sec;
if created > now || now - created > 3 * 3600 {
return Ok(None);
@ -450,9 +448,6 @@ pub trait Database: Sync + Send {
}
}
}
None => Err(failure::err_msg("No such token")),
}
}
/// Deletes all UserID packets and unlinks all email addresses.
fn delete_userids(&self, fpr: &Fingerprint) -> Result <()> {

View file

@ -1,7 +1,7 @@
use parking_lot::Mutex;
use std::collections::HashMap;
use {Database, Verify, Query};
use {Database, Query};
use types::{Email, Fingerprint, KeyID};
use sync::MutexGuard;
use Result;
@ -15,7 +15,6 @@ pub struct Memory {
fpr_links: Mutex<HashMap<Fingerprint, Fingerprint>>,
email: Mutex<HashMap<Email, Fingerprint>>,
kid: Mutex<HashMap<KeyID, Fingerprint>>,
verify_token: Mutex<HashMap<String, Verify>>,
}
impl Default for Memory {
@ -26,7 +25,6 @@ impl Default for Memory {
fpr_links: Mutex::new(HashMap::default()),
kid: Mutex::new(HashMap::default()),
email: Mutex::new(HashMap::default()),
verify_token: Mutex::new(HashMap::default()),
}
}
}
@ -36,13 +34,6 @@ impl Database for Memory {
self.update_lock.lock().into()
}
fn new_verify_token(&self, payload: Verify) -> Result<String> {
let token = Self::new_token();
self.verify_token.lock().insert(token.clone(), payload);
Ok(token)
}
fn update(
&self, fpr: &Fingerprint, new: Option<String>,
) -> Result<()> {
@ -103,11 +94,6 @@ impl Database for Memory {
Ok(())
}
// (verified uid, fpr)
fn pop_verify_token(&self, token: &str) -> Option<Verify> {
self.verify_token.lock().remove(token)
}
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
let fprs = self.fpr.lock();
let links = self.fpr_links.lock();

View file

@ -1,6 +1,6 @@
use std::path::PathBuf;
use {Database, Filesystem, Memory, Verify, Query};
use {Database, Filesystem, Memory, Query};
use Result;
use types::{Email, Fingerprint, KeyID};
use sync::MutexGuard;
@ -18,13 +18,6 @@ impl Database for Polymorphic {
}
}
fn new_verify_token(&self, payload: Verify) -> Result<String> {
match self {
&Polymorphic::Memory(ref db) => db.new_verify_token(payload),
&Polymorphic::Filesystem(ref db) => db.new_verify_token(payload),
}
}
fn update(
&self, fpr: &Fingerprint, new: Option<String>,
) -> Result<()> {
@ -99,13 +92,6 @@ impl Database for Polymorphic {
}
}
fn pop_verify_token(&self, token: &str) -> Option<Verify> {
match self {
&Polymorphic::Memory(ref db) => db.pop_verify_token(token),
&Polymorphic::Filesystem(ref db) => db.pop_verify_token(token),
}
}
fn by_fpr(&self, fpr: &Fingerprint) -> Option<String> {
match self {
&Polymorphic::Memory(ref db) => db.by_fpr(fpr),

View file

@ -0,0 +1,55 @@
use std::io::{Read,Write};
use std::path::PathBuf;
use std::fs::{create_dir_all, remove_file, File};
use std::str;
use Result;
pub struct StatefulTokens {
state_dir: PathBuf,
}
impl StatefulTokens {
pub fn new(state_dir: impl Into<PathBuf>) -> Result<Self> {
let state_dir = state_dir.into();
create_dir_all(&state_dir)?;
info!("Opened stateful token store");
info!("state_dir: '{}'", state_dir.display());
Ok(StatefulTokens { state_dir })
}
pub fn new_token(&self, token_type: &str, payload: &[u8]) -> Result<String> {
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
let mut rng = thread_rng();
// samples from [a-zA-Z0-9]
// 43 chars ~ 256 bit
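// (each char carries log2(62) ≈ 5.95 bits, so 43 chars ≈ 256 bits of entropy)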
let name: String = rng.sample_iter(&Alphanumeric).take(43).collect();
let dir = self.state_dir.join(token_type);
create_dir_all(&dir)?;
let mut fd = File::create(dir.join(&name))?;
fd.write_all(payload)?;
Ok(name)
}
pub fn pop_token(&self, token_type: &str, token: &str) -> Result<String> {
let path = self.state_dir.join(token_type).join(token);
let buf = {
let mut fd = File::open(&path)?;
let mut buf = Vec::default();
fd.read_to_end(&mut buf)?;
buf.into_boxed_slice()
};
remove_file(path)?;
Ok(str::from_utf8(&buf)?.to_string())
}
}
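A minimal usage sketch of the new store; the directory path and payload below are made up for illustration:

use database::StatefulTokens;
use Result;

fn token_round_trip() -> Result<()> {
    // Any writable directory works; a subdirectory is created per token type.
    let tokens = StatefulTokens::new("/tmp/hagrid_state")?;
    // new_token persists the payload under a random 43-character name.
    let name = tokens.new_token("verify", b"example payload")?;
    // pop_token returns the payload and deletes the file, so a second pop fails.
    assert_eq!(tokens.pop_token("verify", &name)?, "example payload");
    assert!(tokens.pop_token("verify", &name).is_err());
    Ok(())
}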

View file

@ -89,8 +89,8 @@ pub fn test_uid_verification<D: Database>(db: &mut D) {
}
}
// verify 1st uid again
assert!(db.verify_token(&tokens[0].1).is_err());
// this operation is idempotent - let's try again!
assert!(db.verify_token(&tokens[0].1).unwrap().is_some());
{
// fetch by fpr

View file

@ -108,10 +108,12 @@ impl<'a, 'r> FromRequest<'a, 'r> for Hkp {
}
#[post("/pks/add", data = "<data>")]
pub fn pks_add(db: rocket::State<Polymorphic>, cont_type: &ContentType,
data: Data)
-> MyResponse {
match upload::handle_upload(db, cont_type, data, None) {
pub fn pks_add(
db: rocket::State<Polymorphic>,
cont_type: &ContentType,
data: Data,
) -> MyResponse {
match upload::handle_upload_without_verify(db, cont_type, data) {
Ok(_) => MyResponse::plain("Ok".into()),
Err(err) => MyResponse::ise(err),
}

View file

@ -13,7 +13,7 @@ pub mod upload;
use mail;
use tokens;
use database::{Database, Polymorphic, Query};
use database::{Database, Polymorphic, Query, StatefulTokens};
use database::types::{Email, Fingerprint, KeyID};
use Result;
@ -66,20 +66,12 @@ impl MyResponse {
})
}
// XXX needs fixing for keys_dir!
pub fn x_accel_redirect(path: PathBuf, fp: &Fingerprint) -> Self {
pub fn x_accel_redirect(x_accel_path: String, fp: &Fingerprint) -> Self {
use rocket::http::hyper::header::{ContentDisposition, DispositionType,
DispositionParam, Charset};
// The path is relative to our base directory, but we need to
// get it relative to base/public.
let mut path = path.into_os_string().into_string().expect("valid UTF8");
// Drop the first component.
assert!(path.starts_with("public/"));
path.drain(..6);
MyResponse::XAccelRedirect(
"",
Header::new("X-Accel-Redirect", path),
Header::new("X-Accel-Redirect", x_accel_path),
ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![
@ -91,6 +83,7 @@ impl MyResponse {
}
pub fn ise(e: failure::Error) -> Self {
println!("Internal error: {:?}", e);
let ctx = templates::FiveHundred{
error: format!("{}", e),
version: env!("VERGEN_SEMVER").to_string(),
@ -172,9 +165,6 @@ mod templates {
}
pub struct HagridState {
/// State directory, used internally by hagrid
state_dir: PathBuf,
/// Assets directory, mounted to /assets, served by hagrid or nginx
assets_dir: PathBuf,
@ -202,8 +192,9 @@ fn key_to_response<'a>(state: rocket::State<HagridState>,
if machine_readable {
if state.x_accel_redirect {
if let Some(path) = db.lookup_path(&query) {
return MyResponse::x_accel_redirect(path, &fp);
if let Some(key_path) = db.lookup_path(&query) {
let x_accel_path = state.keys_dir.join(&key_path).to_string_lossy().to_string();
return MyResponse::x_accel_redirect(x_accel_path, &fp);
}
}
@ -239,7 +230,7 @@ fn key_has_uids(state: &HagridState, db: &Polymorphic, query: &Query)
use sequoia_openpgp::Packet;
use sequoia_openpgp::parse::{Parse, PacketParser, PacketParserResult};
let mut ppr = match db.lookup_path(query) {
Some(path) => PacketParser::from_file(&state.state_dir.join(path))?,
Some(path) => PacketParser::from_file(&state.keys_dir.join(path))?,
None => return Err(failure::err_msg("key vanished")),
};
@ -290,10 +281,27 @@ fn vks_v1_by_keyid(state: rocket::State<HagridState>,
}
#[get("/publish/<token>")]
fn publish_verify(db: rocket::State<Polymorphic>,
token: String) -> MyResponse {
match db.verify_token(&token) {
Ok(Some((userid, _fpr))) => {
fn publish_verify(
db: rocket::State<Polymorphic>,
token_service: rocket::State<StatefulTokens>,
token: String,
) -> MyResponse {
match publish_verify_or_fail(db, token_service, token) {
Ok(response) => response,
Err(e) => MyResponse::ise(e),
}
}
fn publish_verify_or_fail(
db: rocket::State<Polymorphic>,
token_service: rocket::State<StatefulTokens>,
token: String,
) -> Result<MyResponse> {
println!("hi");
let payload = token_service.pop_token("verify", &token)?;
match db.verify_token(&payload)? {
Some((userid, _fpr)) => {
let context = templates::Verify {
verified: true,
userid: userid.to_string(),
@ -301,10 +309,9 @@ fn publish_verify(db: rocket::State<Polymorphic>,
commit: env!("VERGEN_SHA_SHORT").to_string(),
};
MyResponse::ok("publish-result", context)
Ok(MyResponse::ok("publish-result", context))
}
Ok(None) => MyResponse::not_found(Some("generic-error"), None),
Err(e) => MyResponse::ise(e),
None => Ok(MyResponse::not_found(Some("generic-error"), None)),
}
}
@ -365,13 +372,15 @@ fn rocket_factory(rocket: rocket::Rocket) -> Result<rocket::Rocket> {
let db_service = configure_db_service(rocket.config())?;
let hagrid_state = configure_hagrid_state(rocket.config())?;
let token_service = configure_token_service(rocket.config())?;
let stateful_token_service = configure_stateful_token_service(rocket.config())?;
let stateless_token_service = configure_stateless_token_service(rocket.config())?;
let mail_service = configure_mail_service(rocket.config())?;
Ok(rocket
.attach(Template::fairing())
.manage(hagrid_state)
.manage(token_service)
.manage(stateless_token_service)
.manage(stateful_token_service)
.manage(mail_service)
.manage(db_service)
.mount("/", routes)
@ -382,30 +391,32 @@ fn configure_db_service(config: &Config) -> Result<Polymorphic> {
use database::{Filesystem, Polymorphic};
let keys_dir: PathBuf = config.get_str("keys_dir")?.into();
let state_dir: PathBuf = config.get_str("state_dir")?.into();
let tmp_dir: PathBuf = config.get_str("tmp_dir")?.into();
let fs_db = Filesystem::new(keys_dir, state_dir, tmp_dir)?;
let fs_db = Filesystem::new(keys_dir, tmp_dir)?;
Ok(Polymorphic::Filesystem(fs_db))
}
fn configure_hagrid_state(config: &Config) -> Result<HagridState> {
let state_dir: PathBuf = config.get_str("state_dir")?.into();
let assets_dir: PathBuf = config.get_str("assets_dir")?.into();
let keys_dir: PathBuf = config.get_str("keys_dir")?.into();
// State
let base_uri = config.get_str("base-URI")?.to_string();
Ok(HagridState {
state_dir,
assets_dir,
keys_dir,
keys_dir: keys_dir,
base_uri: base_uri.clone(),
x_accel_redirect: config.get_bool("x-accel-redirect")?,
})
}
fn configure_token_service(config: &Config) -> Result<tokens::Service> {
fn configure_stateful_token_service(config: &Config) -> Result<database::StatefulTokens> {
let state_dir: PathBuf = config.get_str("state_dir")?.into();
database::StatefulTokens::new(state_dir)
}
fn configure_stateless_token_service(config: &Config) -> Result<tokens::Service> {
use std::convert::TryFrom;
let secret = config.get_str("token_secret")?.to_string();

View file

@ -8,7 +8,7 @@ use multipart::server::Multipart;
use rocket::http::ContentType;
use rocket::Data;
use database::{Database, Polymorphic};
use database::{Database, Polymorphic, StatefulTokens};
use mail;
use web::MyResponse;
@ -45,19 +45,29 @@ pub fn publish(guide: bool) -> MyResponse {
#[post("/vks/v1/publish", data = "<data>")]
pub fn vks_v1_publish_post(
db: rocket::State<Polymorphic>, cont_type: &ContentType, data: Data,
mail_service: rocket::State<mail::Service>
db: rocket::State<Polymorphic>,
mail_service: rocket::State<mail::Service>,
token_service: rocket::State<StatefulTokens>,
cont_type: &ContentType,
data: Data,
) -> MyResponse {
match handle_upload(db, cont_type, data, Some(mail_service)) {
match handle_upload(db, cont_type, data, Some((mail_service, token_service))) {
Ok(ok) => ok,
Err(err) => MyResponse::ise(err),
}
}
pub fn handle_upload_without_verify(
db: rocket::State<Polymorphic>,
cont_type: &ContentType,
data: Data,
) -> Result<MyResponse> {
handle_upload(db, cont_type, data, None)
}
// signature requires the request to have a `Content-Type`
pub fn handle_upload(
db: rocket::State<Polymorphic>, cont_type: &ContentType, data: Data,
mail_service: Option<rocket::State<mail::Service>>
services: Option<(rocket::State<mail::Service>, rocket::State<StatefulTokens>)>,
) -> Result<MyResponse> {
if cont_type.is_form_data() {
// multipart/form-data
@ -70,7 +80,7 @@ pub fn handle_upload(
boundary param not provided"))),
};
process_upload(boundary, data, db.inner(), mail_service)
process_upload(boundary, data, db.inner(), services)
} else if cont_type.is_form() {
use rocket::request::FormItems;
use std::io::Cursor;
@ -93,7 +103,7 @@ pub fn handle_upload(
return process_key(
Cursor::new(decoded_value.as_bytes()),
&db,
mail_service,
services,
);
}
_ => { /* skip */ }
@ -110,29 +120,30 @@ pub fn handle_upload(
fn process_upload(
boundary: &str, data: Data, db: &Polymorphic,
mail_service: Option<rocket::State<mail::Service>>,
services: Option<(rocket::State<mail::Service>, rocket::State<StatefulTokens>)>,
) -> Result<MyResponse> {
// saves all fields, any field longer than 10kB goes to a temporary directory
// Entries could implement FromData though that would give zero control over
// how the files are saved; Multipart would be a good impl candidate though
match Multipart::with_body(data.open().take(UPLOAD_LIMIT), boundary).save().temp() {
Full(entries) => {
process_multipart(entries, db, mail_service)
process_multipart(entries, db, services)
}
Partial(partial, _) => {
process_multipart(partial.entries, db, mail_service)
process_multipart(partial.entries, db, services)
}
Error(err) => Err(err.into())
}
}
fn process_multipart(entries: Entries, db: &Polymorphic,
mail_service: Option<rocket::State<mail::Service>>)
-> Result<MyResponse> {
fn process_multipart(
entries: Entries, db: &Polymorphic,
services: Option<(rocket::State<mail::Service>, rocket::State<StatefulTokens>)>,
) -> Result<MyResponse> {
match entries.fields.get("keytext") {
Some(ent) if ent.len() == 1 => {
let reader = ent[0].data.readable()?;
process_key(reader, db, mail_service)
process_key(reader, db, services)
}
Some(_) =>
Ok(MyResponse::bad_request(
@ -144,7 +155,9 @@ fn process_multipart(entries: Entries, db: &Polymorphic,
}
fn process_key<R>(
reader: R, db: &Polymorphic, mail_service: Option<rocket::State<mail::Service>>,
reader: R,
db: &Polymorphic,
services: Option<(rocket::State<mail::Service>, rocket::State<StatefulTokens>)>,
) -> Result<MyResponse>
where
R: Read,
@ -173,10 +186,11 @@ where
let mut results: Vec<String> = vec!();
for tpk in tpks {
let tokens = db.merge_or_publish(&tpk)?;
let verification_strings = db.merge_or_publish(&tpk)?;
if let Some(ref mail_service) = mail_service {
for (email, token) in tokens {
if let Some((ref mail_service, ref token_service)) = services {
for (email, data) in verification_strings {
let token = token_service.new_token("verify", data.as_bytes())?;
mail_service.send_verification(
&tpk,
&email,